index
int64
0
0
repo_id
stringlengths
9
205
file_path
stringlengths
31
246
content
stringlengths
1
12.2M
__index_level_0__
int64
0
10k
0
Create_ds/clerezza/api/src/main/java/org/apache
Create_ds/clerezza/api/src/main/java/org/apache/clerezza/Language.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed * with this work for additional information regarding copyright * ownership. The ASF licenses this file to you under the Apache * License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific * language governing permissions and limitations under the License. */ package org.apache.clerezza; /** * Represents a language as expressed by the RDF 4646 language tag * * @author reto */ public class Language { private String id; /** * Constructs the language tag defined by RDF 4646, normalized to lowercase. * * @param id as defined by RDF 4646, normalized to lowercase. */ public Language(String id) { if ((id == null) || (id.equals(""))) { throw new IllegalArgumentException("A language id may not be null or empty"); } this.id = id.toLowerCase(); } @Override public boolean equals(Object other) { if (other == null) { return false; } if (other instanceof Language) { return id.equals(((Language) other).id); } else { return false; } } @Override public int hashCode() { return id.hashCode(); } @Override public String toString() { return id; } }
400
0
Create_ds/clerezza/api/src/main/java/org/apache
Create_ds/clerezza/api/src/main/java/org/apache/clerezza/package-info.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed * with this work for additional information regarding copyright * ownership. The ASF licenses this file to you under the Apache * License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific * language governing permissions and limitations under the License. */ /** * Apache Clerezza RDF API */ package org.apache.clerezza;
401
0
Create_ds/clerezza/api/src/main/java/org/apache/clerezza
Create_ds/clerezza/api/src/main/java/org/apache/clerezza/event/AddEvent.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed
 * with this work for additional information regarding copyright
 * ownership. The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.clerezza.event;

import org.apache.clerezza.Graph;
import org.apache.clerezza.Triple;

/**
 * Represents an addition event that occurred on a <code>Graph</code>:
 * the wrapped triple was added to the wrapped graph. The event data itself
 * (graph and triple) is carried by the <code>GraphEvent</code> superclass.
 *
 * @author rbn
 */
public class AddEvent extends GraphEvent {

    /**
     * Creates an event recording that <code>triple</code> was added to
     * <code>graph</code>.
     *
     * @param graph  the graph that was modified
     * @param triple the triple that was added
     */
    public AddEvent(Graph graph, Triple triple) {
        super(graph, triple);
    }
}
402
0
Create_ds/clerezza/api/src/main/java/org/apache/clerezza
Create_ds/clerezza/api/src/main/java/org/apache/clerezza/event/GraphEvent.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed
 * with this work for additional information regarding copyright
 * ownership. The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.clerezza.event;

import org.apache.clerezza.Graph;
import org.apache.clerezza.Triple;

/**
 * Represents a modification event that occurred on a <code>Graph</code>.
 * A <code>GraphEvent</code> object carries the information about the event:
 * the <code>Triple</code> that was part of the modification, the kind of
 * modification (addition or removal, expressed by the concrete subclass)
 * and the <code>Graph</code> that was modified.
 *
 * @author mir
 */
public class GraphEvent {

    // Both final: an event is an immutable snapshot of one modification.
    private final Graph graph;
    private final Triple triple;

    /**
     * Creates an event for a modification of <code>graph</code> involving
     * <code>triple</code>. Protected: instantiate via a concrete subclass
     * such as AddEvent or RemoveEvent.
     *
     * @param graph  the graph that was modified
     * @param triple the triple that was part of the modification
     */
    protected GraphEvent(Graph graph, Triple triple) {
        this.graph = graph;
        this.triple = triple;
    }

    /**
     * Returns the <code>Graph</code> that was modified in the event.
     *
     * @return the graph
     */
    public Graph getGraph() {
        return graph;
    }

    /**
     * Returns the <code>Triple</code> that was part of the modification.
     *
     * @return the triple
     */
    public Triple getTriple() {
        return triple;
    }
}
403
0
Create_ds/clerezza/api/src/main/java/org/apache/clerezza
Create_ds/clerezza/api/src/main/java/org/apache/clerezza/event/GraphListener.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed
 * with this work for additional information regarding copyright
 * ownership. The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.clerezza.event;

import java.util.List;

/**
 * A class interested in graph events implements this interface and is then
 * added as listener to a <code>ListenableTripleCollection</code> or one of
 * its subclasses. When the <code>ListenableTripleCollection</code> is
 * modified, the <code>GraphListener</code> is notified.
 *
 * @author mir
 */
public interface GraphListener {

    /**
     * Called when a <code>ListenableTripleCollection</code> to which this
     * <code>GraphListener</code> was added has been modified. A
     * <code>List</code> of <code>GraphEvent</code>s is passed as argument;
     * it contains one event per triple that was part of the modification and
     * matched the <code>FilterTriple</code> supplied when the listener was
     * registered.
     *
     * @param events the events describing the modification, in order
     */
    public void graphChanged(List<GraphEvent> events);
}
404
0
Create_ds/clerezza/api/src/main/java/org/apache/clerezza
Create_ds/clerezza/api/src/main/java/org/apache/clerezza/event/RemoveEvent.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed
 * with this work for additional information regarding copyright
 * ownership. The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.clerezza.event;

import org.apache.clerezza.Graph;
import org.apache.clerezza.Triple;

/**
 * Represents a removal event that occurred on a <code>Graph</code>:
 * the wrapped triple was removed from the wrapped graph. The event data
 * itself (graph and triple) is carried by the <code>GraphEvent</code>
 * superclass.
 *
 * @author rbn
 */
public class RemoveEvent extends GraphEvent {

    /**
     * Creates an event recording that <code>triple</code> was removed from
     * <code>graph</code>.
     *
     * @param graph  the graph that was modified
     * @param triple the triple that was removed
     */
    public RemoveEvent(Graph graph, Triple triple) {
        super(graph, triple);
    }
}
405
0
Create_ds/clerezza/api/src/main/java/org/apache/clerezza
Create_ds/clerezza/api/src/main/java/org/apache/clerezza/event/FilterTriple.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed
 * with this work for additional information regarding copyright
 * ownership. The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.clerezza.event;

import org.apache.clerezza.BlankNodeOrIRI;
import org.apache.clerezza.IRI;
import org.apache.clerezza.RDFTerm;
import org.apache.clerezza.Triple;

/**
 * The <code>FilterTriple</code> class provides a match()-method that tests
 * whether a <code>Triple</code> matches a certain triple pattern. A
 * <code>null</code> pattern component acts as a wildcard and matches any
 * value in that position.
 *
 * @author mir
 */
public class FilterTriple {

    // Pattern components; each may be null (wildcard). Immutable.
    private final BlankNodeOrIRI subject;
    private final IRI predicate;
    private final RDFTerm object;

    /**
     * Creates a new <code>FilterTriple</code>. The specified subject,
     * predicate and object are used to test a given <code>Triple</code>. Any
     * of these values can be null, which acts as wildcard in the test.
     *
     * @param subject the subject, or null to match any subject.
     * @param predicate the predicate, or null to match any predicate.
     * @param object the object, or null to match any object.
     */
    public FilterTriple(BlankNodeOrIRI subject, IRI predicate, RDFTerm object) {
        this.subject = subject;
        this.predicate = predicate;
        this.object = object;
    }

    /**
     * Returns true if the subject, predicate and object of the specified
     * <code>Triple</code> match the subject, predicate and object of this
     * <code>FilterTriple</code>. Null values in the <code>FilterTriple</code>
     * act as wildcards.
     *
     * @param triple the triple to test against this pattern
     * @return true if every non-null pattern component equals the
     *         corresponding component of the triple
     */
    public boolean match(Triple triple) {
        // Short-circuit form of the wildcard semantics: a null component
        // matches anything, otherwise it must be equal to the triple's part.
        return (subject == null || subject.equals(triple.getSubject()))
                && (predicate == null || predicate.equals(triple.getPredicate()))
                && (object == null || object.equals(triple.getObject()));
    }

    @Override
    public String toString() {
        return "FilterTriples: " + subject + " " + predicate + " " + object;
    }
}
406
0
Create_ds/clerezza/schemagen/maven-plugin/src/main/java/org/apache/clerezza
Create_ds/clerezza/schemagen/maven-plugin/src/main/java/org/apache/clerezza/ontologiesplugin/OntologyJavaClassCreatorMojo.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed
 * with this work for additional information regarding copyright
 * ownership. The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.clerezza.ontologiesplugin;

import org.apache.clerezza.schemagen.SchemaGen;
import org.apache.clerezza.schemagen.SchemaGenArguments;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.project.MavenProject;

import java.io.*;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Generates java source code out of an ontology described in various RDF
 * formats. The generated java file contains constants for rdf classes and
 * properties described by the ontology.
 *
 * @goal generate
 * @phase generate-sources
 */
public class OntologyJavaClassCreatorMojo extends AbstractMojo {

    /**
     * Path to the root directory
     *
     * @parameter property="basedir"
     */
    private String baseDir;

    /**
     * Path to the root dir of the RDF+XML files. Default is
     * src/main/ontologies.
     *
     * @parameter optional
     */
    private String resourcePath;

    /**
     * Namespace of ontologies
     *
     * @parameter optional
     */
    private Properties namespaceOfOntology;

    /**
     * Additional source directories.
     *
     * @parameter optional
     */
    private File[] sources;

    /**
     * @parameter property="project"
     * @required
     * @readonly
     */
    private MavenProject project;

    // File-extension -> RDF media type; populated in execute().
    private Map<String, String> supportedFormats = new HashMap<>();

    /**
     * Mojo entry point: resolves the ontologies directory, recursively
     * generates a java class for every supported RDF file found there, and
     * registers any additional source roots with the Maven project.
     */
    @Override
    public void execute() throws MojoExecutionException {
        supportedFormats.put(".nt", "application/n-triples");
        supportedFormats.put(".n3", "text/rdf+n3");
        supportedFormats.put(".rdf", "application/rdf+xml");
        supportedFormats.put(".ttl", "text/turtle");
        supportedFormats.put(".turtle", "text/turtle");
        File file;
        if (resourcePath == null) {
            // Default location: <basedir>/src/main/ontologies
            resourcePath = baseDir + File.separator + "src" + File.separator + "main" + File.separator + "ontologies";
        } else if (resourcePath.contains("/")) {
            // NOTE(review): only one of the two normalizations below runs
            // (else-if); a configured path mixing '/' and '\' is only
            // partially normalized — confirm this is intended.
            resourcePath = resourcePath.replace("/", File.separator);
        } else if (resourcePath.contains("\\")) {
            resourcePath = resourcePath.replace("\\", File.separator);
        }
        file = new File(resourcePath);
        checkDir(file);
        if (sources != null) {
            for (File source : sources) {
                project.addCompileSourceRoot(source.getAbsolutePath());
                getLog().info("Source directory: " + source + " added.");
            }
        }
    }

    /**
     * Recursively walks the given directory; every regular file whose
     * extension is in supportedFormats gets a generated java class.
     */
    private void checkDir(File ontologiesDir) {
        for (File file : ontologiesDir.listFiles()) {
            if (file.isDirectory()) {
                checkDir(file);
            } else {
                String fileName = file.getName();
                int indexOfLastDot = fileName.lastIndexOf(".");
                if (indexOfLastDot != -1) {
                    // fileEnding includes the dot, e.g. ".ttl"
                    String fileEnding = fileName.substring(indexOfLastDot);
                    if (supportedFormats.containsKey(fileEnding)) {
                        createJavaClassFile(file, fileEnding);
                    }
                }
            }
        }
    }

    /**
     * Generates a java class for a single ontology file under
     * target/generated-sources/main/java, mirroring the file's position
     * below the resource path as its package.
     */
    private void createJavaClassFile(final File file, final String fileEnding) {
        final String fileName = file.getName();
        final String absolutePath = file.getAbsolutePath();
        // NOTE(review): replace() substitutes every occurrence of the
        // extension in the name, and toUpperCase() uses the default locale —
        // confirm both are acceptable for the expected file names.
        final String className = fileName.replace(fileEnding, "").toUpperCase();
        // Path of the file relative to resourcePath, without the file name.
        final String pathToJavaClass = absolutePath.substring(
                absolutePath.indexOf(resourcePath) + resourcePath.length() + 1)
                .replace(fileName, "");
        final String packageName = pathToJavaClass.replace(File.separator, ".") + className;
        // Adapt the plugin configuration to the SchemaGen argument interface.
        SchemaGenArguments arguments = new SchemaGenArguments() {
            public URL getSchemaUrl() {
                try {
                    return file.toURI().toURL();
                } catch (MalformedURLException e) {
                    getLog().error(e.getMessage(), e);
                }
                return null;
            }

            public String getNamespace() {
                // Explicitly configured namespace wins; otherwise SchemaGen
                // derives it from the ontology itself.
                if (namespaceOfOntology != null && namespaceOfOntology.containsKey(fileName)) {
                    return namespaceOfOntology.getProperty(fileName);
                }
                return null;
            }

            public String getFormatIdentifier() {
                return supportedFormats.get(fileEnding);
            }

            public String getClassName() {
                return packageName;
            }
        };
        SchemaGen schemaGen;
        try {
            schemaGen = new SchemaGen(arguments);
        } catch (IOException | URISyntaxException e) {
            // Skip this ontology but keep processing the others.
            getLog().error(e.getMessage(), e);
            return;
        }
        String rootPath = baseDir + File.separator + "target" + File.separator + "generated-sources" + File.separator + "main" + File.separator + "java" + File.separator;
        File dir = new File(rootPath + pathToJavaClass);
        dir.mkdirs();
        PrintWriter out;
        try {
            out = new PrintWriter(new File(rootPath + pathToJavaClass + className + ".java"), "utf-8");
        } catch (FileNotFoundException e) {
            getLog().error(e.getMessage(), e);
            return;
        } catch (UnsupportedEncodingException e) {
            getLog().error(e.getMessage(), e);
            throw new RuntimeException("utf-8 not supported!");
        }
        try {
            schemaGen.writeClass(out);
        } finally {
            out.flush();
        }
    }
}
407
0
Create_ds/clerezza/schemagen/main/src/main/java/org/apache/clerezza
Create_ds/clerezza/schemagen/main/src/main/java/org/apache/clerezza/schemagen/SchemaGenArguments.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed
 * with this work for additional information regarding copyright
 * ownership. The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.clerezza.schemagen;

import org.wymiwyg.commons.util.arguments.CommandLine;

import java.net.URL;

/**
 * Arguments accepted by {@link SchemaGen}, usable both programmatically and
 * via command-line parsing (see the {@code @CommandLine} annotations).
 *
 * @author reto
 */
public interface SchemaGenArguments {

    /**
     * @return the URL from which the vocabulary can be retrieved
     */
    @CommandLine(longName = "schema", shortName = {"S"}, required = true, description = "The URL from which the vocabulary can be retrieved")
    URL getSchemaUrl();

    /**
     * @return the namespace of the vocabulary, or null to derive it from the
     *         owl:Ontology resource found in the vocabulary
     */
    @CommandLine(longName = "namespace", shortName = {"N"}, description = "Namespace of the vocabulary, by default it uses the URI of a resource of type owl:Ontology found in the vocabulary")
    String getNamespace();

    /**
     * @return the RDF media type used to parse the schema
     */
    @CommandLine(longName = "format", shortName = {"F"}, description = "The RDF content-type of the schema (Content-Type in an HTTP-Response is ignored)", defaultValue = "application/rdf+xml")
    String getFormatIdentifier();

    /**
     * @return the fully qualified name of the class to generate
     */
    @CommandLine(longName = "classname", shortName = {"C"}, required = true, description = "The fully qualified class name of the class to be created")
    String getClassName();
}
408
0
Create_ds/clerezza/schemagen/main/src/main/java/org/apache/clerezza
Create_ds/clerezza/schemagen/main/src/main/java/org/apache/clerezza/schemagen/SchemaGen.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed * with this work for additional information regarding copyright * ownership. The ASF licenses this file to you under the Apache * License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific * language governing permissions and limitations under the License. */ package org.apache.clerezza.schemagen; import org.apache.clerezza.*; import org.apache.clerezza.implementation.in_memory.SimpleGraph; import org.apache.clerezza.representation.Parser; import org.wymiwyg.commons.util.arguments.AnnotatedInterfaceArguments; import org.wymiwyg.commons.util.arguments.ArgumentHandler; import org.wymiwyg.commons.util.arguments.InvalidArgumentsException; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import java.net.URISyntaxException; import java.util.*; /** * Generates the source code of a java-class with constants for an ontology described in RDF. * * @author reto */ public class SchemaGen { private final Graph schemaGraph; private final String className; private final String namespace; public static void main(String... 
args) throws Exception { SchemaGenArguments arguments; try { arguments = new ArgumentHandler(args).getInstance(SchemaGenArguments.class); } catch (InvalidArgumentsException e) { System.out.println(e.getMessage()); System.out.print("Usage: SchemaGen "); System.out.println(AnnotatedInterfaceArguments.getArgumentsSyntax(SchemaGenArguments.class)); PrintWriter out = new PrintWriter(System.out, true); AnnotatedInterfaceArguments.printArgumentDescriptions( SchemaGenArguments.class, out); out.flush(); return; } SchemaGen schemaGen = new SchemaGen(arguments); PrintWriter out = new PrintWriter(System.out); schemaGen.writeClass(out); out.flush(); } /** * Creates an instance doing the transformation as specified by the * arguments. * * @param arguments specification of the transformation * @throws IOException If an IO error occurs. */ public SchemaGen(SchemaGenArguments arguments) throws IOException, URISyntaxException { Parser parser = Parser.getInstance(); InputStream serializedGraph = arguments.getSchemaUrl().openStream(); schemaGraph = parser.parse(serializedGraph, arguments.getFormatIdentifier()); className = arguments.getClassName(); if (arguments.getNamespace() == null) { namespace = getOntologyUri(); } else { namespace = arguments.getNamespace(); } } private String getOntologyUri() { Iterator<Triple> ontologyTriples = schemaGraph.filter(null, RDF.type, OWL.Ontology); String result; if (ontologyTriples.hasNext()) { result = ((IRI) ontologyTriples.next().getSubject()).getUnicodeString(); } else { throw new RuntimeException("No OWL Ontology found!"); } if (ontologyTriples.hasNext()) { throw new RuntimeException("More than one OWL Ontology found!"); } return result; } /** * Writes the generated source code of a java class to the specified * print writer. * * @param out The print writer to write the transformation to. * @throws IllegalArgumentException If out is <code>null</code>. 
*/ public void writeClass(PrintWriter out) { if (out == null) { throw new IllegalArgumentException("Invalid out: out"); } out.print("// Generated by "); out.println(getClass().getName()); String packageName = getPackageName(); if (packageName != null) { out.print("package "); out.print(packageName); out.println(';'); } out.println(); out.println("import org.apache.clerezza.IRI;"); out.println(); out.print("public class "); out.print(getSimpleName()); out.println(" {"); SortedSet<OntologyResource> ontClasses = new TreeSet<>(); ontClasses.addAll(getResourcesOfType(RDFS.Class)); ontClasses.addAll(getResourcesOfType(RDFS.Datatype)); ontClasses.addAll(getResourcesOfType(OWL.Class)); if (ontClasses.size() > 0) { out.println("\t// Classes"); printResources(ontClasses.iterator(), out); } SortedSet<OntologyResource> ontProperties = new TreeSet<>(); //some ontologies defining things that are both classes //and properties, like image in RSS 1.0 - so we remove those ontProperties.addAll(getResourcesOfType(RDF.Property, ontClasses)); ontProperties.addAll(getResourcesOfType(OWL.ObjectProperty, ontClasses)); ontProperties.addAll(getResourcesOfType(OWL.DatatypeProperty, ontClasses)); if (ontProperties.size() > 0) { out.println(); out.println("\t// Properties"); printResources(ontProperties.iterator(), out); } //create a set of classes and properties. 
Everything else should be instances ontClasses.addAll(ontProperties); Collection<OntologyResource> instances = getResourcesOfType(null, ontClasses); if (instances.size() > 0) { out.println(); out.println("\t// Properties"); printResources(instances.iterator(), out); } out.println("}"); } private void printResources(Iterator<OntologyResource> iterator, PrintWriter out) { while (iterator.hasNext()) { OntologyResource ontologyResource = iterator.next(); String description = ontologyResource.getDescription(); if (description != null) { out.println(); out.println("\t/**"); out.print("\t * "); out.println(description); out.println("\t */"); } out.print("\tpublic static final IRI "); out.print(ontologyResource.getLocalName()); out.print(" = new IRI(\""); out.print(ontologyResource.getUriString()); out.println("\");"); } } private Collection<OntologyResource> getResourcesOfType(IRI type) { return getResourcesOfType(type, null); } /** * @param type the type of the class, or null for all things that are declared to be of a type * @param ignore a set things to ignore * @return the result set of things */ private Collection<OntologyResource> getResourcesOfType(IRI type, Collection<OntologyResource> ignore) { Set<OntologyResource> result = new HashSet<>(); Iterator<Triple> classStatements = schemaGraph.filter(null, RDF.type, type); while (classStatements.hasNext()) { Triple triple = classStatements.next(); BlankNodeOrIRI classResource = triple.getSubject(); if (classResource instanceof BlankNode) { if (type != null) System.err.println("Ignoring anonymous resource of type " + type.getUnicodeString()); else System.err.println("Ignoring anonymous resource"); for (Triple contextTriple : getNodeContext(classResource, schemaGraph)) { System.err.println(contextTriple); } continue; } // Test if the given resource belongs to the ontology final IRI classUri = (IRI) classResource; final String strClassUri = classUri.getUnicodeString(); if (strClassUri.startsWith(namespace)) { // The 
remaining part of the class URI must not contain // a slash '/' or a hash '#' character. Otherwise we assume // that is belongs to another ontology. final int offset = namespace.length(); int idxSlash = strClassUri.indexOf('/', offset); int idxHash = strClassUri.indexOf('#', offset); // Note that we generously ignore the first character of the // remaining part that may be a '/' or a '#' because the // namespace may not end with such a character. if (idxSlash <= offset && idxHash <= offset) { OntologyResource ontologyResource = new OntologyResource(classUri, schemaGraph); if (ignore == null || !ignore.contains(ontologyResource)) result.add(ontologyResource); } } } return result; } private ImmutableGraph getNodeContext(BlankNodeOrIRI resource, Graph graph) { final HashSet<BlankNode> dontExpand = new HashSet<>(); if (resource instanceof BlankNode) { dontExpand.add((BlankNode) resource); } return getContextOf(resource, dontExpand, graph).getImmutableGraph(); } private Graph getContextOf(BlankNodeOrIRI node, Set<BlankNode> dontExpand, Graph graph) { Graph result = new SimpleGraph(); Iterator<Triple> forwardProperties = graph.filter(node, null, null); while (forwardProperties.hasNext()) { Triple triple = forwardProperties.next(); result.add(triple); RDFTerm object = triple.getObject(); if (object instanceof BlankNode) { BlankNode bNodeObject = (BlankNode) object; if (!dontExpand.contains(bNodeObject)) { dontExpand.add(bNodeObject); result.addAll(getContextOf(bNodeObject, dontExpand, graph)); } } } Iterator<Triple> backwardProperties = graph.filter(null, null, node); while (backwardProperties.hasNext()) { Triple triple = backwardProperties.next(); result.add(triple); BlankNodeOrIRI subject = triple.getSubject(); if (subject instanceof BlankNode) { BlankNode bNodeSubject = (BlankNode) subject; if (!dontExpand.contains(bNodeSubject)) { dontExpand.add(bNodeSubject); result.addAll(getContextOf(bNodeSubject, dontExpand, graph)); } } } return result; } private String 
getSimpleName() { int lastDotPos = className.lastIndexOf('.'); if (lastDotPos == -1) { return className; } return className.substring(lastDotPos + 1); } private String getPackageName() { int lastDotPos = className.lastIndexOf('.'); if (lastDotPos == -1) { return null; } return className.substring(0, lastDotPos); } private static class OntologyResource implements Comparable<OntologyResource> { static final List<String> reservedWords = Arrays.asList( "abstract", "assert", "boolean", "break", "byte", "case", "catch", "char", "class", "const", "continue", "default", "do", "double", "else", "enum", "extends", "false", "final", "finally", "float", "for", "goto", "if", "implements", "import", "instanceof", "int", "interface", "long", "native", "new", "null", "package", "private", "protected", "public", "return", "short", "static", "strictfp", "super", "switch", "synchronized", "this", "throw", "throws", "transient", "true", "try", "void", "volatile", "while"); final Graph graph; final IRI uri; OntologyResource(IRI uri, Graph graph) { this.uri = uri; this.graph = graph; } String getLocalName() { String uriValue = uri.getUnicodeString(); int hashPos = uriValue.lastIndexOf('#'); int slashPos = uriValue.lastIndexOf('/'); int delimiter = Math.max(hashPos, slashPos); String val = uriValue.substring(delimiter + 1); if (val.length() == 0) return "THIS_ONTOLOGY"; //replace bad characters... val = val.replace('-', '_').replace('.', '_'); return reservedWords.contains(val) ? 
val + "_" : val; } String getUriString() { return uri.getUnicodeString(); } String getDescription() { StringBuilder result = new StringBuilder(); Iterator<Triple> titleStatements = graph.filter( uri, DCTERMS.title, null); while (titleStatements.hasNext()) { RDFTerm object = titleStatements.next().getObject(); if (object instanceof Literal) { result.append("title: "); result.append(((Literal) object).getLexicalForm()); result.append("\n"); } } Iterator<Triple> descriptionStatements = graph.filter( uri, DCTERMS.description, null); while (descriptionStatements.hasNext()) { RDFTerm object = descriptionStatements.next().getObject(); if (object instanceof Literal) { result.append("{@literal description: "); result.append(((Literal) object).getLexicalForm()); result.append("}\n"); } } Iterator<Triple> skosDefStatements = graph.filter( uri, SKOS.definition, null); while (skosDefStatements.hasNext()) { RDFTerm object = skosDefStatements.next().getObject(); if (object instanceof Literal) { result.append("{@literal definition: "); result.append(((Literal) object).getLexicalForm()); result.append("}\n"); } } Iterator<Triple> rdfsCommentStatements = graph.filter( uri, RDFS.comment, null); while (rdfsCommentStatements.hasNext()) { RDFTerm object = rdfsCommentStatements.next().getObject(); if (object instanceof Literal) { // Use {@literal ...} to avoid javadoc complaining about "malformed HTML" in some texts result.append("{@literal comment: "); result.append(((Literal) object).getLexicalForm()); result.append("}\n"); } } Iterator<Triple> skosNoteStatements = graph.filter( uri, SKOS.note, null); while (skosNoteStatements.hasNext()) { RDFTerm object = skosNoteStatements.next().getObject(); if (object instanceof Literal) { result.append("{@literal note: "); result.append(((Literal) object).getLexicalForm()); result.append("}\n"); } } Iterator<Triple> skosExampleStatements = graph.filter( uri, SKOS.example, null); while (skosExampleStatements.hasNext()) { RDFTerm object = 
skosExampleStatements.next().getObject(); if (object instanceof Literal) { result.append("{@literal example: "); result.append(((Literal) object).getLexicalForm()); result.append("}\n"); } else if (object instanceof IRI) { result.append("see <a href=").append(((IRI) object).getUnicodeString()).append(">example</a>"); result.append("\n"); } } return result.toString(); } @Override public int hashCode() { return getUriString().hashCode(); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } else if (obj == this) { return true; } else if (OntologyResource.class.equals(obj.getClass())) { final OntologyResource other = (OntologyResource) obj; return getUriString().equals(other.getUriString()); } return false; } @Override public int compareTo(OntologyResource o) { return getUriString().compareTo(o.getUriString()); } } /* * Ontology vocabs are re-defined here and not imported to avoid a maven dependency loop */ /** * OWL Ontology. */ private static class OWL { private static final String NS = "http://www.w3.org/2002/07/owl#"; public static final RDFTerm Ontology = new IRI(NS + "Ontology"); private static final IRI Class = new IRI(NS + "Class"); private static final IRI DatatypeProperty = new IRI(NS + "DatatypeProperty"); private static final IRI ObjectProperty = new IRI(NS + "ObjectProperty"); } /** * RDFS Ontology. */ private static class RDFS { private static final String NS = "http://www.w3.org/2000/01/rdf-schema#"; private static final IRI Class = new IRI(NS + "Class"); private static final IRI Datatype = new IRI(NS + "Datatype"); private static final IRI comment = new IRI(NS + "comment"); } /** * RDF Ontology. 
*/ private static class RDF { private static final String NS = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"; private static final IRI Property = new IRI(NS + "Property"); private static final IRI type = new IRI(NS + "type"); } private static class SKOS { static final IRI definition = new IRI("http://www.w3.org/2008/05/skos#definition"); static final IRI note = new IRI("http://www.w3.org/2004/02/skos/core#note"); static final IRI example = new IRI("http://www.w3.org/2004/02/skos/core#example"); } private static class DCTERMS { public static final IRI title = new IRI("http://purl.org/dc/terms/title"); public static final IRI description = new IRI("http://purl.org/dc/terms/description"); } }
409
0
Create_ds/clerezza/test.utils/src/main/java/org/apache/clerezza/test
Create_ds/clerezza/test.utils/src/main/java/org/apache/clerezza/test/utils/LockableMGraphWrapperForTesting.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed
 * with this work for additional information regarding copyright
 * ownership. The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.clerezza.test.utils;

import org.apache.clerezza.*;

import java.util.Iterator;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * Wraps a {@link Graph} so that access is only permitted while the
 * appropriate lock is held: if a reading method is invoked without the read
 * (or write) lock being held, a {@code RuntimeException} is thrown. Iterators
 * handed out by this wrapper enforce the same discipline (see
 * {@link LockingIteratorForTesting}). Intended for tests that verify correct
 * lock usage of graph clients.
 *
 * @author rbn, mir
 */
public class LockableMGraphWrapperForTesting extends GraphWrapper {

    // The wrapper relies on the wrapped graph exposing a ReentrantReadWriteLock
    // via getLock(); the cast makes that assumption explicit.
    private final ReentrantReadWriteLock lock = (ReentrantReadWriteLock) getLock();
    private final Lock readLock = lock.readLock();
    private final Graph delegate;

    /**
     * Creates a lock-enforcing wrapper around the given graph.
     *
     * @param providedGraph the (non lock-enforcing) graph to wrap
     */
    public LockableMGraphWrapperForTesting(final Graph providedGraph) {
        super(providedGraph);
        this.delegate = providedGraph;
    }

    @Override
    public Iterator<Triple> filter(BlankNodeOrIRI subject, IRI predicate, RDFTerm object) {
        // Fail fast if the caller did not acquire a read lock before filtering.
        LockChecker.checkIfReadLocked(lock);
        readLock.lock();
        try {
            Iterator<Triple> matches = delegate.filter(subject, predicate, object);
            return new LockingIteratorForTesting(matches, lock);
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public Iterator<Triple> iterator() {
        // Fail fast if the caller did not acquire a read lock before iterating.
        LockChecker.checkIfReadLocked(lock);
        readLock.lock();
        try {
            Iterator<Triple> all = delegate.iterator();
            return new LockingIteratorForTesting(all, lock);
        } finally {
            readLock.unlock();
        }
    }
}
410
0
Create_ds/clerezza/test.utils/src/main/java/org/apache/clerezza/test
Create_ds/clerezza/test.utils/src/main/java/org/apache/clerezza/test/utils/LockingIteratorForTesting.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed
 * with this work for additional information regarding copyright
 * ownership. The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.clerezza.test.utils;

import org.apache.clerezza.Triple;

import java.util.Iterator;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * An {@code Iterator<Triple>} wrapper that enforces the locking discipline of
 * {@link LockableMGraphWrapperForTesting}: {@link #hasNext()} and
 * {@link #next()} require a read (or write) lock to be held, and
 * {@link #remove()} requires the write lock; otherwise a
 * {@code RuntimeException} is thrown (see {@link LockChecker}).
 *
 * <p>Elements are not cached; every call delegates lazily to the wrapped
 * iterator while briefly holding the corresponding lock itself. (The previous
 * javadoc incorrectly claimed all elements were read into a cache on
 * construction.)</p>
 *
 * @author reto
 */
class LockingIteratorForTesting implements Iterator<Triple> {

    // All state is assigned once in the constructor; fields are final.
    private final Iterator<Triple> base;
    private final Lock readLock;
    private final Lock writeLock;
    private final ReentrantReadWriteLock lock;

    /**
     * @param iterator the iterator to delegate to
     * @param lock     the lock whose hold-state is checked on every call
     */
    public LockingIteratorForTesting(Iterator<Triple> iterator, ReentrantReadWriteLock lock) {
        this.base = iterator;
        this.readLock = lock.readLock();
        this.writeLock = lock.writeLock();
        this.lock = lock;
    }

    @Override
    public boolean hasNext() {
        LockChecker.checkIfReadLocked(lock);
        readLock.lock();
        try {
            return base.hasNext();
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public Triple next() {
        LockChecker.checkIfReadLocked(lock);
        readLock.lock();
        try {
            return base.next();
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public void remove() {
        // Removal mutates the underlying graph, so the write lock is required.
        LockChecker.checkIfWriteLocked(lock);
        writeLock.lock();
        try {
            base.remove();
        } finally {
            writeLock.unlock();
        }
    }
}
411
0
Create_ds/clerezza/test.utils/src/main/java/org/apache/clerezza/test
Create_ds/clerezza/test.utils/src/main/java/org/apache/clerezza/test/utils/LockChecker.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed
 * with this work for additional information regarding copyright
 * ownership. The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.clerezza.test.utils;

import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * Static checks used by the test wrappers to detect graph access performed
 * without the required lock being held.
 *
 * @author mir
 */
final class LockChecker {

    /** Utility class; not instantiable. */
    private LockChecker() {
    }

    /**
     * Throws if no lock suitable for reading is held.
     *
     * <p>NOTE(review): {@code getReadLockCount()} counts read locks held by
     * <em>any</em> thread, so a read lock held by another thread also passes
     * this check — presumably acceptable for the single-threaded test
     * scenarios this is used in; confirm before relying on it concurrently.</p>
     *
     * @param lock the lock protecting the graph being read
     * @throws RuntimeException if neither a read lock is held nor the current
     *         thread holds the write lock
     */
    public static void checkIfReadLocked(ReentrantReadWriteLock lock) {
        if (lock.getReadLockCount() == 0 && !lock.isWriteLockedByCurrentThread()) {
            throw new RuntimeException("Neither read- nor write-locked");
        }
    }

    /**
     * Throws if the current thread does not hold the write lock.
     *
     * @param lock the lock protecting the graph being modified
     * @throws RuntimeException if the current thread is not the write-lock
     *         holder
     */
    public static void checkIfWriteLocked(ReentrantReadWriteLock lock) {
        if (!lock.isWriteLockedByCurrentThread()) {
            throw new RuntimeException("Not write-locked");
        }
    }
}
412
0
Create_ds/clerezza/test.utils/src/main/java/org/apache/clerezza/test
Create_ds/clerezza/test.utils/src/main/java/org/apache/clerezza/test/utils/GraphTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed
 * with this work for additional information regarding copyright
 * ownership. The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.clerezza.test.utils;

import org.apache.clerezza.*;
import org.apache.clerezza.implementation.TripleImpl;
import org.apache.clerezza.implementation.literal.PlainLiteralImpl;
import org.apache.clerezza.implementation.literal.TypedLiteralImpl;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;

import java.util.*;

/**
 * A generic abstract test class. Implementations subclass it and provide an
 * implementation of {@link #getEmptyGraph()}; all contract tests for
 * {@link Graph} (add/remove/contains/filter/iterate) then run against that
 * implementation.
 *
 * <p>A large block of commented-out listener tests (referencing
 * {@code GraphListener}/{@code FilterTriple}/{@code SimpleGraph}, none of
 * which were imported, so the code could not have compiled if re-enabled) was
 * removed, together with the private fields only that dead code used.</p>
 *
 * @author reto, szalay, mir, hhn
 */
@RunWith(JUnitPlatform.class)
public abstract class GraphTest {

    // Fixture IRIs shared by the tests below.
    private final IRI uriRef1 = new IRI("http://example.org/ontology#res1");
    private final IRI uriRef2 = new IRI("http://example.org/ontology#res2");
    private final IRI uriRef3 = new IRI("http://example.org/ontology#res3");
    private final IRI uriRef4 = new IRI("http://example.org/ontology#res4");
    private final IRI xmlLiteralType =
            new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#XMLLiteral");

    /**
     * Subclasses implement this method to provide implementation instances of
     * Graph. This method may be called an arbitrary amount of times,
     * independently whether previously returned Graphs are still in use or not.
     *
     * @return an empty Graph of the implementation to be tested
     */
    protected abstract Graph getEmptyGraph();

    @Test
    public void testAddCountAndGetTriples() {
        Graph graph = getEmptyGraph();
        Assertions.assertEquals(0, graph.size());
        final TripleImpl triple1 = new TripleImpl(uriRef1, uriRef2, uriRef1);
        graph.add(triple1);
        Assertions.assertEquals(1, graph.size());
        Iterator<Triple> tripleIter = graph.filter(uriRef1, uriRef2, uriRef1);
        Assertions.assertTrue(tripleIter.hasNext());
        Triple tripleGot = tripleIter.next();
        Assertions.assertEquals(triple1, tripleGot);
        Assertions.assertFalse(tripleIter.hasNext());
        BlankNode bnode = new BlankNode() {};
        graph.add(new TripleImpl(bnode, uriRef1, uriRef3));
        graph.add(new TripleImpl(bnode, uriRef1, uriRef4));
        tripleIter = graph.filter(null, uriRef1, null);
        Set<BlankNodeOrIRI> subjectInMatchingTriples = new HashSet<BlankNodeOrIRI>();
        Set<RDFTerm> objectsInMatchingTriples = new HashSet<RDFTerm>();
        while (tripleIter.hasNext()) {
            Triple triple = tripleIter.next();
            subjectInMatchingTriples.add(triple.getSubject());
            objectsInMatchingTriples.add(triple.getObject());
        }
        // Both triples share the single blank-node subject but have two objects.
        Assertions.assertEquals(1, subjectInMatchingTriples.size());
        Assertions.assertEquals(2, objectsInMatchingTriples.size());
        Set<RDFTerm> expectedObjects = new HashSet<RDFTerm>();
        expectedObjects.add(uriRef3);
        expectedObjects.add(uriRef4);
        Assertions.assertEquals(expectedObjects, objectsInMatchingTriples);
        graph.add(new TripleImpl(bnode, uriRef4, bnode));
        tripleIter = graph.filter(null, uriRef4, null);
        Assertions.assertTrue(tripleIter.hasNext());
        Triple retrievedTriple = tripleIter.next();
        Assertions.assertFalse(tripleIter.hasNext());
        // Subject and object of (bnode, uriRef4, bnode) are the same node.
        Assertions.assertEquals(retrievedTriple.getSubject(), retrievedTriple.getObject());
        tripleIter = graph.filter(uriRef1, uriRef2, null);
        Assertions.assertTrue(tripleIter.hasNext());
        retrievedTriple = tripleIter.next();
        Assertions.assertFalse(tripleIter.hasNext());
        // triple1 is (uriRef1, uriRef2, uriRef1): subject equals object.
        Assertions.assertEquals(retrievedTriple.getSubject(), retrievedTriple.getObject());
    }

    @Test
    public void testRemoveAllTriples() {
        Graph graph = getEmptyGraph();
        Assertions.assertEquals(0, graph.size());
        graph.add(new TripleImpl(uriRef1, uriRef2, uriRef3));
        graph.add(new TripleImpl(uriRef2, uriRef3, uriRef4));
        Assertions.assertEquals(2, graph.size());
        graph.clear();
        Assertions.assertEquals(0, graph.size());
    }

    @Test
    public void testUseTypedLiterals() {
        Graph graph = getEmptyGraph();
        Assertions.assertEquals(0, graph.size());
        Literal value = new TypedLiteralImpl("<elem>value</elem>", xmlLiteralType);
        final TripleImpl triple1 = new TripleImpl(uriRef1, uriRef2, value);
        graph.add(triple1);
        Iterator<Triple> tripleIter = graph.filter(uriRef1, uriRef2, null);
        Assertions.assertTrue(tripleIter.hasNext());
        RDFTerm gotValue = tripleIter.next().getObject();
        Assertions.assertEquals(value, gotValue);
    }

    @Test
    public void testUseLanguageLiterals() {
        Graph graph = getEmptyGraph();
        Assertions.assertEquals(0, graph.size());
        Language language = new Language("it");
        Literal value = new PlainLiteralImpl("<elem>value</elem>", language);
        final TripleImpl triple1 = new TripleImpl(uriRef1, uriRef2, value);
        graph.add(triple1);
        Iterator<Triple> tripleIter = graph.filter(uriRef1, uriRef2, null);
        Assertions.assertTrue(tripleIter.hasNext());
        RDFTerm gotValue = tripleIter.next().getObject();
        Assertions.assertEquals(value, gotValue);
        // The language tag must survive the round trip through the graph.
        Assertions.assertEquals(language, ((Literal) gotValue).getLanguage());
    }

    @Test
    public void testRemoveViaIterator() {
        Graph graph = getEmptyGraph();
        Assertions.assertEquals(0, graph.size());
        final TripleImpl triple1 = new TripleImpl(uriRef1, uriRef2, uriRef1);
        graph.add(triple1);
        final TripleImpl triple2 = new TripleImpl(uriRef1, uriRef2, uriRef4);
        graph.add(triple2);
        Assertions.assertEquals(2, graph.size());
        Iterator<Triple> iterator = graph.iterator();
        while (iterator.hasNext()) {
            iterator.next();
            iterator.remove();
        }
        Assertions.assertEquals(0, graph.size());
    }

    @Test
    public void testGetSize() throws Exception {
        Graph graph = getEmptyGraph();
        // The test graph must always be empty after test fixture setup
        Assertions.assertEquals(0, graph.size());
    }

    @Test
    public void testAddSingleTriple() throws Exception {
        Graph graph = getEmptyGraph();
        final Triple triple = createTriple(
                "http://example.org/ontology/Person",
                "http://example.org/ontology/hasName",
                "http://example.org/people/alice");
        Assertions.assertEquals(0, graph.size());
        Assertions.assertTrue(graph.add(triple));
        Assertions.assertEquals(1, graph.size());
    }

    @Test
    public void testAddSameTripleTwice() throws Exception {
        Graph graph = getEmptyGraph();
        final Triple triple = createTriple(
                "http://example.org/ontology/Person",
                "http://example.org/ontology/hasName",
                "http://example.org/people/alice");
        Assertions.assertEquals(0, graph.size());
        Assertions.assertTrue(graph.add(triple));
        // Adding a duplicate must report no change and leave the size at 1.
        Assertions.assertFalse(graph.add(triple));
        Assertions.assertEquals(1, graph.size());
    }

    @Test
    public void testRemoveSingleTriple() throws Exception {
        Graph graph = getEmptyGraph();
        final Triple triple = createTriple(
                "http://example.org/ontology/Person",
                "http://example.org/ontology/hasName",
                "http://example.org/people/alice");
        Assertions.assertTrue(graph.add(triple));
        Assertions.assertTrue(graph.remove(triple));
        Assertions.assertEquals(0, graph.size());
    }

    @Test
    public void testRemoveSameTripleTwice() throws Exception {
        Graph graph = getEmptyGraph();
        final Triple tripleAlice = createTriple(
                "http://example.org/ontology/Person",
                "http://example.org/ontology/hasName",
                "http://example.org/people/alice");
        final Triple tripleBob = createTriple(
                "http://example.org/ontology/Person",
                "http://example.org/ontology/hasName",
                "http://example.org/people/bob");
        Assertions.assertTrue(graph.add(tripleAlice));
        Assertions.assertTrue(graph.add(tripleBob));
        Assertions.assertTrue(graph.remove(tripleAlice));
        // Second removal of the same triple must report no change.
        Assertions.assertFalse(graph.remove(tripleAlice));
        Assertions.assertEquals(1, graph.size());
    }

    @Test
    public void testGetSameBlankNode() throws Exception {
        Graph graph = getEmptyGraph();
        BlankNode bNode = new BlankNode();
        final IRI HAS_NAME = new IRI("http://example.org/ontology/hasName");
        final PlainLiteralImpl name = new PlainLiteralImpl("http://example.org/people/alice");
        final PlainLiteralImpl name2 = new PlainLiteralImpl("http://example.org/people/bob");
        final Triple tripleAlice = new TripleImpl(bNode, HAS_NAME, name);
        final Triple tripleBob = new TripleImpl(bNode, HAS_NAME, name2);
        Assertions.assertTrue(graph.add(tripleAlice));
        Assertions.assertTrue(graph.add(tripleBob));
        Iterator<Triple> result = graph.filter(null, HAS_NAME, name);
        // The blank node stored must compare equal to the one retrieved.
        Assertions.assertEquals(bNode, result.next().getSubject());
    }

    @Test
    public void testContainsIfContained() throws Exception {
        Graph graph = getEmptyGraph();
        final Triple triple = createTriple(
                "http://example.org/ontology/Person",
                "http://example.org/ontology/hasName",
                "http://example.org/people/alice");
        Assertions.assertTrue(graph.add(triple));
        Assertions.assertTrue(graph.contains(triple));
    }

    @Test
    public void testContainsIfEmpty() throws Exception {
        Graph graph = getEmptyGraph();
        final Triple triple = createTriple(
                "http://example.org/ontology/Person",
                "http://example.org/ontology/hasName",
                "http://example.org/people/alice");
        Assertions.assertFalse(graph.contains(triple));
    }

    @Test
    public void testContainsIfNotContained() throws Exception {
        Graph graph = getEmptyGraph();
        final Triple tripleAdd = createTriple(
                "http://example.org/ontology/Person",
                "http://example.org/ontology/hasName",
                "http://example.org/people/alice");
        final Triple tripleTest = createTriple(
                "http://example.org/ontology/Person",
                "http://example.org/ontology/hasName",
                "http://example.org/people/bob");
        Assertions.assertTrue(graph.add(tripleAdd));
        Assertions.assertFalse(graph.contains(tripleTest));
    }

    @Test
    public void testFilterEmptyGraph() throws Exception {
        Graph graph = getEmptyGraph();
        Iterator<Triple> i = graph.filter(null, null, null);
        Assertions.assertFalse(i.hasNext());
    }

    @Test
    public void testFilterSingleEntry() throws Exception {
        Graph graph = getEmptyGraph();
        final Triple triple = createTriple(
                "http://example.org/ontology/Person",
                "http://example.org/ontology/hasName",
                "http://example.org/people/alice");
        Assertions.assertTrue(graph.add(triple));
        Iterator<Triple> i = graph.filter(null, null, null);
        Collection<Triple> resultSet = toCollection(i);
        Assertions.assertEquals(1, resultSet.size());
        Assertions.assertTrue(resultSet.contains(triple));
    }

    @Test
    public void testFilterByObject() throws Exception {
        Graph graph = getEmptyGraph();
        final Triple tripleAlice = createTriple(
                "http://example.org/ontology/Person",
                "http://example.org/ontology/hasName",
                "http://example.org/people/alice");
        final Triple tripleBob = createTriple(
                "http://example.org/ontology/Person",
                "http://example.org/ontology/hasName",
                "http://example.org/people/bob");
        Assertions.assertTrue(graph.add(tripleAlice));
        Assertions.assertTrue(graph.add(tripleBob));
        Iterator<Triple> iterator;
        Collection<Triple> resultSet;
        // Find bob
        iterator = graph.filter(null, null, new IRI("http://example.org/people/bob"));
        resultSet = toCollection(iterator);
        Assertions.assertEquals(1, resultSet.size());
        Assertions.assertTrue(resultSet.contains(tripleBob));
        // Find alice
        iterator = graph.filter(null, null, new IRI("http://example.org/people/alice"));
        resultSet = toCollection(iterator);
        Assertions.assertEquals(1, resultSet.size());
        Assertions.assertTrue(resultSet.contains(tripleAlice));
        // Find both
        iterator = graph.filter(null, null, null);
        resultSet = toCollection(iterator);
        Assertions.assertEquals(2, resultSet.size());
        Assertions.assertTrue(resultSet.contains(tripleAlice));
        Assertions.assertTrue(resultSet.contains(tripleBob));
    }

    /** Drains an iterator into a collection; helper for filter assertions. */
    private Collection<Triple> toCollection(Iterator<Triple> iterator) {
        Collection<Triple> result = new ArrayList<Triple>();
        while (iterator.hasNext()) {
            result.add(iterator.next());
        }
        return result;
    }

    /**
     * Creates a new <code>Triple</code> whose three components are IRIs.
     *
     * @param subject the subject.
     * @param predicate the predicate.
     * @param object the object.
     * @throws IllegalArgumentException If an attribute is <code>null</code>.
     */
    private Triple createTriple(String subject, String predicate, String object) {
        return new TripleImpl(new IRI(subject), new IRI(predicate), new IRI(object));
    }
}
413
0
Create_ds/clerezza/test.utils/src/main/java/org/apache/clerezza/test
Create_ds/clerezza/test.utils/src/main/java/org/apache/clerezza/test/utils/RandomGraph.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed
 * with this work for additional information regarding copyright
 * ownership. The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.clerezza.test.utils;

import org.apache.clerezza.*;
import org.apache.clerezza.implementation.TripleImpl;
import org.apache.clerezza.implementation.literal.PlainLiteralImpl;
import org.apache.commons.lang.RandomStringUtils;

import java.util.Iterator;
import java.util.UUID;

/**
 * A <code>Graph</code> wrapper that allows growing and shrinking of
 * the wrapped graph with randomly generated triples.
 *
 * @author mir
 */
public class RandomGraph extends GraphWrapper {

    private int interconnectivity = 2;

    public RandomGraph(Graph mGraph, int interconnectivity) {
        super(mGraph);
        // NOTE(review): unlike the other constructor this one historically does
        // not reject interconnectivity <= 0; kept as-is for compatibility.
        this.interconnectivity = interconnectivity;
    }

    /**
     * Creates a new random mutable graph.
     *
     * @param initialSize Determines the initial size of the content graph
     * @param interconnectivity Determines the probability of using an already
     *        existing resource when creating a new triple. The probability of
     *        using an existing resource over creating a new resource is
     *        1-(1/interconnectivity).
     * @param mGraph the graph to wrap and fill
     */
    public RandomGraph(int initialSize, int interconnectivity, Graph mGraph) {
        super(mGraph);
        if (interconnectivity <= 0) {
            // Message fixed: previously read "have to be equals or highter one".
            throw new IllegalArgumentException(
                    "the interconnectivity value has to be equal to or higher than one");
        }
        this.interconnectivity = interconnectivity;
        setupInitialSize(initialSize);
    }

    /**
     * Adds or removes a triple at random (50/50); removal is only attempted
     * on a non-empty graph.
     *
     * @return the triple that was added or removed.
     */
    public Triple evolve() {
        Triple triple;
        int random = rollDice(2);
        if (random == 0 && size() != 0) {
            triple = getRandomTriple();
            remove(triple);
        } else {
            triple = createRandomTriple();
            add(triple);
        }
        return triple;
    }

    /**
     * Removes a random triple.
     *
     * @return the triple that was removed.
     */
    public Triple removeRandomTriple() {
        Triple randomTriple = getRandomTriple();
        remove(randomTriple);
        return randomTriple;
    }

    /**
     * Adds a random triple not yet contained in the graph.
     *
     * @return the triple that was added.
     */
    public Triple addRandomTriple() {
        Triple randomTriple;
        do {
            randomTriple = createRandomTriple();
        } while (contains(randomTriple));
        add(randomTriple);
        return randomTriple;
    }

    private Triple createRandomTriple() {
        return new TripleImpl(getSubject(), getPredicate(), getObject());
    }

    /**
     * Returns an existing BlankNodeOrIRI with probability
     * 1-(1/interconnectivity), otherwise a freshly created one.
     *
     * <p>Fix: the previous version assigned a dead local ({@code random = 0})
     * when no existing term could be found and would then loop forever; it now
     * falls back to creating a new resource.</p>
     */
    private BlankNodeOrIRI getSubject() {
        if (size() != 0 && rollDice(interconnectivity) != 0) {
            RDFTerm existing = getExistingRDFTerm();
            while (existing != null && !(existing instanceof BlankNodeOrIRI)) {
                existing = getExistingRDFTerm();
            }
            if (existing != null) {
                return (BlankNodeOrIRI) existing;
            }
            // graph became empty: fall through and create a fresh resource
        }
        RDFTerm fresh;
        do {
            fresh = createRandomRDFTerm();
        } while (!(fresh instanceof BlankNodeOrIRI));
        return (BlankNodeOrIRI) fresh;
    }

    /** Same strategy as {@link #getSubject()} but for the predicate (IRI only). */
    private IRI getPredicate() {
        if (size() != 0 && rollDice(interconnectivity) != 0) {
            RDFTerm existing = getExistingRDFTerm();
            while (existing != null && !(existing instanceof IRI)) {
                existing = getExistingRDFTerm();
            }
            if (existing != null) {
                return (IRI) existing;
            }
        }
        return createRandomIRI();
    }

    /**
     * Same strategy for the object; any kind of RDFTerm is acceptable.
     * Fix: previously this could return null when no existing term was found.
     */
    private RDFTerm getObject() {
        if (size() != 0 && rollDice(interconnectivity) != 0) {
            RDFTerm existing = getExistingRDFTerm();
            if (existing != null) {
                return existing;
            }
        }
        return createRandomRDFTerm();
    }

    /** Uniformly random int in [0, faces). */
    private static int rollDice(int faces) {
        return (int) (Math.random() * faces);
    }

    /** Creates a blank node, an IRI or a plain literal with equal probability. */
    private RDFTerm createRandomRDFTerm() {
        switch (rollDice(3)) {
            case 0:
                return new BlankNode();
            case 1:
                return createRandomIRI();
            case 2:
                return new PlainLiteralImpl(RandomStringUtils.random(rollDice(100) + 1));
        }
        throw new RuntimeException("in createRandomRDFTerm()");
    }

    /** Picks subject, predicate or object of a random triple; null if the graph is empty. */
    private RDFTerm getExistingRDFTerm() {
        Triple triple = getRandomTriple();
        if (triple == null) {
            return null;
        }
        switch (rollDice(3)) {
            case 0:
                return triple.getSubject();
            case 1:
                return triple.getPredicate();
            case 2:
                return triple.getObject();
        }
        return null;
    }

    private IRI createRandomIRI() {
        return new IRI("http://" + UUID.randomUUID().toString());
    }

    /**
     * Returns a random triple contained in the Graph, or null if it is empty.
     *
     * <p>Fix: retries iteratively instead of recursing (the previous version
     * could recurse without bound when no triple happened to be selected).</p>
     */
    public Triple getRandomTriple() {
        while (true) {
            int size = this.size();
            if (size == 0) {
                return null;
            }
            Iterator<Triple> triples = iterator();
            while (triples.hasNext()) {
                Triple triple = triples.next();
                if (rollDice(this.size()) == 0) {
                    return triple;
                }
            }
            // no triple selected in this pass; try again
        }
    }

    /** Grows the graph to the requested initial size with random triples. */
    private void setupInitialSize(int initialSize) {
        for (int i = 0; i < initialSize; i++) {
            addRandomTriple();
        }
    }
}
414
0
Create_ds/clerezza/test.utils/src/main/java/org/apache/clerezza/test
Create_ds/clerezza/test.utils/src/main/java/org/apache/clerezza/test/utils/GraphWrapper.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed
 * with this work for additional information regarding copyright
 * ownership.  The ASF licenses this file to you under the Apache
 * License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License.  You may obtain a copy of
 * the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied.  See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.clerezza.test.utils;

import org.apache.clerezza.*;

import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.locks.ReadWriteLock;

/**
 * A {@link Graph} decorator that forwards every operation verbatim to a
 * wrapped delegate. Intended as a base class for test utilities that need
 * to intercept or observe individual graph operations by overriding only
 * the methods of interest.
 *
 * @author mir
 */
class GraphWrapper implements Graph {

    // The delegate; protected so subclasses can reach the underlying graph.
    protected Graph wrapped;

    /**
     * Creates a wrapper around the given graph.
     *
     * @param tc the graph all calls are delegated to
     */
    public GraphWrapper(Graph tc) {
        this.wrapped = tc;
    }

    @Override
    public Iterator<Triple> filter(BlankNodeOrIRI subject, IRI predicate, RDFTerm object) {
        return wrapped.filter(subject, predicate, object);
    }

    @Override
    public int size() {
        return wrapped.size();
    }

    @Override
    public boolean isEmpty() {
        return wrapped.isEmpty();
    }

    @Override
    public boolean contains(Object o) {
        return wrapped.contains(o);
    }

    @Override
    public Iterator<Triple> iterator() {
        return wrapped.iterator();
    }

    @Override
    public Object[] toArray() {
        return wrapped.toArray();
    }

    @Override
    public <T> T[] toArray(T[] a) {
        return wrapped.toArray(a);
    }

    @Override
    public boolean add(Triple e) {
        return wrapped.add(e);
    }

    @Override
    public boolean remove(Object o) {
        return wrapped.remove(o);
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        return wrapped.containsAll(c);
    }

    @Override
    public boolean addAll(Collection<? extends Triple> c) {
        return wrapped.addAll(c);
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        return wrapped.removeAll(c);
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        return wrapped.retainAll(c);
    }

    @Override
    public void clear() {
        wrapped.clear();
    }

    @Override
    public ImmutableGraph getImmutableGraph() {
        return wrapped.getImmutableGraph();
    }

    @Override
    public ReadWriteLock getLock() {
        return wrapped.getLock();
    }
}
415
0
Create_ds/clerezza-rdf-core/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/DadminTest.java
/* * Copyright 2015 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.sparql; import com.hp.hpl.jena.query.DatasetAccessor; import com.hp.hpl.jena.query.DatasetAccessorFactory; import java.io.IOException; import java.net.ServerSocket; import org.apache.jena.fuseki.EmbeddedFusekiServer; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import java.io.InputStream; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.Language; import org.apache.clerezza.commons.rdf.Literal; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl; import org.apache.clerezza.rdf.core.serializedform.Serializer; import org.apache.clerezza.rdf.core.serializedform.SupportedFormat; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; /** * * @author reto */ public class DadminTest { final static int serverPort = findFreePort(); static EmbeddedFusekiServer server; @BeforeClass public static void prepare() throws IOException { final String serviceURI = "http://localhost:" + serverPort + "/ds/data"; final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI); final 
InputStream in = DadminTest.class.getResourceAsStream("dadmin.ttl"); final Model m = ModelFactory.createDefaultModel(); String base = "http://example.org/"; m.read(in, base, "TURTLE"); server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath()); server.start(); System.out.println("Started fuseki on port " + serverPort); accessor.putModel(m); } @AfterClass public static void cleanup() { server.stop(); } @Test public void graphSize() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); Assert.assertEquals("Graph not of the exepected size", 1, graph.size()); } @Test public void dump() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); Serializer serializer = Serializer.getInstance(); serializer.serialize(System.out, graph, SupportedFormat.TURTLE); } public static int findFreePort() { int port = 0; try (ServerSocket server = new ServerSocket(0);) { port = server.getLocalPort(); } catch (Exception e) { throw new RuntimeException("unable to find a free port"); } return port; } }
416
0
Create_ds/clerezza-rdf-core/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/Dadmin2Test.java
/* * Copyright 2015 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.sparql; import com.hp.hpl.jena.query.DatasetAccessor; import com.hp.hpl.jena.query.DatasetAccessorFactory; import java.io.IOException; import java.net.ServerSocket; import org.apache.jena.fuseki.EmbeddedFusekiServer; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import java.io.InputStream; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.Language; import org.apache.clerezza.commons.rdf.Literal; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl; import org.apache.clerezza.rdf.core.serializedform.Serializer; import org.apache.clerezza.rdf.core.serializedform.SupportedFormat; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; /** * * @author reto */ public class Dadmin2Test { final static int serverPort = findFreePort(); static EmbeddedFusekiServer server; @BeforeClass public static void prepare() throws IOException { final String serviceURI = "http://localhost:" + serverPort + "/ds/data"; final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI); final 
InputStream in = Dadmin2Test.class.getResourceAsStream("dadmin2.ttl"); final Model m = ModelFactory.createDefaultModel(); String base = "http://example.org/"; m.read(in, base, "TURTLE"); server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath()); server.start(); System.out.println("Started fuseki on port " + serverPort); accessor.putModel(m); } @AfterClass public static void cleanup() { server.stop(); } @Test public void graphSize() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); Assert.assertEquals("Graph not of the exepected size", 12, graph.size()); } @Test public void dump() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); Serializer serializer = Serializer.getInstance(); serializer.serialize(System.out, graph, SupportedFormat.TURTLE); } public static int findFreePort() { int port = 0; try (ServerSocket server = new ServerSocket(0);) { port = server.getLocalPort(); } catch (Exception e) { throw new RuntimeException("unable to find a free port"); } return port; } }
417
0
Create_ds/clerezza-rdf-core/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SimilarBNodes.java
/* * Copyright 2015 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.sparql; import com.hp.hpl.jena.query.DatasetAccessor; import com.hp.hpl.jena.query.DatasetAccessorFactory; import java.io.IOException; import java.net.ServerSocket; import org.apache.jena.fuseki.EmbeddedFusekiServer; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import java.io.InputStream; import java.util.Iterator; import org.apache.clerezza.commons.rdf.BlankNode; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.Triple; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; /** * * @author reto */ public class SimilarBNodes { final static int serverPort = findFreePort(); static EmbeddedFusekiServer server; @BeforeClass public static void prepare() throws IOException { final String serviceURI = "http://localhost:" + serverPort + "/ds/data"; final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI); final InputStream in = SimilarBNodes.class.getResourceAsStream("similar-bnodes.ttl"); final Model m = ModelFactory.createDefaultModel(); String base = "http://example.org/"; m.read(in, base, "TURTLE"); server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath()); 
server.start(); System.out.println("Started fuseki on port " + serverPort); accessor.putModel(m); } @AfterClass public static void cleanup() { server.stop(); } @Test public void graphSize() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); Assert.assertEquals("Graph not of the exepected size", 2, graph.size()); } @Test public void foafKnowsFilter() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); final IRI foafKnows = new IRI("http://xmlns.com/foaf/0.1/knows"); final Iterator<Triple> iter = graph.filter(null, foafKnows, null); Assert.assertTrue(iter.hasNext()); final Triple triple1 = iter.next(); final BlankNodeOrIRI subject1 = triple1.getSubject(); Assert.assertTrue(subject1 instanceof BlankNode); Assert.assertTrue(iter.hasNext()); final Triple triple2 = iter.next(); final BlankNodeOrIRI subject2 = triple2.getSubject(); Assert.assertTrue(subject2 instanceof BlankNode); Assert.assertNotEquals(subject1, subject2); } public static int findFreePort() { int port = 0; try (ServerSocket server = new ServerSocket(0);) { port = server.getLocalPort(); } catch (Exception e) { throw new RuntimeException("unable to find a free port"); } return port; } }
418
0
Create_ds/clerezza-rdf-core/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraphTest.java
/* * Copyright 2015 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.sparql; import com.hp.hpl.jena.query.DatasetAccessor; import com.hp.hpl.jena.query.DatasetAccessorFactory; import java.io.IOException; import java.net.ServerSocket; import org.apache.jena.fuseki.EmbeddedFusekiServer; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import java.io.InputStream; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.Language; import org.apache.clerezza.commons.rdf.Literal; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; /** * * @author reto */ public class SparqlGraphTest { final static int serverPort = findFreePort(); static EmbeddedFusekiServer server; @BeforeClass public static void prepare() throws IOException { final String serviceURI = "http://localhost:" + serverPort + "/ds/data"; final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI); final InputStream in = SparqlGraphTest.class.getResourceAsStream("grounded.ttl"); final Model m = ModelFactory.createDefaultModel(); String 
base = "http://example.org/"; m.read(in, base, "TURTLE"); server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath()); server.start(); System.out.println("Started fuseki on port " + serverPort); accessor.putModel(m); } @AfterClass public static void cleanup() { server.stop(); } @Test public void graphSize() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); Assert.assertEquals("Graph not of the exepected size", 8, graph.size()); } @Test public void filter1() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); final IRI spiderman = new IRI("http://example.org/#spiderman"); final IRI greenGoblin = new IRI("http://example.org/#green-goblin"); final IRI enemyOf = new IRI("http://www.perceive.net/schemas/relationship/enemyOf"); final IRI foafName = new IRI("http://xmlns.com/foaf/0.1/name"); { final Iterator<Triple> iter = graph.filter(spiderman, null, greenGoblin); Assert.assertTrue(iter.hasNext()); Assert.assertEquals(enemyOf, iter.next().getPredicate()); Assert.assertFalse(iter.hasNext()); } { final Iterator<Triple> iter = graph.filter(spiderman, foafName, null); Set<Literal> names = new HashSet<>(); for (int i = 0; i < 2; i++) { Assert.assertTrue(iter.hasNext()); RDFTerm name = iter.next().getObject(); Assert.assertTrue(name instanceof Literal); names.add((Literal)name); } Assert.assertFalse(iter.hasNext()); Assert.assertTrue(names.contains(new PlainLiteralImpl("Spiderman"))); Assert.assertTrue(names.contains(new PlainLiteralImpl("Человек-паук", new Language("ru")))); } } public static int findFreePort() { int port = 0; try (ServerSocket server = new ServerSocket(0);) { port = server.getLocalPort(); } catch (Exception e) { throw new RuntimeException("unable to find a free port"); } return port; } }
419
0
Create_ds/clerezza-rdf-core/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeTest.java
/* * Copyright 2015 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.sparql; import com.hp.hpl.jena.query.DatasetAccessor; import com.hp.hpl.jena.query.DatasetAccessorFactory; import java.io.IOException; import java.net.ServerSocket; import org.apache.jena.fuseki.EmbeddedFusekiServer; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import java.io.InputStream; import java.util.Iterator; import org.apache.clerezza.commons.rdf.BlankNode; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; /** * * @author reto */ public class BNodeTest { final static int serverPort = findFreePort(); static EmbeddedFusekiServer server; @BeforeClass public static void prepare() throws IOException { final String serviceURI = "http://localhost:" + serverPort + "/ds/data"; final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI); final InputStream in = BNodeTest.class.getResourceAsStream("simple-bnode.ttl"); final Model m = ModelFactory.createDefaultModel(); String base = "http://example.org/"; m.read(in, base, "TURTLE"); server = EmbeddedFusekiServer.memTDB(serverPort, 
"/ds");//dataSet.getAbsolutePath()); server.start(); System.out.println("Started fuseki on port " + serverPort); accessor.putModel(m); } @AfterClass public static void cleanup() { server.stop(); } @Test public void graphSize() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); Assert.assertEquals("Graph not of the expected size", 3, graph.size()); } /* Filtering with a Bode that cannot be in graph */ @Test public void filterAlienBNode() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); final BlankNode blankNode = new BlankNode(); final Iterator<Triple> iter = graph.filter(blankNode, null, null); Assert.assertFalse(iter.hasNext()); } @Test public void bNodeIdentity() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); final IRI foafPerson = new IRI("http://xmlns.com/foaf/0.1/Person"); final IRI foafName = new IRI("http://xmlns.com/foaf/0.1/name"); final IRI foafKnows = new IRI("http://xmlns.com/foaf/0.1/knows"); final IRI rdfType = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"); final Iterator<Triple> iter = graph.filter(null, foafName, null); Assert.assertTrue(iter.hasNext()); final BlankNodeOrIRI namedThing = iter.next().getSubject(); Assert.assertTrue(namedThing instanceof BlankNode); final Iterator<Triple> iter2 = graph.filter(null, rdfType, foafPerson); Assert.assertTrue(iter2.hasNext()); final BlankNodeOrIRI person = iter2.next().getSubject(); Assert.assertTrue(person instanceof BlankNode); Assert.assertEquals(namedThing, person); final Iterator<Triple> iter3 = graph.filter(null, foafKnows, null); Assert.assertTrue(iter3.hasNext()); final RDFTerm knownThing = iter3.next().getObject(); Assert.assertTrue(knownThing instanceof BlankNode); Assert.assertEquals(knownThing, person); Assert.assertEquals(namedThing, knownThing); } @Test public void filter1() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); final 
IRI foafPerson = new IRI("http://xmlns.com/foaf/0.1/Person"); final IRI foafName = new IRI("http://xmlns.com/foaf/0.1/name"); final IRI rdfType = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"); final Iterator<Triple> iter = graph.filter(null, foafName, null); Assert.assertTrue(iter.hasNext()); final BlankNodeOrIRI person = iter.next().getSubject(); Assert.assertTrue(person instanceof BlankNode); final Iterator<Triple> iter2 = graph.filter(person, rdfType, null); Assert.assertTrue(iter2.hasNext()); } public static int findFreePort() { int port = 0; try (ServerSocket server = new ServerSocket(0);) { port = server.getLocalPort(); } catch (Exception e) { throw new RuntimeException("unable to find a free port"); } return port; } }
420
0
Create_ds/clerezza-rdf-core/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeCircleTest.java
/* * Copyright 2015 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.sparql; import com.hp.hpl.jena.query.DatasetAccessor; import com.hp.hpl.jena.query.DatasetAccessorFactory; import java.io.IOException; import java.net.ServerSocket; import org.apache.jena.fuseki.EmbeddedFusekiServer; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import java.io.InputStream; import java.util.Iterator; import org.apache.clerezza.commons.rdf.BlankNode; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; /** * * @author reto */ public class BNodeCircleTest { final static int serverPort = findFreePort(); static EmbeddedFusekiServer server; @BeforeClass public static void prepare() throws IOException { final String serviceURI = "http://localhost:" + serverPort + "/ds/data"; final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI); final InputStream in = BNodeCircleTest.class.getResourceAsStream("bnode-circle.ttl"); final Model m = ModelFactory.createDefaultModel(); String base = "http://example.org/"; m.read(in, base, "TURTLE"); server = 
EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath()); server.start(); System.out.println("Started fuseki on port " + serverPort); accessor.putModel(m); } @AfterClass public static void cleanup() { server.stop(); } @Test public void graphSize() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); Assert.assertEquals("Graph not of the exepected size", 2, graph.size()); } @Test public void nullFilter() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); final Iterator<Triple> iter = graph.filter(null, null, null); Assert.assertTrue(iter.hasNext()); final Triple triple1 = iter.next(); final BlankNodeOrIRI subject = triple1.getSubject(); final RDFTerm object = triple1.getObject(); Assert.assertTrue(subject instanceof BlankNode); Assert.assertTrue(object instanceof BlankNode); Assert.assertNotEquals(subject, object); Assert.assertTrue(iter.hasNext()); } @Test public void foafKnowsFilter() { final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query"); final IRI foafKnows = new IRI("http://xmlns.com/foaf/0.1/knows"); final Iterator<Triple> iter = graph.filter(null, foafKnows, null); Assert.assertTrue(iter.hasNext()); final Triple triple1 = iter.next(); final BlankNodeOrIRI subject = triple1.getSubject(); final RDFTerm object = triple1.getObject(); Assert.assertTrue(subject instanceof BlankNode); Assert.assertTrue(object instanceof BlankNode); Assert.assertNotEquals(subject, object); Assert.assertTrue(iter.hasNext()); } public static int findFreePort() { int port = 0; try (ServerSocket server = new ServerSocket(0);) { port = server.getLocalPort(); } catch (Exception e) { throw new RuntimeException("unable to find a free port"); } return port; } }
421
0
Create_ds/clerezza-rdf-core/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlClientTest.java
/* * Copyright 2015 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.sparql; import com.hp.hpl.jena.query.DatasetAccessor; import com.hp.hpl.jena.query.DatasetAccessorFactory; import java.io.IOException; import java.net.ServerSocket; import org.apache.jena.fuseki.EmbeddedFusekiServer; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import java.io.InputStream; import java.util.List; import java.util.Map; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.RDFTerm; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; /** * * @author reto */ public class SparqlClientTest { final static int serverPort = findFreePort(); static EmbeddedFusekiServer server; @BeforeClass public static void prepare() throws IOException { final String serviceURI = "http://localhost:" + serverPort + "/ds/data"; final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI); final InputStream in = SparqlClientTest.class.getResourceAsStream("grounded.ttl"); final Model m = ModelFactory.createDefaultModel(); String base = "http://example.org/"; m.read(in, base, "TURTLE"); server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath()); server.start(); System.out.println("Started fuseki on port " + serverPort); accessor.putModel(m); } @AfterClass public static void cleanup() 
{ server.stop(); } @Test public void select() throws IOException { final SparqlClient sparqlClient = new SparqlClient( "http://localhost:" + serverPort + "/ds/query"); List<Map<String, RDFTerm>> result = sparqlClient.queryResultSet( "SELECT ?name WHERE { " + "<http://example.org/#spiderman> " + "<http://xmlns.com/foaf/0.1/name> ?name}"); Assert.assertEquals("There should be two names", 2, result.size()); } @Test public void ask() throws IOException { final SparqlClient sparqlClient = new SparqlClient( "http://localhost:" + serverPort + "/ds/query"); Object result = sparqlClient.queryResult( "ASK { " + "<http://example.org/#spiderman> " + "<http://xmlns.com/foaf/0.1/name> ?name}"); Assert.assertEquals("ASK should result to true", Boolean.TRUE, result); } @Test public void desribe() throws IOException { final SparqlClient sparqlClient = new SparqlClient( "http://localhost:" + serverPort + "/ds/query"); Object result = sparqlClient.queryResult( "DESCRIBE <http://example.org/#spiderman>"); Assert.assertTrue("DESCRIBE should return a graph", result instanceof Graph); } public static int findFreePort() { int port = 0; try (ServerSocket server = new ServerSocket(0);) { port = server.getLocalPort(); } catch (Exception e) { throw new RuntimeException("unable to find a free port"); } return port; } }
422
0
Create_ds/clerezza-rdf-core/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlBNode.java
/* * Copyright 2015 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.sparql; import java.util.Collection; import java.util.Objects; import org.apache.clerezza.commons.rdf.BlankNode; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.apache.clerezza.commons.rdf.ImmutableGraph; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl; import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph; /** * * @author developer */ class SparqlBNode extends BlankNode { final static IRI internalBNodeId = new IRI("urn:x-internalid:fdmpoihdfw"); final ImmutableGraph context; private final int isoDistinguisher; SparqlBNode(BlankNode node, Collection<Triple> context, int isoDistinguisher) { this.isoDistinguisher = isoDistinguisher; final SimpleGraph contextBuider = new SimpleGraph(); for (Triple triple : context) { BlankNodeOrIRI subject = triple.getSubject(); RDFTerm object = triple.getObject(); contextBuider.add(new TripleImpl(subject.equals(node) ? internalBNodeId : subject, triple.getPredicate(), object.equals(node) ? 
internalBNodeId : object)); } this.context = contextBuider.getImmutableGraph(); } @Override public int hashCode() { int hash = 7+isoDistinguisher; hash = 61 * hash + Objects.hashCode(this.context); return hash; } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final SparqlBNode other = (SparqlBNode) obj; if (isoDistinguisher != other.isoDistinguisher) { return false; } return Objects.equals(this.context, other.context); } }
423
0
Create_ds/clerezza-rdf-core/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraph.java
/* * Copyright 2015 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.sparql; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; import java.util.concurrent.Callable; import java.util.logging.Logger; import org.apache.clerezza.commons.rdf.BlankNode; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.ImmutableGraph; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.Literal; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.impl.utils.AbstractGraph; import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl; import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph; /** * * @author reto */ public class SparqlGraph extends AbstractGraph { private static final int MAX_ISOMORPHIC_BNODES = 1000; private static final Logger log = Logger.getLogger(SparqlGraph.class.getName()); final SparqlClient sparqlClient; /** * Constructs a Graph representing the default graph at the specified * endpoint */ public SparqlGraph(final String endpoint) { sparqlClient = new SparqlClient(endpoint); 
} @Override protected Iterator<Triple> performFilter(final BlankNodeOrIRI filterSubject, final IRI filterPredicate, final RDFTerm filterObject) { try { String query = createQuery(filterSubject, filterPredicate, filterObject); final List<Map<String, RDFTerm>> sparqlResults = sparqlClient.queryResultSet(query); //first to triples without bnode-conversion //rawTriples contains the triples with the BNodes from the result set final Collection<Triple> rawTriples = new ArrayList<>(); for (Map<String, RDFTerm> result : sparqlResults) { rawTriples.add(new TripleImpl(filterSubject != null ? filterSubject : (BlankNodeOrIRI) result.get("s"), filterPredicate != null ? filterPredicate : (IRI) result.get("p"), filterObject != null ? filterObject : result.get("o"))); } //then bnode conversion final Iterator<Triple> rawTriplesIter = rawTriples.iterator(); //this is basically just wokring around the lack of (named) nested functions return (new Callable<Iterator<Triple>>() { final Map<BlankNode, SparqlBNode> nodeMap = new HashMap<>(); final Set<ImmutableGraph> usedContext = new HashSet<>(); private RDFTerm useSparqlNode(RDFTerm node) throws IOException { if (node instanceof BlankNodeOrIRI) { return useSparqlNode((BlankNodeOrIRI) node); } return node; } private BlankNodeOrIRI useSparqlNode(BlankNodeOrIRI node) throws IOException { if (node instanceof BlankNode) { if (!nodeMap.containsKey(node)) { createBlankNodesForcontext((BlankNode) node); } if (!nodeMap.containsKey(node)) { throw new RuntimeException("no Bnode created"); } return nodeMap.get(node); } else { return node; } } private void createBlankNodesForcontext(final BlankNode node) throws IOException { final Collection<Triple> context = getContext(node); final Set<BlankNode> rawNodes = new HashSet<>(); for (Triple triple : context) { { final BlankNodeOrIRI subject = triple.getSubject(); if (subject instanceof BlankNode) { rawNodes.add((BlankNode) subject); } } { final RDFTerm object = triple.getObject(); if (object instanceof 
BlankNode) { rawNodes.add((BlankNode) object); } } } final Set<SparqlBNode> createdSparqlNodes = new HashSet<>(); //final Map<BlankNode, SparqlBNode> preliminaryNodes = new HashMap<>(); for (BlankNode rawNode : rawNodes) { for (int i = 0; i < MAX_ISOMORPHIC_BNODES; i++) { SparqlBNode sparqlBNode = new SparqlBNode(rawNode, context, i); if (!createdSparqlNodes.contains(sparqlBNode)) { nodeMap.put(rawNode, sparqlBNode); createdSparqlNodes.add(sparqlBNode); break; } } } } private ImmutableGraph getContext(final BlankNode node) throws IOException { //we need to get the cntext of the BNode //if the filter was for (null, null, null) we have the whole //bnode context in the reuslt set, otherwise we need to get //more triples from the endpoint, //let's first handle the easy case if ((filterSubject == null) && (filterPredicate == null) && (filterObject == null)) { return getContextInRaw(node); } else { final ImmutableGraph startContext = getContextInRaw(node); final Set<ImmutableGraph> expandedContexts = expandContext(startContext); //expand bnode context //note that there might be different contexts for //a bnode as present in the current result set //in this case we just haveto make sure we don't //pick the same context for different bnodes in the resultset ImmutableGraph result = null; for (ImmutableGraph expandedContext : expandedContexts) { if (!usedContext.contains(expandedContext)) { result = expandedContext; break; } } if (result == null) { log.warning("he underlying sparql graph seems to contain redundant triples, this might cause unexpected results"); result = expandedContexts.iterator().next(); } else { usedContext.add(result); } return result; } } private ImmutableGraph getContextInRaw(BlankNode node) { final Graph contextBuilder = new SimpleGraph(); for (Triple rawTriple : rawTriples) { BlankNodeOrIRI rawSubject = rawTriple.getSubject(); RDFTerm rawObject = rawTriple.getObject(); if (rawSubject.equals(node) || rawObject.equals(node)) { 
contextBuilder.add(rawTriple); } } return contextBuilder.getImmutableGraph(); } @Override public Iterator<Triple> call() throws Exception { return new Iterator<Triple>() { @Override public boolean hasNext() { return rawTriplesIter.hasNext(); } @Override public Triple next() { try { Triple rawTriple = rawTriplesIter.next(); return new TripleImpl(useSparqlNode(rawTriple.getSubject()), rawTriple.getPredicate(), useSparqlNode(rawTriple.getObject())); } catch (IOException ex) { throw new RuntimeException(ex); } } }; } /** * returns all MSGs that are supergraphs of startContext * * @param startContext * @return */ private Set<ImmutableGraph> expandContext(Collection<Triple> startContext) throws IOException { final StringBuilder queryBuilder = new StringBuilder(); queryBuilder.append("SELECT * WHERE {\n "); Map<BlankNode, String> bNodeVarNameMap = writeTriplePattern(queryBuilder, startContext); Set<BlankNode> bNodesInContext = bNodeVarNameMap.keySet(); for (BlankNode bNode : bNodesInContext) { final String bNodeVarLabel = bNodeVarNameMap.get(bNode); //looking for outgoing properties of the bnode queryBuilder.append("OPTIONAL { "); queryBuilder.append('?'); queryBuilder.append(bNodeVarLabel); queryBuilder.append(' '); queryBuilder.append("?po"); queryBuilder.append(bNodeVarLabel); queryBuilder.append(" ?o"); queryBuilder.append(bNodeVarLabel); queryBuilder.append(" } .\n"); //looking for incoming properties of the bnode queryBuilder.append("OPTIONAL { "); queryBuilder.append("?s"); queryBuilder.append(bNodeVarLabel); queryBuilder.append(' '); queryBuilder.append("?pi"); queryBuilder.append(bNodeVarLabel); queryBuilder.append(" ?"); queryBuilder.append(bNodeVarLabel); queryBuilder.append(" } .\n"); } queryBuilder.append(" }"); final List<Map<String, RDFTerm>> expansionQueryResults = sparqlClient.queryResultSet(queryBuilder.toString()); Set<ImmutableGraph> expandedContexts = new HashSet<>(); //the query results may or may be from disjoint supergraphs //we expand them all as 
if they are different which may lead //us to the same MSG multiple times RESULTS: for (Map<String, RDFTerm> expansionQueryResult : expansionQueryResults) { Collection<Triple> expandedContext = new HashSet<>(); Map<BlankNode, BlankNode> newBNodesToOldBNodes = new HashMap<>(); for (BlankNode oldBNode : bNodesInContext) { final String bNodeVarLabel = bNodeVarNameMap.get(oldBNode); final RDFTerm newNode = expansionQueryResult.get(bNodeVarLabel); if (!(newNode instanceof BlankNode)) { //this subgraph is't a match continue RESULTS; } newBNodesToOldBNodes.put((BlankNode) newNode, oldBNode); } expandedContext.addAll(startContext); boolean newBNodeIntroduced = false; boolean newTripleAdded = false; for (BlankNode oldBNode : bNodesInContext) { final String bNodeVarLabel = bNodeVarNameMap.get(oldBNode); { final IRI newPredicate = (IRI) expansionQueryResult.get("po" + bNodeVarLabel); if (newPredicate != null) { RDFTerm newObject = expansionQueryResult.get("o" + bNodeVarLabel); if (newObject instanceof BlankNode) { if (newBNodesToOldBNodes.containsKey(newObject)) { //point back to BNode in startContext newObject = newBNodesToOldBNodes.get(newObject); } else { newBNodeIntroduced = true; } } if (expandedContext.add(new TripleImpl(oldBNode, newPredicate, newObject))) { newTripleAdded = true; } } } { final IRI newPredicate = (IRI) expansionQueryResult.get("pi" + bNodeVarLabel); if (newPredicate != null) { RDFTerm newSubject = expansionQueryResult.get("s" + bNodeVarLabel); if (newSubject instanceof BlankNode) { if (newBNodesToOldBNodes.containsKey(newSubject)) { //point back to BNode in startContext newSubject = newBNodesToOldBNodes.get(newSubject); } else { newBNodeIntroduced = true; } } if (expandedContext.add(new TripleImpl((BlankNodeOrIRI) newSubject, newPredicate, oldBNode))) { newTripleAdded = true; } } } } if (newBNodeIntroduced) { //we could be more efficient than this ans just expand the newly introduced bnodes expandedContexts.addAll(expandContext(expandedContext)); } else 
{ if (newTripleAdded) { //look for more results expandedContexts.addAll(expandContext(expandedContext)); //continued iteration obsoleted by recursion break; } } } if (expandedContexts.isEmpty()) { expandedContexts.add(new SimpleGraph(startContext).getImmutableGraph()); } return expandedContexts; } }).call(); } catch (AlienBNodeException e) { return new Iterator<Triple>() { @Override public boolean hasNext() { return false; } @Override public Triple next() { throw new NoSuchElementException(); } }; } catch (IOException ex) { throw new RuntimeException(ex); } catch (Exception ex) { throw new RuntimeException(ex); } } private String createQuery(final BlankNodeOrIRI filterSubject, final IRI filterPredicate, final RDFTerm filterObject) { final StringBuilder selectBuilder = new StringBuilder(); selectBuilder.append("SELECT "); final StringBuilder whereBuilder = new StringBuilder(); whereBuilder.append("WHERE { "); if (filterSubject == null) { whereBuilder.append("?s"); selectBuilder.append("?s "); } else { if (filterSubject instanceof SparqlBNode) { whereBuilder.append("?sn"); } else { whereBuilder.append(asSparqlTerm(filterSubject)); } } whereBuilder.append(' '); if (filterPredicate == null) { whereBuilder.append("?p"); selectBuilder.append("?p "); } else { whereBuilder.append(asSparqlTerm(filterPredicate)); } whereBuilder.append(' '); if (filterObject == null) { whereBuilder.append("?o"); selectBuilder.append("?o "); } else { if (filterObject instanceof SparqlBNode) { whereBuilder.append("?on"); } else { whereBuilder.append(asSparqlTerm(filterObject)); } } whereBuilder.append(" .\n"); if (filterSubject instanceof SparqlBNode) { //expand bnode context writeTriplePattern(whereBuilder, ((SparqlBNode) filterSubject).context, "sn"); } if (filterObject instanceof SparqlBNode) { //expand bnode context writeTriplePattern(whereBuilder, ((SparqlBNode) filterObject).context, "on"); } whereBuilder.append(" }"); return selectBuilder.append(whereBuilder).toString(); } @Override 
protected int performSize() { try { //TODO replace this with count return sparqlClient.queryResultSet("SELECT * WHERE { ?s ?p ?o}").size(); } catch (IOException ex) { throw new RuntimeException(ex); } } private String asSparqlTerm(IRI iri) { return "<" + iri.getUnicodeString() + ">"; } private String asSparqlTerm(Literal literal) { //TODO langauge and datatype return "\"" + literal.getLexicalForm().replace("\n", "\\n").replace("\"", "\\\"") + "\""; } private String asSparqlTerm(BlankNode bnode) { if (!(bnode instanceof SparqlBNode)) { throw new AlienBNodeException(); } //this requires adding additional clauses to the graph pattern throw new RuntimeException("SparqlBNodes should have been handled earlier"); } private String asSparqlTerm(BlankNodeOrIRI term) { if (term instanceof IRI) { return asSparqlTerm((IRI) term); } else { return asSparqlTerm((BlankNode) term); } } private String asSparqlTerm(RDFTerm term) { if (term instanceof BlankNodeOrIRI) { return asSparqlTerm((BlankNodeOrIRI) term); } else { return asSparqlTerm((Literal) term); } } private Map<BlankNode, String> writeTriplePattern(StringBuilder queryBuilder, Collection<Triple> triples) { return writeTriplePattern(queryBuilder, triples, null); } private Map<BlankNode, String> writeTriplePattern(StringBuilder queryBuilder, Collection<Triple> triples, String varLabelForInternalBNodeId) { final Collection<String> triplePatterns = new ArrayList<>(); int varCounter = 0; final Map<BlankNode, String> bNodeVarNameMap = new HashMap<>(); for (Triple t : triples) { final StringBuilder builder = new StringBuilder(); { final BlankNodeOrIRI s = t.getSubject(); String varName; if (s instanceof BlankNode) { if (bNodeVarNameMap.containsKey(s)) { varName = bNodeVarNameMap.get(s); } else { varName = "v" + (varCounter++); bNodeVarNameMap.put((BlankNode) s, varName); } builder.append('?'); builder.append(varName); } else { if (s.equals(SparqlBNode.internalBNodeId)) { builder.append('?'); 
builder.append(varLabelForInternalBNodeId); } else { builder.append(asSparqlTerm(s)); } } } builder.append(' '); builder.append(asSparqlTerm(t.getPredicate())); builder.append(' '); { final RDFTerm o = t.getObject(); String varName; if (o instanceof BlankNode) { if (bNodeVarNameMap.containsKey(o)) { varName = bNodeVarNameMap.get(o); } else { varName = "v" + (varCounter++); bNodeVarNameMap.put((BlankNode) o, varName); } builder.append('?'); builder.append(varName); } else { if (o.equals(SparqlBNode.internalBNodeId)) { builder.append('?'); builder.append(varLabelForInternalBNodeId); } else { builder.append(asSparqlTerm(o)); } } } builder.append('.'); triplePatterns.add(builder.toString()); } for (String triplePattern : triplePatterns) { queryBuilder.append(triplePattern); queryBuilder.append('\n'); } return bNodeVarNameMap; } private static class AlienBNodeException extends RuntimeException { public AlienBNodeException() { } } }
424
0
Create_ds/clerezza-rdf-core/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlClient.java
/* * Copyright 2015 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.sparql; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.apache.http.HttpEntity; import org.apache.http.NameValuePair; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.message.BasicNameValuePair; import org.apache.http.util.EntityUtils; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.rdf.core.serializedform.Parser; /** * * @author developer */ public class SparqlClient { final String endpoint; public SparqlClient(final String endpoint) { this.endpoint = endpoint; } public List<Map<String, RDFTerm>> queryResultSet(final String query) throws IOException { return (List<Map<String, RDFTerm>>) queryResult(query); } public Object queryResult(final String query) throws IOException { CloseableHttpClient httpclient = HttpClients.createDefault(); HttpPost httpPost = new HttpPost(endpoint); List<NameValuePair> nvps = new ArrayList<NameValuePair>(); nvps.add(new BasicNameValuePair("query", query)); httpPost.setEntity(new UrlEncodedFormEntity(nvps)); CloseableHttpResponse response2 = 
httpclient.execute(httpPost); HttpEntity entity2 = response2.getEntity(); try { InputStream in = entity2.getContent(); final String mediaType = entity2.getContentType().getValue(); if (mediaType.startsWith("application/sparql-results+xml")) { return SparqlResultParser.parse(in); } else { //assuming RDF response //FIXME clerezza-core-rdf to clerezza dependency Parser parser = Parser.getInstance(); return parser.parse(in, mediaType); } } finally { EntityUtils.consume(entity2); response2.close(); } } }
425
0
Create_ds/clerezza-rdf-core/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlResultParser.java
/* * Copyright 2016 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.sparql; import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import org.apache.clerezza.commons.rdf.BlankNode; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.Language; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.impl.utils.AbstractLiteral; import org.apache.http.util.EntityUtils; import org.xml.sax.Attributes; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.DefaultHandler; /** * * @author user */ public class SparqlResultParser { static Object parse(InputStream in) throws IOException { try { SAXParserFactory spf = SAXParserFactory.newInstance(); spf.setNamespaceAware(true); SAXParser saxParser = spf.newSAXParser(); XMLReader xmlReader = saxParser.getXMLReader(); final SparqlsResultsHandler sparqlsResultsHandler = new SparqlsResultsHandler(); xmlReader.setContentHandler(sparqlsResultsHandler); xmlReader.parse(new InputSource(in)); return sparqlsResultsHandler.getResults(); } catch (ParserConfigurationException | SAXException 
ex) { throw new RuntimeException(ex); } } final public static class SparqlsResultsHandler extends DefaultHandler { private String currentBindingName; private Map<String, RDFTerm> currentResult = null; private Object results = null; private boolean readingValue; private String lang; //the xml:lang attribute of a literal private StringWriter valueWriter; private Map<String, BlankNode> bNodeMap = new HashMap<>(); private static final IRI XSD_STRING = new IRI("http://www.w3.org/2001/XMLSchema#string"); private static final IRI RDF_LANG_STRING = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#langString"); private RDFTerm getBNode(String value) { if (!bNodeMap.containsKey(value)) { bNodeMap.put(value, new BlankNode()); } return bNodeMap.get(value); } private Object getResults() { return results; } private List<Map<String, RDFTerm>> getResultValueMaps() { return (List<Map<String, RDFTerm>>) results; } enum BindingType { uri, bnode, literal; } @Override public void startDocument() throws SAXException { } @Override public void startElement(String namespaceURI, String localName, String qName, Attributes atts) throws SAXException { if ("http://www.w3.org/2005/sparql-results#".equals(namespaceURI)) { if ("boolean".equals(localName)) { if (results != null) { throw new SAXException("unexpected tag <boolean>"); } //results will have Boolean value assigned once value is read readingValue = true; valueWriter = new StringWriter(); } else if ("results".equals(localName)) { if (results != null) { throw new SAXException("unexpected tag <result>"); } results = new ArrayList<Map<String, RDFTerm>>(); } else if ("result".equals(localName)) { if (currentResult != null) { throw new SAXException("unexpected tag <result>"); } currentResult = new HashMap<String, RDFTerm>(); } else if ("binding".equals(localName)) { if (currentResult == null) { throw new SAXException("unexpected tag <binding>"); } currentBindingName = atts.getValue("name"); } else if ("uri".equals(localName) || 
"bnode".equals(localName) || "literal".equals(localName)) { if (readingValue) { throw new SAXException("unexpected tag <" + localName + ">"); } lang = atts.getValue("http://www.w3.org/XML/1998/namespace", "lang"); readingValue = true; valueWriter = new StringWriter(); } } //System.out.println(namespaceURI); //System.out.println(qName); } @Override public void characters(char[] chars, int start, int length) throws SAXException { if (readingValue) { valueWriter.write(chars, start, length); //System.err.println(value + start + ", " + length); } } @Override public void endElement(String namespaceURI, String localName, String qName) throws SAXException { if ("http://www.w3.org/2005/sparql-results#".equals(namespaceURI)) { if ("result".equals(localName)) { ((List<Map<String, RDFTerm>>) results).add(currentResult); currentResult = null; } else if ("binding".equals(localName)) { if (currentBindingName == null) { throw new SAXException("unexpected tag </binding>"); } currentBindingName = null; } else if ("boolean".equals(localName)) { results = new Boolean(valueWriter.toString()); valueWriter = null; readingValue = false; } else { try { BindingType b = BindingType.valueOf(localName); RDFTerm rdfTerm = null; final Language language = lang == null ? 
null : new Language(lang);; switch (b) { case uri: rdfTerm = new IRI(valueWriter.toString()); valueWriter = null; break; case bnode: rdfTerm = getBNode(valueWriter.toString()); valueWriter = null; break; case literal: final String lf = valueWriter.toString(); rdfTerm = new AbstractLiteral() { @Override public String getLexicalForm() { return lf; } @Override public IRI getDataType() { if (language != null) { return RDF_LANG_STRING; } //TODO implement return XSD_STRING; } @Override public Language getLanguage() { return language; } @Override public String toString() { return "\"" + getLexicalForm() + "\"@" + getLanguage(); } }; break; } currentResult.put(currentBindingName, rdfTerm); readingValue = false; } catch (IllegalArgumentException e) { //not uri|bnode|literal } } } } public void endDocument() throws SAXException { //System.out.println("results: " + results.size()); } } }
426
0
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/HashMatchingTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils.graphmatching; import java.util.Map; import org.apache.clerezza.commons.rdf.BlankNode; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.junit.Assert; import org.junit.Test; /** * * @author reto */ public class HashMatchingTest { @Test public void twoLine() throws GraphNotIsomorphicException { BlankNodeOrIRI start1 = new BlankNode(); Graph tc1 = Utils4Testing.generateLine(4,start1); tc1.addAll(Utils4Testing.generateLine(5,start1)); BlankNodeOrIRI start2 = new BlankNode(); Graph tc2 = Utils4Testing.generateLine(5,start2); tc2.addAll(Utils4Testing.generateLine(4,start2)); Assert.assertEquals(9, tc1.size()); final Map<BlankNode, BlankNode> mapping = new HashMatching(tc1, tc2).getMatchings(); Assert.assertNotNull(mapping); Assert.assertEquals(10, mapping.size()); } }
427
0
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/PermutationIteratorTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils.graphmatching; import org.apache.clerezza.commons.rdf.impl.utils.graphmatching.PermutationIterator; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import org.junit.Assert; import org.junit.Test; /** * * @author reto */ public class PermutationIteratorTest { @Test public void simple() { List<String> list = new ArrayList<String>(); PermutationIterator<String> pi = new PermutationIterator<String>(list); Assert.assertFalse(pi.hasNext()); } @Test public void lessSimple() { List<String> list = new ArrayList<String>(); list.add("Hasan"); PermutationIterator<String> pi = new PermutationIterator<String>(list); Assert.assertTrue(pi.hasNext()); } @Test public void regular() { List<String> list = new ArrayList<String>(); list.add("Hasan"); list.add("Tsuy"); PermutationIterator<String> pi = new PermutationIterator<String>(list); Set<List<String>> permutations = new HashSet<List<String>>(); while (pi.hasNext()) { permutations.add(pi.next()); } Assert.assertEquals(2, permutations.size()); } @Test public void extended() { List<String> list = new ArrayList<String>(); list.add("Hasan"); list.add("Tsuy"); 
list.add("Llena"); PermutationIterator<String> pi = new PermutationIterator<String>(list); Set<List<String>> permutations = new HashSet<List<String>>(); while (pi.hasNext()) { permutations.add(pi.next()); } Assert.assertEquals(6, permutations.size()); } }
428
0
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/Utils4Testing.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils.graphmatching; import org.apache.clerezza.commons.rdf.BlankNode; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleMGraph; import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl; /** * * @author reto */ public class Utils4Testing { static Graph generateLine(int size, final BlankNodeOrIRI firstNode) { if (size < 1) { throw new IllegalArgumentException(); } Graph result = new SimpleMGraph(); BlankNodeOrIRI lastNode = firstNode; for (int i = 0; i < size; i++) { final BlankNode newNode = new BlankNode(); result.add(new TripleImpl(lastNode, u1, newNode)); lastNode = newNode; } return result; } final static IRI u1 = new IRI("http://example.org/u1"); }
429
0
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/GraphMatcherTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils.graphmatching; import org.apache.clerezza.commons.rdf.impl.utils.graphmatching.GraphMatcher; import java.util.Map; import org.apache.clerezza.commons.rdf.BlankNode; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleMGraph; import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl; import org.junit.Assert; import org.junit.Test; /** * * @author reto */ public class GraphMatcherTest { final static IRI u1 = new IRI("http://example.org/u1"); @Test public void testEmpty() { Graph tc1 = new SimpleMGraph(); Graph tc2 = new SimpleMGraph(); final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2); Assert.assertNotNull(mapping); Assert.assertEquals(0, mapping.size()); } @Test public void test2() { Graph tc1 = new SimpleMGraph(); tc1.add(new TripleImpl(u1, u1, u1)); Graph tc2 = new SimpleMGraph(); final Map<BlankNode, BlankNode> mapping 
= GraphMatcher.getValidMapping(tc1, tc2); Assert.assertNull(mapping); } @Test public void test3() { Graph tc1 = new SimpleMGraph(); tc1.add(new TripleImpl(u1, u1, u1)); Graph tc2 = new SimpleMGraph(); tc2.add(new TripleImpl(u1, u1, u1)); final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2); Assert.assertNotNull(mapping); Assert.assertEquals(0, mapping.size()); } @Test public void test4() { Graph tc1 = new SimpleMGraph(); tc1.add(new TripleImpl(u1, u1, new BlankNode())); Graph tc2 = new SimpleMGraph(); tc2.add(new TripleImpl(u1, u1, new BlankNode())); final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2); Assert.assertNotNull(mapping); Assert.assertEquals(1, mapping.size()); } @Test public void test5() { Graph tc1 = new SimpleMGraph(); tc1.add(new TripleImpl(new BlankNode(), u1, new BlankNode())); Graph tc2 = new SimpleMGraph(); tc2.add(new TripleImpl(new BlankNode(), u1, new BlankNode())); final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2); Assert.assertNotNull(mapping); Assert.assertEquals(2, mapping.size()); } @Test public void test6() { Graph tc1 = new SimpleMGraph(); final BlankNode b11 = new BlankNode(); tc1.add(new TripleImpl(new BlankNode(), u1,b11)); tc1.add(new TripleImpl(new BlankNode(), u1,b11)); Graph tc2 = new SimpleMGraph(); tc2.add(new TripleImpl(new BlankNode(), u1, new BlankNode())); final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2); Assert.assertNull(mapping); } private Graph generateCircle(int size) { return generateCircle(size, new BlankNode()); } private Graph generateCircle(int size, final BlankNodeOrIRI firstNode) { if (size < 1) { throw new IllegalArgumentException(); } Graph result = new SimpleMGraph(); BlankNodeOrIRI lastNode = firstNode; for (int i = 0; i < (size-1); i++) { final BlankNode newNode = new BlankNode(); result.add(new TripleImpl(lastNode, u1, newNode)); lastNode = newNode; } result.add(new TripleImpl(lastNode, 
u1, firstNode)); return result; } @Test public void test7() { Graph tc1 = generateCircle(2); Graph tc2 = generateCircle(2); final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2); Assert.assertNotNull(mapping); Assert.assertEquals(2, mapping.size()); } @Test public void test8() { Graph tc1 = generateCircle(5); Graph tc2 = generateCircle(5); final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2); Assert.assertNotNull(mapping); Assert.assertEquals(5, mapping.size()); } @Test public void test9() { BlankNodeOrIRI crossing = new IRI("http://example.org/"); Graph tc1 = generateCircle(2,crossing); tc1.addAll(generateCircle(3,crossing)); Graph tc2 = generateCircle(2,crossing); tc2.addAll(generateCircle(3,crossing)); Assert.assertEquals(5, tc1.size()); final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2); Assert.assertNotNull(mapping); //a circle of 2 with 1 bnode and one of 2 bnodes Assert.assertEquals(3, mapping.size()); } @Test public void test10() { BlankNodeOrIRI crossing1 = new BlankNode(); Graph tc1 = generateCircle(2,crossing1); tc1.addAll(generateCircle(3,crossing1)); BlankNodeOrIRI crossing2 = new BlankNode(); Graph tc2 = generateCircle(2,crossing2); tc2.addAll(generateCircle(3,crossing2)); Assert.assertEquals(5, tc1.size()); final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2); Assert.assertNotNull(mapping); //a circle of 2 and one of 3 with one common node Assert.assertEquals(4, mapping.size()); } @Test public void test11() { BlankNodeOrIRI crossing1 = new BlankNode(); Graph tc1 = generateCircle(2,crossing1); tc1.addAll(generateCircle(4,crossing1)); BlankNodeOrIRI crossing2 = new BlankNode(); Graph tc2 = generateCircle(3,crossing2); tc2.addAll(generateCircle(3,crossing2)); Assert.assertEquals(6, tc1.size()); final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2); Assert.assertNull(mapping); } @Test public void test12() { 
BlankNodeOrIRI start1 = new BlankNode(); Graph tc1 = Utils4Testing.generateLine(4,start1); tc1.addAll(Utils4Testing.generateLine(5,start1)); BlankNodeOrIRI start2 = new BlankNode(); Graph tc2 = Utils4Testing.generateLine(5,start2); tc2.addAll(Utils4Testing.generateLine(4,start2)); Assert.assertEquals(9, tc1.size()); final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2); Assert.assertNotNull(mapping); Assert.assertEquals(10, mapping.size()); } @Test public void test13() { BlankNodeOrIRI start1 = new BlankNode(); Graph tc1 = Utils4Testing.generateLine(4,start1); tc1.addAll(Utils4Testing.generateLine(5,start1)); BlankNodeOrIRI start2 = new BlankNode(); Graph tc2 = Utils4Testing.generateLine(3,start2); tc2.addAll(Utils4Testing.generateLine(3,start2)); Assert.assertEquals(9, tc1.size()); final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2); Assert.assertNull(mapping); } }
430
0
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils/simple/PlainLiteralImplTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils.simple; import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl; import org.junit.Test; import org.apache.clerezza.commons.rdf.Language; import org.apache.clerezza.commons.rdf.Literal; import org.junit.Assert; /** * * @author reto * */ public class PlainLiteralImplTest { @Test public void plainLiteralEquality() { String stringValue = "some text"; Literal literal1 = new PlainLiteralImpl(stringValue); Literal literal2 = new PlainLiteralImpl(stringValue); Assert.assertEquals(literal1, literal2); Assert.assertEquals(literal1.hashCode(), literal2.hashCode()); Literal literal3 = new PlainLiteralImpl("something else"); Assert.assertFalse(literal1.equals(literal3)); } @Test public void languageLiteralEquality() { String stringValue = "some text"; Language lang = new Language("en-ca"); Literal literal1 = new PlainLiteralImpl(stringValue, lang); Literal literal2 = new PlainLiteralImpl(stringValue, lang); Assert.assertEquals(literal1, literal2); Assert.assertEquals(literal1.hashCode(), literal2.hashCode()); Language lang2 = new Language("de"); Literal literal3 = new PlainLiteralImpl(stringValue, lang2); 
Assert.assertFalse(literal1.equals(literal3)); Literal literal4 = new PlainLiteralImpl(stringValue, null); Assert.assertFalse(literal3.equals(literal4)); Assert.assertFalse(literal4.equals(literal3)); } /** * hashCode of the lexical form plus the hashCode of the locale */ @Test public void checkHashCode() { String stringValue = "some text"; Language language = new Language("en"); Literal literal = new PlainLiteralImpl(stringValue, language); Assert.assertEquals(literal.getDataType().hashCode() + stringValue.hashCode() + language.hashCode(), literal.hashCode()); } }
431
0
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils/simple/TypedLiteralImplTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils.simple; import org.apache.clerezza.commons.rdf.impl.utils.TypedLiteralImpl; import org.junit.Test; import junit.framework.Assert; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.Literal; /** * * @author reto/** * * @author reto/** * * @author reto/** * * @author reto * */ public class TypedLiteralImplTest { @Test public void typedLiteralEquality() { String stringValue = "some text"; IRI uriRef = new IRI("http://example.org/datatypes/magic"); Literal literal1 = new TypedLiteralImpl(stringValue, uriRef); Literal literal2 = new TypedLiteralImpl(stringValue, uriRef); Assert.assertEquals(literal1, literal2); Assert.assertEquals(literal1.hashCode(), literal2.hashCode()); Literal literal3 = new TypedLiteralImpl("something else", uriRef); Assert.assertFalse(literal1.equals(literal3)); IRI uriRef2 = new IRI("http://example.org/datatypes/other"); Literal literal4 = new TypedLiteralImpl(stringValue, uriRef2); Assert.assertFalse(literal1.equals(literal4)); } /** * The hascode is equals to the hascode of the lexical form plus the hashcode of the dataTyp */ @Test public void checkHashCode() { String stringValue = "some 
text"; IRI uriRef = new IRI("http://example.org/datatypes/magic"); Literal literal = new TypedLiteralImpl(stringValue, uriRef); Assert.assertEquals(stringValue.hashCode() + uriRef.hashCode(), literal.hashCode()); } }
432
0
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils/simple/SimpleGraphTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils.simple; import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl; import java.util.ConcurrentModificationException; import java.util.Iterator; import org.junit.Assert; import org.junit.Test; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph; /** * * @author mir */ public class SimpleGraphTest { private IRI uriRef1 = new IRI("http://example.org/foo"); private IRI uriRef2 = new IRI("http://example.org/bar"); private IRI uriRef3 = new IRI("http://example.org/test"); private Triple triple1 = new TripleImpl(uriRef1, uriRef2, uriRef3); private Triple triple2 = new TripleImpl(uriRef2, uriRef2, uriRef1); private Triple triple3 = new TripleImpl(uriRef3, uriRef1, uriRef3); private Triple triple4 = new TripleImpl(uriRef1, uriRef3, uriRef2); private Triple triple5 = new TripleImpl(uriRef2, uriRef3, uriRef2); @Test public void iteratorRemove() { SimpleGraph stc = new SimpleGraph(); stc.add(triple1); stc.add(triple2); stc.add(triple3); stc.add(triple4); stc.add(triple5); Iterator<Triple> iter = stc.iterator(); while (iter.hasNext()) { Triple 
triple = iter.next(); iter.remove(); } Assert.assertEquals(0, stc.size()); } @Test public void removeAll() { SimpleGraph stc = new SimpleGraph(); stc.add(triple1); stc.add(triple2); stc.add(triple3); stc.add(triple4); stc.add(triple5); SimpleGraph stc2 = new SimpleGraph(); stc2.add(triple1); stc2.add(triple3); stc2.add(triple5); stc.removeAll(stc2); Assert.assertEquals(2, stc.size()); } @Test public void filterIteratorRemove() { SimpleGraph stc = new SimpleGraph(); stc.add(triple1); stc.add(triple2); stc.add(triple3); stc.add(triple4); stc.add(triple5); Iterator<Triple> iter = stc.filter(uriRef1, null, null); while (iter.hasNext()) { Triple triple = iter.next(); iter.remove(); } Assert.assertEquals(3, stc.size()); } @Test(expected=ConcurrentModificationException.class) public void remove() { SimpleGraph stc = new SimpleGraph(); stc.setCheckConcurrency(true); stc.add(triple1); stc.add(triple2); stc.add(triple3); stc.add(triple4); stc.add(triple5); Iterator<Triple> iter = stc.filter(uriRef1, null, null); while (iter.hasNext()) { Triple triple = iter.next(); stc.remove(triple); } Assert.assertEquals(3, stc.size()); } }
433
0
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/test/java/org/apache/clerezza/commons/rdf/impl/utils/simple/TripleImplTest.java
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package org.apache.clerezza.commons.rdf.impl.utils.simple; /* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ import org.junit.Test; import junit.framework.Assert; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl; import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl; /** * * @author reto * */ public class TripleImplTest { @Test public void tripleEquality() { BlankNodeOrIRI subject = new IRI("http://example.org/"); IRI predicate = new IRI("http://example.org/property"); RDFTerm object = new PlainLiteralImpl("property value"); Triple triple1 = new TripleImpl(subject, predicate, object); Triple triple2 = new TripleImpl(subject, predicate, object); Assert.assertEquals(triple1.hashCode(), triple2.hashCode()); Assert.assertEquals(triple1, triple2); } }
434
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/DelayedNotificator.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils; import java.lang.ref.WeakReference; import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.clerezza.commons.rdf.event.GraphEvent; import org.apache.clerezza.commons.rdf.event.GraphListener; /** * * @author reto */ class DelayedNotificator { private static final Logger log = Logger.getLogger(DelayedNotificator.class.getName()); private static Timer timer = new Timer("Event delivery timer",true); static class ListenerHolder { long delay; List<GraphEvent> events = null; WeakReference<GraphListener> listenerRef; public ListenerHolder(GraphListener listener, long delay) { this.listenerRef = new WeakReference<GraphListener>(listener); this.delay = delay; } private void registerEvent(GraphEvent event) { synchronized (this) { if (events == null) { events = new ArrayList<GraphEvent>(); events.add(event); timer.schedule(new TimerTask() { @Override public void run() { List<GraphEvent> eventsLocal; synchronized (ListenerHolder.this) { eventsLocal = events; events = null; } GraphListener listener = listenerRef.get(); if (listener == null) { log.fine("Ignoring garbage collected listener"); } else { try { 
listener.graphChanged(eventsLocal); } catch (Exception e) { log.log(Level.WARNING, "Exception delivering ImmutableGraph event", e); } } } }, delay); } else { events.add(event); } } } } private final Map<GraphListener, ListenerHolder> map = Collections.synchronizedMap( new WeakHashMap<GraphListener, ListenerHolder>()); void addDelayedListener(GraphListener listener, long delay) { map.put(listener, new ListenerHolder(listener, delay)); } /** * removes a Listener, this doesn't prevent the listenerRef from receiving * events alreay scheduled. * * @param listenerRef */ void removeDelayedListener(GraphListener listener) { map.remove(listener); } /** * if the listenerRef has not been registered as delayed listenerRef te events is * forwarded synchroneously * @param event */ void sendEventToListener(GraphListener listener, GraphEvent event) { ListenerHolder holder = map.get(listener); if (holder == null) { listener.graphChanged(Collections.singletonList(event)); } else { holder.registerEvent(event); } } }
435
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/TripleImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.IRI; /** * * @author reto */ public class TripleImpl implements Triple { private final BlankNodeOrIRI subject; private final IRI predicate; private final RDFTerm object; /** * Creates a new <code>TripleImpl</code>. * * @param subject the subject. * @param predicate the predicate. * @param object the object. * @throws IllegalArgumentException if an attribute is <code>null</code>. 
*/ public TripleImpl(BlankNodeOrIRI subject, IRI predicate, RDFTerm object) { if (subject == null) { throw new IllegalArgumentException("Invalid subject: null"); } else if (predicate == null) { throw new IllegalArgumentException("Invalid predicate: null"); } else if (object == null) { throw new IllegalArgumentException("Invalid object: null"); } this.subject = subject; this.predicate = predicate; this.object = object; } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (!(obj instanceof Triple)) { return false; } final Triple other = (Triple) obj; if (!this.subject.equals(other.getSubject())) { return false; } if (!this.predicate.equals(other.getPredicate())) { return false; } if (!this.object.equals(other.getObject())) { return false; } return true; } @Override public int hashCode() { return (subject.hashCode() >> 1) ^ predicate.hashCode() ^ (object.hashCode() << 1); } @Override public BlankNodeOrIRI getSubject() { return subject; } public IRI getPredicate() { return predicate; } public RDFTerm getObject() { return object; } @Override public String toString() { return subject + " " + predicate + " " + object + "."; } }
436
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/LockingIterator.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils; import java.util.Iterator; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import org.apache.clerezza.commons.rdf.Triple; /** * Wrapps an iterator<Triple> reading entering a read-lock on every invocation * of hasNext and next * @author reto */ class LockingIterator implements Iterator<Triple> { private Iterator<Triple> base; private Lock readLock; private Lock writeLock; public LockingIterator(Iterator<Triple> iterator, ReadWriteLock lock) { base = iterator; readLock = lock.readLock(); writeLock = lock.writeLock(); } @Override public boolean hasNext() { readLock.lock(); try { return base.hasNext(); } finally { readLock.unlock(); } } @Override public Triple next() { readLock.lock(); try { return base.next(); } finally { readLock.unlock(); } } @Override public void remove() { writeLock.lock(); try { base.remove(); } finally { writeLock.unlock(); } } }
437
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/AbstractImmutableGraph.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils; import java.util.Collection; import java.util.Iterator; import org.apache.clerezza.commons.rdf.BlankNode; import org.apache.clerezza.commons.rdf.ImmutableGraph; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.impl.utils.graphmatching.GraphMatcher; /** * <code>AbstractGraph</code> is an abstract implementation of <code>ImmutableGraph</code> * implementing the <code>equals</code> and the <code>hashCode</code> methods. 
* * @author reto * */ public abstract class AbstractImmutableGraph extends AbstractGraph implements ImmutableGraph { public final synchronized int hashCode() { int result = 0; for (Iterator<Triple> iter = iterator(); iter.hasNext();) { result += getBlankNodeBlindHash(iter.next()); } return result; } /** * @param triple * @return hash without BNode hashes */ private int getBlankNodeBlindHash(Triple triple) { int hash = triple.getPredicate().hashCode(); RDFTerm subject = triple.getSubject(); if (!(subject instanceof BlankNode)) { hash ^= subject.hashCode() >> 1; } RDFTerm object = triple.getObject(); if (!(object instanceof BlankNode)) { hash ^= object.hashCode() << 1; } return hash; } @Override public boolean add(Triple e) { throw new UnsupportedOperationException("Graphs are not mutable, use Graph"); } @Override public boolean addAll(Collection<? extends Triple> c) { throw new UnsupportedOperationException("Graphs are not mutable, use Graph"); } @Override public boolean remove(Object o) { throw new UnsupportedOperationException("Graphs are not mutable, use Graph"); } @Override public boolean removeAll(Collection<?> c) { throw new UnsupportedOperationException("Graphs are not mutable, use Graph"); } @Override public void clear() { throw new UnsupportedOperationException("Graphs are not mutable, use Graph"); } @Override public ImmutableGraph getImmutableGraph() { return this; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof ImmutableGraph)) { return false; } if (hashCode() != obj.hashCode()) { return false; } return GraphMatcher.getValidMapping(this, (ImmutableGraph) obj) != null; } }
438
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/TypedLiteralImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils; import java.io.Serializable; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.Language; import org.apache.clerezza.commons.rdf.Literal; /** * * @author reto */ public class TypedLiteralImpl extends AbstractLiteral implements Serializable { private String lexicalForm; private IRI dataType; private int hashCode; /** * @param lexicalForm * @param dataType */ public TypedLiteralImpl(String lexicalForm, IRI dataType) { this.lexicalForm = lexicalForm; this.dataType = dataType; this.hashCode = super.hashCode(); } public IRI getDataType() { return dataType; } /* (non-Javadoc) * @see org.apache.clerezza.rdf.core.LiteralNode#getLexicalForm() */ @Override public String getLexicalForm() { return lexicalForm; } @Override public int hashCode() { return hashCode; } @Override public String toString() { StringBuffer result = new StringBuffer(); result.append('\"'); result.append(getLexicalForm()); result.append('\"'); result.append("^^"); result.append(getDataType()); return result.toString(); } @Override public Language getLanguage() { return null; } }
439
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/LiteralImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils; import java.io.Serializable; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.Language; /** * * @author reto */ public class LiteralImpl extends AbstractLiteral implements Serializable { private String lexicalForm; private IRI dataType; private int hashCode; private Language language; /** * @param lexicalForm * @param dataType * @param Language the language of this literal */ public LiteralImpl(String lexicalForm, IRI dataType, Language language) { this.lexicalForm = lexicalForm; this.dataType = dataType; this.language = language; this.hashCode = super.hashCode(); } public IRI getDataType() { return dataType; } /* (non-Javadoc) * @see org.apache.clerezza.rdf.core.LiteralNode#getLexicalForm() */ @Override public String getLexicalForm() { return lexicalForm; } @Override public int hashCode() { return hashCode; } @Override public String toString() { StringBuffer result = new StringBuffer(); result.append('\"'); result.append(getLexicalForm()); result.append('\"'); result.append("^^"); result.append(getDataType()); return result.toString(); } @Override public Language getLanguage() { return language; } }
440
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/PlainLiteralImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils; import java.io.Serializable; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.Language; import org.apache.clerezza.commons.rdf.Literal; /** * * @author reto */ public class PlainLiteralImpl extends AbstractLiteral implements Literal, Serializable { private final String lexicalForm; private final Language language; public PlainLiteralImpl(String value) { this(value, null); } public PlainLiteralImpl(String value, Language language) { if (value == null) { throw new IllegalArgumentException("The literal string cannot be null"); } this.lexicalForm = value; this.language = language; if (language == null) { dataType = XSD_STRING; } else { dataType = RDF_LANG_STRING; } } @Override public String getLexicalForm() { return lexicalForm; } @Override public Language getLanguage() { return language; } @Override public String toString() { final StringBuilder result = new StringBuilder(); result.append('\"').append(lexicalForm).append('\"'); if (language != null) { result.append("@").append(language.toString()); } return result.toString(); } @Override public IRI getDataType() { return dataType; } private final IRI dataType; 
private static final IRI XSD_STRING = new IRI("http://www.w3.org/2001/XMLSchema#string"); private static final IRI RDF_LANG_STRING = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#langString"); }
441
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/WatchableGraphWrapper.java
/* * Copyright 2015 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.utils; import java.lang.ref.WeakReference; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import java.util.concurrent.locks.ReadWriteLock; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.ImmutableGraph; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.WatchableGraph; import org.apache.clerezza.commons.rdf.event.AddEvent; import org.apache.clerezza.commons.rdf.event.FilterTriple; import org.apache.clerezza.commons.rdf.event.GraphEvent; import org.apache.clerezza.commons.rdf.event.GraphListener; import org.apache.clerezza.commons.rdf.event.RemoveEvent; /** * * @author developer */ public class WatchableGraphWrapper implements WatchableGraph { final Graph wrapped; public WatchableGraphWrapper(Graph wrapped) { this.wrapped = wrapped; } //all listeners private final Set<ListenerConfiguration> listenerConfigs = Collections.synchronizedSet( new HashSet<ListenerConfiguration>()); private DelayedNotificator delayedNotificator = new DelayedNotificator(); @Override public Iterator<Triple> iterator() { return filter(null, null, null); } 
@Override public boolean contains(Object o) { if (!(o instanceof Triple)) { return false; } Triple t = (Triple) o; return filter(t.getSubject(), t.getPredicate(), t.getObject()).hasNext(); } @Override public Iterator<Triple> filter(BlankNodeOrIRI subject, IRI predicate, RDFTerm object) { final Iterator<Triple> baseIter = wrapped.filter(subject, predicate, object); return new Iterator<Triple>() { Triple currentTriple = null; @Override public boolean hasNext() { return baseIter.hasNext(); } @Override public Triple next() { currentTriple = baseIter.next(); return currentTriple; } @Override public void remove() { baseIter.remove(); dispatchEvent(new RemoveEvent(WatchableGraphWrapper.this, currentTriple)); } }; } @Override public boolean add(Triple triple) { boolean success = performAdd(triple); if (success) { dispatchEvent(new AddEvent(this, triple)); } return success; } /** * A subclass of <code>AbstractGraph</code> should override * this method instead of <code>add</code> for Graph event support to be * added. * * @param e The triple to be added to the triple collection * @return */ protected boolean performAdd(Triple e) { return wrapped.add(e); } @Override public boolean remove(Object o) { Triple triple = (Triple) o; boolean success = performRemove(triple); if (success) { dispatchEvent(new RemoveEvent(this, triple)); } return success; } @Override public boolean removeAll(Collection<?> c) { boolean modified = false; for (Iterator<? extends Object> it = c.iterator(); it.hasNext();) { Object object = it.next(); if (remove(object)) { modified = true; } } return modified; } /** * A subclass of <code>AbstractGraph</code> should override * this method instead of <code>remove</code> for ImmutableGraph event support to be * added. 
* * @param o The triple to be removed from the triple collection * @return */ protected boolean performRemove(Triple triple) { Iterator<Triple> e = filter(null, null, null); while (e.hasNext()) { if (triple.equals(e.next())) { e.remove(); return true; } } return false; } /** * Dispatches a <code>GraphEvent</code> to all registered listeners for which * the specified <code>Triple</code> matches the <code>FilterTriple</code>s * of the listeners. * * @param triple The Triple that was modified * @param type The type of modification */ protected void dispatchEvent(GraphEvent event) { synchronized(listenerConfigs) { Iterator<ListenerConfiguration> iter = listenerConfigs.iterator(); while (iter.hasNext()) { ListenerConfiguration config = iter.next(); GraphListener registeredListener = config.getListener(); if (registeredListener == null) { iter.remove(); continue; } if (config.getFilter().match(event.getTriple())) { delayedNotificator.sendEventToListener(registeredListener, event); } } } } @Override public void addGraphListener(GraphListener listener, FilterTriple filter) { addGraphListener(listener, filter, 0); } @Override public void addGraphListener(GraphListener listener, FilterTriple filter, long delay) { listenerConfigs.add(new ListenerConfiguration(listener, filter)); if (delay > 0) { delayedNotificator.addDelayedListener(listener, delay); } } @Override public void removeGraphListener(GraphListener listener) { synchronized(listenerConfigs) { Iterator<ListenerConfiguration> iter = listenerConfigs.iterator(); while (iter.hasNext()) { ListenerConfiguration listenerConfig = iter.next(); GraphListener registeredListener = listenerConfig.getListener(); if ((registeredListener == null) || (registeredListener.equals(listener))) { iter.remove(); } } } delayedNotificator.removeDelayedListener(listener); } @Override public ImmutableGraph getImmutableGraph() { throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | 
Templates. } @Override public ReadWriteLock getLock() { return wrapped.getLock(); } @Override public int size() { return wrapped.size(); } @Override public boolean isEmpty() { return wrapped.isEmpty(); } @Override public Object[] toArray() { return wrapped.toArray(); } @Override public <T> T[] toArray(T[] a) { return wrapped.toArray(a); } @Override public boolean containsAll(Collection<?> c) { return wrapped.containsAll(c); } @Override public boolean addAll(Collection<? extends Triple> c) { return wrapped.addAll(c); } @Override public boolean retainAll(Collection<?> c) { return wrapped.retainAll(c); } @Override public void clear() { wrapped.clear(); } private static class ListenerConfiguration { private WeakReference<GraphListener> listenerRef; private FilterTriple filter; private ListenerConfiguration(GraphListener listener, FilterTriple filter) { this.listenerRef = new WeakReference<GraphListener>(listener); this.filter = filter; } /** * @return the listener */ GraphListener getListener() { GraphListener listener = listenerRef.get(); return listener; } /** * @return the filter */ FilterTriple getFilter() { return filter; } } }
442
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/AbstractLiteral.java
/*
 * Copyright 2015 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.clerezza.commons.rdf.impl.utils;

import org.apache.clerezza.commons.rdf.Literal;

/**
 * Skeletal {@link Literal} implementation supplying {@code equals} and
 * {@code hashCode} based on the lexical form, the data type and the
 * (optional) language, so any two {@link Literal}s with equal components
 * compare equal regardless of their concrete class.
 *
 * @author developer
 */
public abstract class AbstractLiteral implements Literal {

    @Override
    public int hashCode() {
        // The language is optional; lexical form and data type are not.
        int languagePart = (getLanguage() == null) ? 0 : getLanguage().hashCode();
        return languagePart + getLexicalForm().hashCode() + getDataType().hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Literal)) {
            return false;
        }
        Literal other = (Literal) obj;
        // Languages are equal when both are absent or both present and equal.
        boolean sameLanguage = (getLanguage() == null)
                ? (other.getLanguage() == null)
                : getLanguage().equals(other.getLanguage());
        return sameLanguage
                && getDataType().equals(other.getDataType())
                && getLexicalForm().equals(other.getLexicalForm());
    }
}
443
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/AbstractGraph.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils; import java.lang.ref.WeakReference; import java.util.AbstractCollection; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.ImmutableGraph; import org.apache.clerezza.commons.rdf.IRI; import org.apache.clerezza.commons.rdf.WatchableGraph; import org.apache.clerezza.commons.rdf.event.AddEvent; import org.apache.clerezza.commons.rdf.event.FilterTriple; import org.apache.clerezza.commons.rdf.event.GraphEvent; import org.apache.clerezza.commons.rdf.event.GraphListener; import org.apache.clerezza.commons.rdf.event.RemoveEvent; import org.apache.clerezza.commons.rdf.impl.utils.debug.ReentrantReadWriteLockTracker; import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleImmutableGraph; /** 
* An abstract implementation of <code>Graph</code> implementing * <code>iterator</code> and <code>contains</code> calling <code>filter</code>. * * @author reto */ public abstract class AbstractGraph extends AbstractCollection<Triple> implements Graph { private static final String DEBUG_MODE = "rdfLocksDebugging"; private final ReadWriteLock lock; private final Lock readLock; private final Lock writeLock; /** * Constructs a LocalbleMGraph for an Graph. * * @param providedMGraph a non-lockable graph */ public AbstractGraph() { { String debugMode = System.getProperty(DEBUG_MODE); if (debugMode != null && debugMode.toLowerCase().equals("true")) { lock = new ReentrantReadWriteLockTracker(); } else { lock = new ReentrantReadWriteLock(); } } readLock = lock.readLock(); writeLock = lock.writeLock(); } public AbstractGraph(final ReadWriteLock lock) { this.lock = lock; readLock = lock.readLock(); writeLock = lock.writeLock(); } @Override public ReadWriteLock getLock() { return lock; } @Override public ImmutableGraph getImmutableGraph() { readLock.lock(); try { return performGetImmutableGraph(); } finally { readLock.unlock(); } } public ImmutableGraph performGetImmutableGraph() { return new SimpleImmutableGraph(this); } @Override public Iterator<Triple> filter(BlankNodeOrIRI subject, IRI predicate, RDFTerm object) { readLock.lock(); try { return new LockingIterator(performFilter(subject, predicate, object), lock); } finally { readLock.unlock(); } } @Override public int size() { readLock.lock(); try { return performSize(); } finally { readLock.unlock(); } } @Override public boolean isEmpty() { readLock.lock(); try { return performIsEmpty(); } finally { readLock.unlock(); } } @Override @SuppressWarnings("element-type-mismatch") public boolean contains(Object o) { readLock.lock(); try { return performContains(o); } finally { readLock.unlock(); } } @Override public Iterator<Triple> iterator() { readLock.lock(); try { return new LockingIterator(performIterator(), lock); } finally 
{ readLock.unlock(); } } @Override public Object[] toArray() { readLock.lock(); try { return performToArray(); } finally { readLock.unlock(); } } @Override public <T> T[] toArray(T[] a) { readLock.lock(); try { return performToArray(a); } finally { readLock.unlock(); } } @Override public boolean containsAll(Collection<?> c) { readLock.lock(); try { return performContainsAll(c); } finally { readLock.unlock(); } } @Override public boolean add(Triple e) { writeLock.lock(); try { return performAdd(e); } finally { writeLock.unlock(); } } @Override public boolean remove(Object o) { writeLock.lock(); try { return performRemove(o); } finally { writeLock.unlock(); } } @Override public boolean addAll(Collection<? extends Triple> c) { writeLock.lock(); try { return performAddAll(c); } finally { writeLock.unlock(); } } @Override public boolean removeAll(Collection<?> c) { writeLock.lock(); try { return performRemoveAll(c); } finally { writeLock.unlock(); } } @Override public boolean retainAll(Collection<?> c) { writeLock.lock(); try { return performRetainAll(c); } finally { writeLock.unlock(); } } @Override public void clear() { writeLock.lock(); try { performClear(); } finally { writeLock.unlock(); } } @Override public boolean equals(Object obj) { /*if (obj == null) { return false; } if (obj == this) { return true; } if (obj.getClass() != getClass()) { return false; }*/ return this == obj; } protected abstract Iterator<Triple> performFilter(BlankNodeOrIRI subject, IRI predicate, RDFTerm object); protected abstract int performSize(); protected boolean performIsEmpty() { return super.isEmpty(); } protected Object[] performToArray() { return super.toArray(); } protected boolean performRemove(Object o) { return super.remove(o); } protected boolean performAddAll(Collection<? 
extends Triple> c) { return super.addAll(c); } protected boolean performRemoveAll(Collection<?> c) { return super.removeAll(c); } protected boolean performRetainAll(Collection<?> c) { return super.retainAll(c); } protected void performClear() { super.clear(); } protected boolean performContains(Object o) { return super.contains(o); } protected Iterator<Triple> performIterator() { return performFilter(null, null, null); } protected boolean performContainsAll(Collection<?> c) { return super.containsAll(c); } protected <T> T[] performToArray(T[] a) { return super.toArray(a); } protected boolean performAdd(Triple e) { return super.add(e); } }
444
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/package-info.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Common RDF API Implementation utilities. */ package org.apache.clerezza.commons.rdf.impl.utils;
445
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/HashMatching.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.clerezza.commons.rdf.impl.utils.graphmatching;

import org.apache.clerezza.commons.rdf.impl.utils.graphmatching.collections.IntHashMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.apache.clerezza.commons.rdf.BlankNode;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.graphmatching.collections.IntIterator;

/**
 * Iteratively matches blank nodes of two graphs by hashing the properties
 * (incident edges) of each blank node and pairing up nodes with a unique
 * common hash. Nodes sharing a hash with others are collected into
 * "matching groups" for the caller to resolve by other means.
 *
 * @author reto
 */
public class HashMatching {

    // definitive bnode-to-bnode matchings found so far (tc1 node -> tc2 node)
    private Map<BlankNode, BlankNode> matchings = new HashMap<BlankNode, BlankNode>();
    // groups of same-hash bnodes of tc1 mapped to the corresponding group in tc2
    private Map<Set<BlankNode>, Set<BlankNode>> matchingGroups;

    /**
     * tc1 and tc2 will be modified: the triples containing no unmatched bnode
     * will be removed
     *
     * @param tc1
     * @param tc2
     * @throws GraphNotIsomorphicException
     */
    HashMatching(Graph tc1, Graph tc2) throws GraphNotIsomorphicException {
        int foundMatchings = 0;
        int foundMatchingGroups = 0;
        Map<BlankNode, Integer> bNodeHashMap = new HashMap<BlankNode, Integer>();
        // Refine hashes until a pass yields neither new matchings nor more
        // matching groups (fixed point); null signals non-isomorphic graphs.
        while (true) {
            bNodeHashMap = matchByHashes(tc1, tc2, bNodeHashMap);
            if (bNodeHashMap == null) {
                throw new GraphNotIsomorphicException();
            }
            if (matchings.size() == foundMatchings) {
                if (!(matchingGroups.size() > foundMatchingGroups)) {
                    break;
                }
            }
            foundMatchings = matchings.size();
            foundMatchingGroups = matchingGroups.size();
        }
    }

    /**
     *
     * @return a map containing sets of which each bnode maps to one of the other set
     */
    public Map<Set<BlankNode>, Set<BlankNode>> getMatchingGroups() {
        return matchingGroups;
    }

    public Map<BlankNode, BlankNode> getMatchings() {
        return matchings;
    }

    // Groups the bnodes by the hash of their property sets.
    private static IntHashMap<Set<BlankNode>> getHashNodes(Map<BlankNode,
            Set<Property>> bNodePropMap, Map<BlankNode, Integer> bNodeHashMap) {
        IntHashMap<Set<BlankNode>> result = new IntHashMap<Set<BlankNode>>();
        for (Map.Entry<BlankNode, Set<Property>> entry : bNodePropMap.entrySet()) {
            int hash = computeHash(entry.getValue(), bNodeHashMap);
            Set<BlankNode> bNodeSet = result.get(hash);
            if (bNodeSet == null) {
                bNodeSet = new HashSet<BlankNode>();
                result.put(hash,bNodeSet);
            }
            bNodeSet.add(entry.getKey());
        }
        return result;
    }

    /*
     * returns a Map from bnodes to hash that can be used for future
     * refinements, this could be separate for each ImmutableGraph.
     *
     * triples no longer containing an unmatched bnode are removed.
     *
     * Note that the matched nodes are not guaranteed to be equals, but only to
     * be the correct ones if the graphs are isomorphic.
     */
    private Map<BlankNode, Integer> matchByHashes(Graph g1, Graph g2,
            Map<BlankNode, Integer> bNodeHashMap) {
        Map<BlankNode, Set<Property>> bNodePropMap1 = getBNodePropMap(g1);
        Map<BlankNode, Set<Property>> bNodePropMap2 = getBNodePropMap(g2);
        IntHashMap<Set<BlankNode>> hashNodeMap1 = getHashNodes(bNodePropMap1, bNodeHashMap);
        IntHashMap<Set<BlankNode>> hashNodeMap2 = getHashNodes(bNodePropMap2, bNodeHashMap);
        // The multiset of hashes must agree or the graphs cannot be isomorphic.
        if (!hashNodeMap1.keySet().equals(hashNodeMap2.keySet())) {
            return null;
        }
        matchingGroups = new HashMap<Set<BlankNode>, Set<BlankNode>>();
        IntIterator hashIter = hashNodeMap1.keySet().intIterator();
        while (hashIter.hasNext()) {
            int hash = hashIter.next();
            Set<BlankNode> nodes1 = hashNodeMap1.get(hash);
            Set<BlankNode> nodes2 = hashNodeMap2.get(hash);
            if (nodes1.size() != nodes2.size()) {
                return null;
            }
            if (nodes1.size() != 1) {
                // ambiguous: several nodes share this hash on both sides
                matchingGroups.put(nodes1, nodes2);
                continue;
            }
            // unique hash on both sides: these two bnodes must correspond
            final BlankNode bNode1 = nodes1.iterator().next();
            final BlankNode bNode2 = nodes2.iterator().next();
            matchings.put(bNode1,bNode2);
            //in the graphs replace node occurrences with grounded node,
            BlankNodeOrIRI mappedNode = new MappedNode(bNode1, bNode2);
            replaceNode(g1,bNode1, mappedNode);
            replaceNode(g2, bNode2, mappedNode);
            //remove grounded triples
            if (!Utils.removeGrounded(g1,g2)) {
                return null;
            }
        }
        Map<BlankNode, Integer> result = new HashMap<BlankNode, Integer>();
        addInverted(result, hashNodeMap1);
        addInverted(result, hashNodeMap2);
        return result;
    }

    // Order-independent combination of the property hashes of one bnode.
    private static int computeHash(Set<Property> propertySet, Map<BlankNode, Integer> bNodeHashMap) {
        int result = 0;
        for (Property property : propertySet) {
            result += property.hashCode(bNodeHashMap);
        }
        return result;
    }

    // Maps every bnode of g to the set of its incident properties.
    private static Map<BlankNode, Set<Property>> getBNodePropMap(Graph g) {
        Set<BlankNode> bNodes = Utils.getBNodes(g);
        Map<BlankNode, Set<Property>> result = new HashMap<BlankNode, Set<Property>>();
        for (BlankNode bNode : bNodes) {
            result.put(bNode, getProperties(bNode, g));
        }
        return result;
    }

    // Collects the outgoing (forward) and incoming (backward) edges of bNode.
    private static Set<Property> getProperties(BlankNode bNode, Graph g) {
        Set<Property> result = new HashSet<Property>();
        Iterator<Triple> ti = g.filter(bNode, null, null);
        while (ti.hasNext()) {
            Triple triple = ti.next();
            result.add(new ForwardProperty(triple.getPredicate(), triple.getObject()));
        }
        ti = g.filter(null, null, bNode);
        while (ti.hasNext()) {
            Triple triple = ti.next();
            result.add(new BackwardProperty(triple.getSubject(), triple.getPredicate()));
        }
        return result;
    }

    // Hash of a node: previously computed hash for bnodes (0 if none yet),
    // the intrinsic hashCode for grounded nodes.
    private static int nodeHash(RDFTerm resource, Map<BlankNode, Integer> bNodeHashMap) {
        if (resource instanceof BlankNode) {
            Integer mapValue = bNodeHashMap.get((BlankNode)resource);
            if (mapValue == null) {
                return 0;
            } else {
                return mapValue;
            }
        } else {
            return resource.hashCode();
        }
    }

    // Rewrites every occurrence of bNode in graph to replacementNode.
    private static void replaceNode(Graph graph, BlankNode bNode, BlankNodeOrIRI replacementNode) {
        Set<Triple> triplesToRemove = new HashSet<Triple>();
        for (Triple triple : graph) {
            Triple replacementTriple = getReplacement(triple, bNode, replacementNode);
            if (replacementTriple != null) {
                triplesToRemove.add(triple);
                graph.add(replacementTriple);
            }
        }
        graph.removeAll(triplesToRemove);
    }

    // Returns the triple with bNode substituted, or null if bNode does not
    // occur in the triple (the predicate can never be a bnode).
    private static Triple getReplacement(Triple triple, BlankNode bNode, BlankNodeOrIRI replacementNode) {
        if (triple.getSubject().equals(bNode)) {
            if (triple.getObject().equals(bNode)) {
                return new TripleImpl(replacementNode, triple.getPredicate(), replacementNode);
            } else {
                return new TripleImpl(replacementNode, triple.getPredicate(), triple.getObject());
            }
        } else {
            if (triple.getObject().equals(bNode)) {
                return new TripleImpl(triple.getSubject(), triple.getPredicate(), replacementNode);
            } else {
                return null;
            }
        }
    }

    // Inverts hash -> {bnodes} into bnode -> hash entries added to result.
    private static void addInverted(Map<BlankNode, Integer> result, IntHashMap<Set<BlankNode>> hashNodeMap) {
        for (int hash : hashNodeMap.keySet()) {
            Set<BlankNode> bNodes = hashNodeMap.get(hash);
            for (BlankNode bNode : bNodes) {
                result.put(bNode, hash);
            }
        }
    }

    // An incoming edge of a bnode: (subject) --predicate--> (this node).
    private static class BackwardProperty implements Property {
        private BlankNodeOrIRI subject;
        private IRI predicate;

        public BackwardProperty(BlankNodeOrIRI subject, IRI predicate) {
            this.subject = subject;
            this.predicate = predicate;
        }

        @Override
        public int hashCode(Map<BlankNode, Integer> bNodeHashMap) {
            // 0xFF distinguishes backward from forward edges with equal parts
            return  0xFF ^ predicate.hashCode() ^ nodeHash(subject, bNodeHashMap);
        }
    }

    // An outgoing edge of a bnode: (this node) --predicate--> (object).
    private static class ForwardProperty implements Property {
        private IRI predicate;
        private RDFTerm object;

        public ForwardProperty(IRI predicate, RDFTerm object) {
            this.predicate = predicate;
            this.object = object;
        }

        @Override
        public int hashCode(Map<BlankNode, Integer> bNodeHashMap) {
            return predicate.hashCode() ^ nodeHash(object, bNodeHashMap);
        }
    }

    // Placeholder node standing for a matched pair of bnodes; relies on
    // default identity equality, so each matched pair yields a distinct node.
    private static class MappedNode implements BlankNodeOrIRI {
        private BlankNode bNode1, bNode2;

        public MappedNode(BlankNode bNode1, BlankNode bNode2) {
            this.bNode1 = bNode1;
            this.bNode2 = bNode2;
        }
    }

    // An edge incident to a bnode whose hash may depend on previously
    // computed bnode hashes.
    private static interface Property {
        public int hashCode(Map<BlankNode, Integer> bNodeHashMap);
    }
}
446
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/GraphNotIsomorphicException.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.clerezza.commons.rdf.impl.utils.graphmatching;

/**
 * Thrown by the matching machinery when the two graphs being compared are
 * found not to be isomorphic.
 *
 * @author reto
 */
class GraphNotIsomorphicException extends Exception {

    // Exception is Serializable; pin the serial form explicitly instead of
    // relying on the compiler-generated default.
    private static final long serialVersionUID = 1L;
}
447
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/Utils.java
package org.apache.clerezza.commons.rdf.impl.utils.graphmatching; /* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import org.apache.clerezza.commons.rdf.BlankNode; import org.apache.clerezza.commons.rdf.Triple; public class Utils { static Set<BlankNode> getBNodes(Collection<Triple> s) { Set<BlankNode> result = new HashSet<BlankNode>(); for (Triple triple : s) { if (triple.getSubject() instanceof BlankNode) { result.add((BlankNode) triple.getSubject()); } if (triple.getObject() instanceof BlankNode) { result.add((BlankNode) triple.getObject()); } } return result; } /** * removes the common grounded triples from s1 and s2. 
returns false if * a grounded triple is not in both sets, true otherwise */ static boolean removeGrounded(Collection<Triple> s1, Collection<Triple> s2) { Iterator<Triple> triplesIter = s1.iterator(); while (triplesIter.hasNext()) { Triple triple = triplesIter.next(); if (!isGrounded(triple)) { continue; } if (!s2.remove(triple)) { return false; } triplesIter.remove(); } //for efficiency we might skip this (redefine method) for (Triple triple : s2) { if (isGrounded(triple)) { return false; } } return true; } private static boolean isGrounded(Triple triple) { if (triple.getSubject() instanceof BlankNode) { return false; } if (triple.getObject() instanceof BlankNode) { return false; } return true; } }
448
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/GroupMappingIterator.java
/*
 * Copyright 2010 reto.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * under the License.
 */
package org.apache.clerezza.commons.rdf.impl.utils.graphmatching;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;

/**
 * Iterates over all mappings from each element of every Set&lt;T&gt; to each
 * element of their corresponding Set&lt;U&gt;, i.e. the cartesian product of
 * the per-group mappings produced by MappingIterator. The product is built
 * lazily and recursively: one group is peeled off as "first part" and this
 * class recurses (via {@link #create}) on the remaining groups.
 *
 * @author reto
 */
class GroupMappingIterator<T,U> implements Iterator<Map<T, U>> {

    // iterator over the mappings of the first (peeled-off) group
    private Iterator<Map<T, U>> firstPartIter;
    // the first-group mapping currently being combined with rest mappings
    private Map<T, U> currentFirstPart;
    // all groups except the first; iterated afresh for each first-part value
    final private Map<Set<T>, Set<U>> restMap;
    private Iterator<Map<T, U>> currentRestPartIter;

    // Factory choosing the cheapest implementation: an empty iterator for no
    // groups, a plain MappingIterator for one group, and a recursive
    // GroupMappingIterator for several groups.
    static <T,U> Iterator<Map<T, U>> create(Map<Set<T>, Set<U>> matchingGroups) {
        if (matchingGroups.size() > 1) {
            return new GroupMappingIterator<T, U>(matchingGroups);
        } else {
            if (matchingGroups.size() == 0) {
                return new ArrayList<Map<T, U>>(0).iterator();
            }
            Map.Entry<Set<T>, Set<U>> entry = matchingGroups.entrySet().iterator().next();
            return new MappingIterator<T,U>(entry.getKey(), entry.getValue());
        }
    }

    private GroupMappingIterator(Map<Set<T>, Set<U>> matchingGroups) {
        if (matchingGroups.size() == 0) {
            throw new IllegalArgumentException("matchingGroups must not be empty");
        }
        restMap = new HashMap<Set<T>, Set<U>>();
        // Split the groups: the first entry drives firstPartIter, all other
        // entries go into restMap for the recursive iterator.
        boolean first = true;
        for (Map.Entry<Set<T>, Set<U>> entry : matchingGroups.entrySet()) {
            if (first) {
                firstPartIter = new MappingIterator<T,U>(entry.getKey(), entry.getValue());
                first = false;
            } else {
                restMap.put(entry.getKey(), entry.getValue());
            }
        }
        currentRestPartIter = create(restMap);
        currentFirstPart = firstPartIter.next();
    }

    @Override
    public boolean hasNext() {
        // more combinations exist while either the first group can still
        // advance or the current rest iteration is unfinished
        return firstPartIter.hasNext() || currentRestPartIter.hasNext();
    }

    @Override
    public Map<T, U> next() {
        Map<T, U> restPart;
        if (currentRestPartIter.hasNext()) {
            restPart = currentRestPartIter.next();
        } else {
            if (firstPartIter.hasNext()) {
                // rest combinations exhausted: advance the first part and
                // restart the iteration over the remaining groups
                currentFirstPart = firstPartIter.next();
                currentRestPartIter = create(restMap);
                restPart = currentRestPartIter.next();
            } else {
                throw new NoSuchElementException();
            }
        }
        // combine the rest mapping with the current first-group mapping
        Map<T, U> result = new HashMap<T, U>(restPart);
        result.putAll(currentFirstPart);
        return result;
    }

    @Override
    public void remove() {
        throw new UnsupportedOperationException("Not supported.");
    }

}
449
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/PermutationIterator.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.clerezza.commons.rdf.impl.utils.graphmatching;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;

/**
 * An Iterator over all permutations of a list. Permutations are generated
 * recursively: for each choice of last element (tracked by
 * {@code posInList}) a nested PermutationIterator enumerates the
 * permutations of the remaining elements. An empty input list yields no
 * permutations.
 *
 * @author reto
 */
class PermutationIterator<T> implements Iterator<List<T>> {

    // permutations of list without the element at posInList
    private Iterator<List<T>> restIterator;
    // the (unmodifiable) list whose permutations are enumerated
    private final List<T> list;
    // the next permutation to return, or null once exhausted
    private List<T> next;
    // The position of the last element of the next returned list within
    // list; the element at this index is the one excluded from restIterator.
    private int posInList = 0;

    PermutationIterator(List<T> list) {
        this.list = Collections.unmodifiableList(list);
        if (list.size() > 1) {
            createRestList();
        }
        prepareNext();
    }

    @Override
    public boolean hasNext() {
        return next != null;
    }

    @Override
    public List<T> next() {
        List<T> result = next;
        if (result == null) {
            throw new NoSuchElementException();
        }
        prepareNext();
        return result;
    }

    @Override
    public void remove() {
        throw new UnsupportedOperationException("Not supported");
    }

    // (Re)creates restIterator over the permutations of list without the
    // element at the current posInList.
    private void createRestList() {
        List<T> restList = new ArrayList<T>(list);
        restList.remove(posInList);
        restIterator = new PermutationIterator<T>(restList);
    }

    private void prepareNext() {
        next = getNext();
    }

    // Computes the next permutation, or null when all have been returned.
    private List<T> getNext() {
        if (list.isEmpty()) {
            return null;
        }
        if (list.size() == 1) {
            // a singleton list has exactly one permutation; return it once
            if (posInList++ == 0) {
                return new ArrayList<T>(list);
            } else {
                return null;
            }
        } else {
            if (!restIterator.hasNext()) {
                if (posInList < (list.size() - 1)) {
                    // advance the excluded (last) element and restart the
                    // recursion over the remaining elements
                    posInList++;
                    createRestList();
                } else {
                    return null;
                }
            }
            // each rest permutation is extended with the excluded element
            List<T> result = restIterator.next();
            result.add(list.get(posInList));
            return result;
        }
    }
}
450
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/GraphMatcher.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils.graphmatching; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.clerezza.commons.rdf.BlankNode; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl; import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleMGraph; /** * @author reto * */ public class GraphMatcher { private final static Logger log = Logger.getLogger(GraphMatcher.class.getName()); /** * get a mapping from g1 to g2 or null if the graphs are not isomorphic. The * returned map maps each <code>BNode</code>s from g1 to one * of g2. If the graphs are ground graphs the method return an empty map if * the ImmutableGraph are equals and null otherwise. * <p/> * NOTE: This method does not returned mapping from blank nodes to grounded * nodes, a bnode in g1 is not a vraiable that may match any node, but must * match a bnode in g2. 
* <p/> * * On the algorithm:<br/> * - In a first step it checked if every grounded triple in g1 matches one * in g2<br/> * - [optional] blank node blind matching</br> * - in a map mbng1 bnode of g1 is mapped to a set of of its * properties and inverse properties, this is the predicate and the object * or subject respectively, analoguosly in mbgn2 every bnode of g2<br/> * - based on the incoming and outgoing properties a hash is calculated for * each bnode, in the first step when calculating the hash aconstant value * is taken for the bnodes that might be subject or object in the (inverse properties) * - hash-classes: * * @param g1 * @param g2 * @return a Set of NodePairs */ public static Map<BlankNode, BlankNode> getValidMapping(Graph og1, Graph og2) { Graph g1 = new SimpleMGraph(og1); Graph g2 = new SimpleMGraph(og2); if (!Utils.removeGrounded(g1,g2)) { return null; } final HashMatching hashMatching; try { hashMatching = new HashMatching(g1, g2); } catch (GraphNotIsomorphicException ex) { return null; } Map<BlankNode, BlankNode> matchings = hashMatching.getMatchings(); if (g1.size() > 0) { //start trial an error matching //TODO (CLEREZZA-81) at least in the situation where one matching //group is big (approx > 5) we should switch back to hash-based matching //after a first guessed matching, rather than try all permutations Map<BlankNode, BlankNode> remainingMappings = trialAndErrorMatching(g1, g2, hashMatching.getMatchingGroups()); if (remainingMappings == null) { return null; } else { matchings.putAll(remainingMappings); } } return matchings; } private static Map<BlankNode, BlankNode> trialAndErrorMatching(Graph g1, Graph g2, Map<Set<BlankNode>, Set<BlankNode>> matchingGroups) { if (log.isLoggable(Level.FINE)) { Set<BlankNode> bn1 = Utils.getBNodes(g1); log.log(Level.FINE,"doing trial and error matching for {0}"+" bnodes, "+"in graphs of size: {1}.", new Object[]{bn1.size(), g1.size()}); } Iterator<Map<BlankNode, BlankNode>> mappingIter = 
GroupMappingIterator.create(matchingGroups); while (mappingIter.hasNext()) { Map<BlankNode, BlankNode> map = mappingIter.next(); if (checkMapping(g1, g2, map)) { return map; } } return null; } private static boolean checkMapping(Graph g1, Graph g2, Map<BlankNode, BlankNode> map) { for (Triple triple : g1) { if (!g2.contains(map(triple, map))) { return false; } } return true; } private static Triple map(Triple triple, Map<BlankNode, BlankNode> map) { final BlankNodeOrIRI oSubject = triple.getSubject(); BlankNodeOrIRI subject = oSubject instanceof BlankNode ? map.get((BlankNode)oSubject) : oSubject; RDFTerm oObject = triple.getObject(); RDFTerm object = oObject instanceof BlankNode ? map.get((BlankNode)oObject) : oObject; return new TripleImpl(subject, triple.getPredicate(), object); } }
451
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/MappingIterator.java
package org.apache.clerezza.commons.rdf.impl.utils.graphmatching; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; /** * An iterator over all possible mapping beetween the elemnets of two sets of * the same size, each mapping maps each element from set1 to a disctinct one of * set2. * * * * @author reto */ class MappingIterator<T,U> implements Iterator<Map<T, U>> { private List<T> list1; private Iterator<List<U>> permutationList2Iterator; public MappingIterator(Set<T> set1, Set<U> set2) { if (set1.size() != set2.size()) { throw new IllegalArgumentException(); } this.list1 = new ArrayList<T>(set1); permutationList2Iterator = new PermutationIterator<U>( new ArrayList<U>(set2)); } @Override public boolean hasNext() { return permutationList2Iterator.hasNext(); } @Override public Map<T, U> next() { List<U> list2 = permutationList2Iterator.next(); Map<T, U> result = new HashMap<T, U>(list1.size()); for (int i = 0; i < list1.size(); i++) { result.put(list1.get(i), list2.get(i)); } return result; } @Override public void remove() { throw new UnsupportedOperationException("Not supported."); } }
452
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/collections/IntHashSet.java
/* * Copyright 2002-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.utils.graphmatching.collections; import java.util.HashSet; import java.util.Iterator; /** * This is currently just a placeholder implementation based onm HashSet<Integer> * an efficient implementation is to store the primitives directly. * * @author reto */ public class IntHashSet extends HashSet<Integer> implements IntSet { @Override public IntIterator intIterator() { final Iterator<Integer> base = iterator(); return new IntIterator() { @Override public int nextInt() { return base.next(); } @Override public boolean hasNext() { return base.hasNext(); } @Override public Integer next() { return base.next(); } @Override public void remove() { base.remove(); } }; } @Override public void add(int i) { super.add((Integer)i); } }
453
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/collections/IntSet.java
/* * Copyright 2002-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.utils.graphmatching.collections; import java.util.Set; /** * A IntSet allows directly adding primitive ints to a set, Set<Integer> is * extended, but accessingt he respective methods is less efficient. * * @author reto */ public interface IntSet extends Set<Integer> { /** * * @return an iterator over the primitive int */ public IntIterator intIterator(); public void add(int i); }
454
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/collections/IntIterator.java
/* * Copyright 2002-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.clerezza.commons.rdf.impl.utils.graphmatching.collections; import java.util.Iterator; /** * An iterator allowing to iterate over ints, Iterator<Integer> is extended for * compatibility, however accessing nextInt allows faster implementations. * * @author reto */ public interface IntIterator extends Iterator<Integer> { public int nextInt(); }
455
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/graphmatching/collections/IntHashMap.java
/*
 * Copyright 2002-2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * Note: originally released under the GNU LGPL v2.1,
 * but rereleased by the original author under the ASF license (above).
 */
package org.apache.clerezza.commons.rdf.impl.utils.graphmatching.collections;

/**
 * <p>A hash map that uses primitive ints for the key rather than objects.</p>
 *
 * <p>Chained hash table: buckets are singly linked {@link Entry} lists, and
 * the int key itself serves as its hash. A parallel {@link IntSet} mirrors
 * the key set so it can be returned without walking the table.</p>
 *
 * <p>Note that this class is for internal optimization purposes only, and may
 * not be supported in future releases of Jakarta Commons Lang. Utilities of
 * this sort may be included in future releases of Jakarta Commons Collections.</p>
 *
 * <p>NOTE(review): only {@code clear()} is synchronized; {@code put}/{@code get}
 * are not, so this class is presumably meant for single-threaded use — confirm.</p>
 *
 * @author Justin Couch
 * @author Alex Chaffee (alex@apache.org)
 * @author Stephen Colebourne
 * @since 2.0
 * @version $Revision: 1.2 $
 * @see java.util.HashMap
 */
public class IntHashMap<T> {

    // mirror of all keys currently in the table, kept in sync by put() and clear()
    private IntSet keySet = new IntHashSet();

    /**
     * The hash table data (bucket array of chained entries).
     */
    private transient Entry<T> table[];

    /**
     * The total number of entries in the hash table.
     */
    private transient int count;

    /**
     * The table is rehashed when its size exceeds this threshold.  (The
     * value of this field is (int)(capacity * loadFactor).)
     *
     * @serial
     */
    private int threshold;

    /**
     * The load factor for the hashtable.
     *
     * @serial
     */
    private float loadFactor;

    /**
     * <p>Innerclass that acts as a datastructure to create a new entry in the
     * table.</p>
     */
    private static class Entry<T> {
        int hash;   // for int keys the hash IS the key; lookups compare only this
        int key;
        T value;
        Entry<T> next;  // next entry in the same bucket chain

        /**
         * <p>Create a new entry with the given values.</p>
         *
         * @param hash The code used to hash the object with
         * @param key The key used to enter this in the table
         * @param value The value for this key
         * @param next A reference to the next entry in the table
         */
        protected Entry(int hash, int key, T value, Entry<T> next) {
            this.hash = hash;
            this.key = key;
            this.value = value;
            this.next = next;
        }
    }

    /**
     * <p>Constructs a new, empty hashtable with a default capacity and load
     * factor, which is <code>20</code> and <code>0.75</code> respectively.</p>
     */
    public IntHashMap() {
        this(20, 0.75f);
    }

    /**
     * <p>Constructs a new, empty hashtable with the specified initial capacity
     * and default load factor, which is <code>0.75</code>.</p>
     *
     * @param initialCapacity the initial capacity of the hashtable.
     * @throws IllegalArgumentException if the initial capacity is less
     *  than zero.
     */
    public IntHashMap(int initialCapacity) {
        this(initialCapacity, 0.75f);
    }

    /**
     * <p>Constructs a new, empty hashtable with the specified initial
     * capacity and the specified load factor.</p>
     *
     * @param initialCapacity the initial capacity of the hashtable.
     * @param loadFactor the load factor of the hashtable.
     * @throws IllegalArgumentException if the initial capacity is less
     *             than zero, or if the load factor is nonpositive.
     */
    public IntHashMap(int initialCapacity, float loadFactor) {
        super();
        if (initialCapacity < 0) {
            throw new IllegalArgumentException("Illegal Capacity: " + initialCapacity);
        }
        if (loadFactor <= 0) {
            throw new IllegalArgumentException("Illegal Load: " + loadFactor);
        }
        if (initialCapacity == 0) {
            // a zero-length bucket array would break the modulo indexing below
            initialCapacity = 1;
        }
        this.loadFactor = loadFactor;
        table = new Entry[initialCapacity];
        threshold = (int) (initialCapacity * loadFactor);
    }

    /**
     * <p>Returns the number of keys in this hashtable.</p>
     *
     * @return the number of keys in this hashtable.
     */
    public int size() {
        return count;
    }

    /**
     * <p>Tests if this hashtable maps no keys to values.</p>
     *
     * @return <code>true</code> if this hashtable maps no keys to values;
     *          <code>false</code> otherwise.
     */
    public boolean isEmpty() {
        return count == 0;
    }

    /**
     * <p>Tests if some key maps into the specified value in this hashtable.
     * This operation is more expensive than the <code>containsKey</code>
     * method: it scans every bucket chain.</p>
     *
     * <p>Note that this method is identical in functionality to containsValue,
     * (which is part of the Map interface in the collections framework).</p>
     *
     * @param value a value to search for.
     * @return <code>true</code> if and only if some key maps to the
     *          <code>value</code> argument in this hashtable as
     *          determined by the <tt>equals</tt> method;
     *          <code>false</code> otherwise.
     * @throws NullPointerException if the value is <code>null</code>.
     * @see #containsKey(int)
     * @see #containsValue(Object)
     * @see java.util.Map
     */
    public boolean contains(Object value) {
        if (value == null) {
            throw new NullPointerException();
        }

        Entry tab[] = table;
        for (int i = tab.length; i-- > 0;) {
            for (Entry e = tab[i]; e != null; e = e.next) {
                if (e.value.equals(value)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * <p>Returns <code>true</code> if this HashMap maps one or more keys
     * to this value.</p>
     *
     * <p>Note that this method is identical in functionality to contains
     * (which predates the Map interface).</p>
     *
     * @param value value whose presence in this HashMap is to be tested.
     * @see java.util.Map
     * @since JDK1.2
     */
    public boolean containsValue(Object value) {
        return contains(value);
    }

    /**
     * <p>Tests if the specified object is a key in this hashtable.</p>
     *
     * @param key possible key.
     * @return <code>true</code> if and only if the specified object is a
     *    key in this hashtable, as determined by the <tt>equals</tt>
     *    method; <code>false</code> otherwise.
     * @see #contains(Object)
     */
    public boolean containsKey(int key) {
        Entry tab[] = table;
        int hash = key;  // identity hash: the key is its own hash code
        int index = (hash & 0x7FFFFFFF) % tab.length;  // mask sign bit to keep index non-negative
        for (Entry e = tab[index]; e != null; e = e.next) {
            if (e.hash == hash) {
                // comparing hashes suffices here, since hash == key
                return true;
            }
        }
        return false;
    }

    /**
     * <p>Returns the value to which the specified key is mapped in this map.</p>
     *
     * @param key a key in the hashtable.
     * @return the value to which the key is mapped in this hashtable;
     *          <code>null</code> if the key is not mapped to any value in
     *          this hashtable.
     * @see #put(int, Object)
     */
    public T get(int key) {
        Entry<T> tab[] = table;
        int hash = key;
        int index = (hash & 0x7FFFFFFF) % tab.length;
        for (Entry<T> e = tab[index]; e != null; e = e.next) {
            if (e.hash == hash) {
                return e.value;
            }
        }
        return null;
    }

    /**
     * <p>Increases the capacity of and internally reorganizes this
     * hashtable, in order to accommodate and access its entries more
     * efficiently.</p>
     *
     * <p>This method is called automatically when the number of keys
     * in the hashtable exceeds this hashtable's capacity and load
     * factor.</p>
     */
    protected void rehash() {
        int oldCapacity = table.length;
        Entry<T> oldMap[] = table;

        int newCapacity = oldCapacity * 2 + 1;
        Entry<T> newMap[] = new Entry[newCapacity];

        threshold = (int) (newCapacity * loadFactor);
        table = newMap;

        // re-link every existing entry into its new bucket (chains end up reversed)
        for (int i = oldCapacity; i-- > 0;) {
            for (Entry<T> old = oldMap[i]; old != null;) {
                Entry<T> e = old;
                old = old.next;

                int index = (e.hash & 0x7FFFFFFF) % newCapacity;
                e.next = newMap[index];
                newMap[index] = e;
            }
        }
    }

    /**
     * <p>Maps the specified <code>key</code> to the specified
     * <code>value</code> in this hashtable. The key cannot be
     * <code>null</code>. </p>
     *
     * <p>The value can be retrieved by calling the <code>get</code> method
     * with a key that is equal to the original key.</p>
     *
     * @param key the hashtable key.
     * @param value the value.
     * @return the previous value of the specified key in this hashtable,
     *         or <code>null</code> if it did not have one.
     * @throws NullPointerException if the key is <code>null</code>.
     * @see #get(int)
     */
    public Object put(int key, T value) {
        keySet.add(key);  // keep the mirrored key set in sync (IntSet ignores duplicates)
        // Makes sure the key is not already in the hashtable.
        Entry<T> tab[] = table;
        int hash = key;
        int index = (hash & 0x7FFFFFFF) % tab.length;
        for (Entry<T> e = tab[index]; e != null; e = e.next) {
            if (e.hash == hash) {
                // existing key: replace value and return the old one
                T old = e.value;
                e.value = value;
                return old;
            }
        }

        if (count >= threshold) {
            // Rehash the table if the threshold is exceeded
            rehash();

            tab = table;
            index = (hash & 0x7FFFFFFF) % tab.length;
        }

        // Creates the new entry, prepended to its bucket chain.
        Entry<T> e = new Entry<T>(hash, key, value, tab[index]);
        tab[index] = e;
        count++;
        return null;
    }

    /**
     * <p>Removes the key (and its corresponding value) from this
     * hashtable.</p>
     *
     * <p>This method does nothing if the key is not present in the
     * hashtable.</p>
     *
     * <p>NOTE(review): intentionally left disabled in this copy — if it were
     * re-enabled it would also need to remove the key from {@code keySet}.</p>
     *
     * @param key the key that needs to be removed.
     * @return the value to which the key had been mapped in this hashtable,
     *         or <code>null</code> if the key did not have a mapping.
     */
    /*public Object remove(int key) {
        Entry tab[] = table;
        int hash = key;
        int index = (hash & 0x7FFFFFFF) % tab.length;
        for (Entry e = tab[index], prev = null; e != null; prev = e, e = e.next) {
            if (e.hash == hash) {
                if (prev != null) {
                    prev.next = e.next;
                } else {
                    tab[index] = e.next;
                }
                count--;
                Object oldValue = e.value;
                e.value = null;
                return oldValue;
            }
        }
        return null;
    }*/

    /**
     * <p>Clears this hashtable so that it contains no keys.</p>
     */
    public synchronized void clear() {
        keySet.clear();
        Entry tab[] = table;
        for (int index = tab.length; --index >= 0;) {
            tab[index] = null;
        }
        count = 0;
    }

    // returns the live mirrored key set (not a defensive copy)
    public IntSet keySet() {
        return keySet;
    }
}
456
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/simple/SimpleMGraph.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils.simple; import java.util.Collection; import java.util.Iterator; import java.util.Set; import org.apache.clerezza.commons.rdf.ImmutableGraph; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.Triple; /** * * @deprecated Use SimpleGraph * @author reto */ @Deprecated public class SimpleMGraph extends SimpleGraph implements Graph { /** * Creates an empty SimpleMGraph */ public SimpleMGraph() { } public SimpleMGraph(Set<Triple> baseSet) { super(baseSet); } public SimpleMGraph(Collection<Triple> baseCollection) { super(baseCollection); } public SimpleMGraph(Iterator<Triple> iterator) { super(iterator); } }
457
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/simple/SimpleGraph.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.clerezza.commons.rdf.impl.utils.simple;

import org.apache.clerezza.commons.rdf.impl.utils.AbstractGraph;
import java.lang.ref.SoftReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.ImmutableGraph;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.IRI;

/**
 * For now this is a minimalistic implementation, without any indexes or other
 * optimizations: filtering is a linear scan over the backing set.
 *
 * Optionally (see {@link #setCheckConcurrency(boolean)}, off by default) the
 * graph tracks the iterators it hands out via SoftReferences and invalidates
 * them on modification, so that continued use of a stale iterator raises a
 * ConcurrentModificationException.
 *
 * @author reto
 */
public class SimpleGraph extends AbstractGraph {

    // the backing triple set; depending on the constructor this may be shared with the caller
    final Set<Triple> triples;

    private boolean checkConcurrency = false;

    /**
     * Iterator over a snapshot produced by performFilter. remove() also
     * removes from the live triple set and invalidates all other iterators.
     */
    class SimpleIterator implements Iterator<Triple> {

        private Iterator<Triple> listIter;
        private boolean isValid = true;

        public SimpleIterator(Iterator<Triple> listIter) {
            this.listIter = listIter;
        }
        // last triple returned by next(); the one remove() deletes from the graph
        private Triple currentNext;

        @Override
        public boolean hasNext() {
            checkValidity();
            return listIter.hasNext();
        }

        @Override
        public Triple next() {
            checkValidity();
            currentNext = listIter.next();
            return currentNext;
        }

        @Override
        public void remove() {
            checkValidity();
            // remove from both the snapshot (so this iterator stays consistent)
            // and the live set; then invalidate every OTHER tracked iterator
            listIter.remove();
            triples.remove(currentNext);
            invalidateIterators(this);
        }

        private void checkValidity() throws ConcurrentModificationException {
            if (checkConcurrency && !isValid) {
                throw new ConcurrentModificationException();
            }
        }

        private void invalidate() {
            isValid = false;
        }
    }


    // SoftReferences so tracked iterators can still be garbage collected
    private final Set<SoftReference<SimpleIterator>> iterators =
            Collections.synchronizedSet(new HashSet<SoftReference<SimpleIterator>>());

    /**
     * Creates an empty SimpleGraph
     */
    public SimpleGraph() {
        triples = Collections.synchronizedSet(new HashSet<Triple>());
    }

    /**
     * Creates a SimpleGraph using the passed iterator, the iterator
     * is consumed before the constructor returns
     *
     * @param iterator
     */
    public SimpleGraph(Iterator<Triple> iterator) {
        triples = new HashSet<Triple>();
        while (iterator.hasNext()) {
            Triple triple = iterator.next();
            triples.add(triple);
        }
    }

    /**
     * Creates a SimpleGraph for the specified set of triples,
     * subsequent modification of baseSet do affect the created instance.
     *
     * @param baseSet
     */
    public SimpleGraph(Set<Triple> baseSet) {
        this.triples = baseSet;
    }

    /**
     * Creates a SimpleGraph for the specified collection of triples,
     * subsequent modification of baseSet do not affect the created instance.
     *
     * @param baseSet
     */
    public SimpleGraph(Collection<Triple> baseCollection) {
        this.triples = new HashSet<Triple>(baseCollection);
    }

    @Override
    public int performSize() {
        return triples.size();
    }

    @Override
    public Iterator<Triple> performFilter(final BlankNodeOrIRI subject, final IRI predicate, final RDFTerm object) {
        // Linear scan into a snapshot list; null pattern components act as wildcards.
        final List<Triple> tripleList = new ArrayList<Triple>();
        synchronized (triples) {
            Iterator<Triple> baseIter = triples.iterator();
            while (baseIter.hasNext()) {
                Triple triple = baseIter.next();
                if ((subject != null)
                        && (!triple.getSubject().equals(subject))) {
                    continue;
                }
                if ((predicate != null)
                        && (!triple.getPredicate().equals(predicate))) {
                    continue;
                }
                if ((object != null)
                        && (!triple.getObject().equals(object))) {
                    continue;
                }
                tripleList.add(triple);
            }

            final Iterator<Triple> listIter = tripleList.iterator();
            SimpleIterator resultIter = new SimpleIterator(listIter);
            if (checkConcurrency) {
                // track the iterator so later modifications can invalidate it
                iterators.add(new SoftReference<SimpleIterator>(resultIter));
            }
            return resultIter;
        }
    }

    @Override
    public boolean performAdd(Triple e) {
        boolean modified = triples.add(e);
        if (modified) {
            // a structural change invalidates ALL outstanding iterators
            invalidateIterators(null);
        }
        return modified;
    }

    /**
     * Invalidates all tracked iterators except the caller (the iterator whose
     * own remove() triggered the modification); also prunes cleared
     * SoftReferences. No-op unless concurrency checking is enabled.
     */
    private void invalidateIterators(SimpleIterator caller) {
        if (!checkConcurrency) {
            return;
        }
        Set<SoftReference> oldReferences = new HashSet<SoftReference>();
        synchronized(iterators) {
            for (SoftReference<SimpleGraph.SimpleIterator> softReference : iterators) {
                SimpleIterator simpleIterator = softReference.get();
                if (simpleIterator == null) {
                    // referent was garbage collected; schedule the reference for removal
                    oldReferences.add(softReference);
                    continue;
                }
                if (simpleIterator != caller) {
                    simpleIterator.invalidate();
                }
            }
        }
        iterators.removeAll(oldReferences);
    }

    /**
     * Specifies whether or not to throw <code>ConcurrentModificationException</code>s,
     * if this simple triple collection is modified concurrently. Concurrency
     * check is set to false by default.
     *
     * @param bool Specifies whether or not to check concurrent modifications.
     */
    public void setCheckConcurrency(boolean bool) {
        checkConcurrency = bool;
    }

    @Override
    public ImmutableGraph getImmutableGraph() {
        return new SimpleImmutableGraph(this);
    }
}
458
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/simple/SimpleImmutableGraph.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.impl.utils.simple; import org.apache.clerezza.commons.rdf.impl.utils.AbstractImmutableGraph; import java.util.Iterator; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.Graph; import org.apache.clerezza.commons.rdf.IRI; /** * * @author reto */ public class SimpleImmutableGraph extends AbstractImmutableGraph { private Graph graph; /** * Creates a ImmutableGraph with the triples in Graph * * @param Graph the collection of triples this ImmutableGraph shall consist of */ public SimpleImmutableGraph(Graph Graph) { this.graph = new SimpleGraph(Graph.iterator()); } /** * Creates a ImmutableGraph with the triples in Graph. * * This construction allows to specify if the Graph might change * in future. If GraphWillNeverChange is set to true it will * assume that the collection never changes, in this case the collection * isn't copied making things more efficient. 
* * @param Graph the collection of triples this ImmutableGraph shall consist of * @param GraphWillNeverChange true if the caller promises Graph will never change */ public SimpleImmutableGraph(Graph Graph, boolean GraphWillNeverChange) { if (!GraphWillNeverChange) { this.graph = new SimpleGraph(Graph.iterator()); } else { this.graph = Graph; } } public SimpleImmutableGraph(Iterator<Triple> tripleIter) { this.graph = new SimpleGraph(tripleIter); } @Override public int performSize() { return graph.size(); } @Override public Iterator<Triple> performFilter(BlankNodeOrIRI subject, IRI predicate, RDFTerm object) { return graph.filter(subject, predicate, object); } }
459
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/debug/ReadLockDebug.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.clerezza.commons.rdf.impl.utils.debug;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;

/**
 * Debugging decorator around a ReadLock: delegates every operation to the
 * tracker's real read lock and, on lock()/unlock(), registers/unregisters
 * itself with the owning ReentrantReadWriteLockTracker while capturing the
 * acquiring thread's stack trace for later inspection.
 *
 * NOTE(review): stackTrace is a single field shared by all users of this
 * instance, so with overlapping acquisitions only the most recent lock()'s
 * trace is retained — presumably acceptable for a debug aid; confirm.
 *
 * @author mir
 */
public class ReadLockDebug extends ReadLock {

    // the tracker that owns this lock and records currently-held read locks
    ReentrantReadWriteLockTracker lock;
    // stack trace captured at the latest lock(); cleared by unlock()
    StackTraceElement[] stackTrace;

    // the real delegate; the super() argument only satisfies the ReadLock constructor
    ReadLock readLock;
    public ReadLockDebug(ReentrantReadWriteLockTracker lock) {
        super(lock);
        this.lock = lock;
        this.readLock = lock.realReadLock();
    }

    @Override
    public void lock() {
        readLock.lock();
        // register with the tracker and remember where the acquisition happened
        lock.addLockedReadLock(this);
        stackTrace = Thread.currentThread().getStackTrace();
    }

    @Override
    public void lockInterruptibly() throws InterruptedException {
        // NOTE(review): tryLock/lockInterruptibly do not register with the
        // tracker or capture a trace, unlike lock() — confirm intended
        readLock.lockInterruptibly();
    }

    @Override
    public Condition newCondition() {
        return readLock.newCondition();
    }

    @Override
    public String toString() {
        return readLock.toString();
    }

    @Override
    public boolean tryLock() {
        return readLock.tryLock();
    }

    @Override
    public boolean tryLock(long timeout, TimeUnit unit) throws InterruptedException {
        return readLock.tryLock(timeout, unit);
    }

    @Override
    public void unlock() {
        readLock.unlock();
        // unregister and drop the recorded acquisition trace
        lock.removeReadLock(this);
        stackTrace = null;
    }

    /** Returns the stack trace captured at the most recent lock(), or null if not held. */
    public StackTraceElement[] getStackTrace() {
        return stackTrace;
    }
}
460
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/debug/WriteLockDebug.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.clerezza.commons.rdf.impl.utils.debug;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;

/**
 * A debugging decorator around the {@link WriteLock} of a
 * {@link ReentrantReadWriteLockTracker}. While the lock is held via
 * {@link #lock()}, this wrapper records the stack trace of the acquiring
 * thread so it can be inspected through {@link #getStackTrace()}.
 *
 * NOTE(review): only {@link #lock()} records a stack trace; acquisitions via
 * {@code lockInterruptibly()} or {@code tryLock(...)} are delegated but leave
 * the trace unchanged. {@link #unlock()} clears the trace unconditionally,
 * i.e. also on an inner unlock of a reentrant acquisition.
 *
 * @author mir
 */
public class WriteLockDebug extends WriteLock {

    /** The tracker this write-lock belongs to. */
    private final ReentrantReadWriteLockTracker lock;
    /** The real write-lock that all operations are delegated to. */
    private final WriteLock writeLock;
    /** Stack trace captured by the most recent lock() call, null when unlocked. */
    private StackTraceElement[] stackTrace;

    public WriteLockDebug(ReentrantReadWriteLockTracker lock) {
        super(lock);
        this.lock = lock;
        this.writeLock = lock.realWriteLock();
    }

    @Override
    public int getHoldCount() {
        return writeLock.getHoldCount();
    }

    @Override
    public boolean isHeldByCurrentThread() {
        return writeLock.isHeldByCurrentThread();
    }

    @Override
    public void lock() {
        writeLock.lock();
        // capture after acquisition so the trace reflects the successful holder
        stackTrace = Thread.currentThread().getStackTrace();
    }

    @Override
    public void lockInterruptibly() throws InterruptedException {
        writeLock.lockInterruptibly();
    }

    @Override
    public Condition newCondition() {
        return writeLock.newCondition();
    }

    @Override
    public boolean tryLock() {
        return writeLock.tryLock();
    }

    @Override
    public boolean tryLock(long timeout, TimeUnit unit) throws InterruptedException {
        return writeLock.tryLock(timeout, unit);
    }

    @Override
    public void unlock() {
        writeLock.unlock();
        stackTrace = null;
    }

    /**
     * @return the stack trace recorded by the last {@link #lock()} call, or
     *         null if the lock is not currently held through this wrapper
     */
    public StackTraceElement[] getStackTrace() {
        return stackTrace;
    }
}
461
0
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils
Create_ds/clerezza-rdf-core/impl.utils/src/main/java/org/apache/clerezza/commons/rdf/impl/utils/debug/ReentrantReadWriteLockTracker.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.clerezza.commons.rdf.impl.utils.debug;

import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * A {@link ReentrantReadWriteLock} that hands out debug wrappers
 * ({@link ReadLockDebug}/{@link WriteLockDebug}) instead of the plain JDK
 * lock views, so that the set of currently held read-locks (with the stack
 * traces of their acquirers) can be inspected via
 * {@link #getLockedReadLocks()}. Most protected introspection methods of the
 * superclass are overridden as plain pass-throughs, which widens nothing but
 * keeps them visible in this class for debugging.
 *
 * @author mir
 */
public class ReentrantReadWriteLockTracker extends ReentrantReadWriteLock {

    // Read-locks currently held (registered by ReadLockDebug.lock(),
    // removed again by ReadLockDebug.unlock()); synchronized wrapper because
    // lock/unlock may happen on different threads.
    private Set<ReadLockDebug> lockedReadLocks = Collections.synchronizedSet(new HashSet<ReadLockDebug>());
    // Single shared write-lock wrapper, handed out by writeLock().
    private final WriteLockDebug writeLock = new WriteLockDebug(this);

    // --- pass-through overrides of the superclass introspection API ---

    @Override
    protected Thread getOwner() {
        return super.getOwner();
    }

    @Override
    protected Collection<Thread> getQueuedReaderThreads() {
        return super.getQueuedReaderThreads();
    }

    @Override
    protected Collection<Thread> getQueuedThreads() {
        return super.getQueuedThreads();
    }

    @Override
    protected Collection<Thread> getQueuedWriterThreads() {
        return super.getQueuedWriterThreads();
    }

    @Override
    public int getReadHoldCount() {
        return super.getReadHoldCount();
    }

    @Override
    public int getReadLockCount() {
        return super.getReadLockCount();
    }

    @Override
    public int getWaitQueueLength(Condition condition) {
        return super.getWaitQueueLength(condition);
    }

    @Override
    protected Collection<Thread> getWaitingThreads(Condition condition) {
        return super.getWaitingThreads(condition);
    }

    @Override
    public int getWriteHoldCount() {
        return super.getWriteHoldCount();
    }

    @Override
    public boolean hasWaiters(Condition condition) {
        return super.hasWaiters(condition);
    }

    @Override
    public boolean isWriteLocked() {
        return super.isWriteLocked();
    }

    @Override
    public boolean isWriteLockedByCurrentThread() {
        return super.isWriteLockedByCurrentThread();
    }

    /**
     * Returns a debug wrapper around the real read-lock.
     *
     * NOTE(review): unlike writeLock(), this allocates a new wrapper on every
     * call; lock()/unlock() must therefore be invoked on the same wrapper
     * instance for the tracking in lockedReadLocks to balance — confirm
     * callers hold on to the returned object.
     */
    @Override
    public ReadLock readLock() {
        return new ReadLockDebug(this);
    }

    /** The undecorated read-lock of the superclass, used by ReadLockDebug. */
    ReadLock realReadLock() {
        return super.readLock();
    }

    /** The undecorated write-lock of the superclass, used by WriteLockDebug. */
    WriteLock realWriteLock() {
        return super.writeLock();
    }

    @Override
    public String toString() {
        return super.toString();
    }

    /** Returns the shared debug wrapper around the real write-lock. */
    @Override
    public WriteLockDebug writeLock() {
        return writeLock;
    }

    /** Registers a read-lock wrapper as currently locked (called on lock()). */
    void addLockedReadLock(ReadLockDebug lock) {
        lockedReadLocks.add(lock);
    }

    /** Deregisters a read-lock wrapper (called on unlock()). */
    void removeReadLock(ReadLockDebug lock) {
        lockedReadLocks.remove(lock);
    }

    /**
     * @return the live (synchronized) set of read-lock wrappers that are
     *         currently locked, each carrying its acquirer's stack trace
     */
    public Set<ReadLockDebug> getLockedReadLocks() {
        return lockedReadLocks;
    }
}
462
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/WatchableGraph.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.clerezza.commons.rdf;

import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.locks.ReadWriteLock;
import org.apache.clerezza.commons.rdf.event.FilterTriple;
import org.apache.clerezza.commons.rdf.event.GraphListener;

/**
 * An extension to the Graph interface that allows to add listeners which
 * are notified of modification events.
 *
 * @author reto
 */
public interface WatchableGraph extends Graph {

    /**
     * Adds the specified <code>GraphListener</code> to the graph. This listener
     * will be notified, when the graph is modified and the <code>Triple</code>
     * that was part of the modification matched the specified
     * <code>FilterTriple</code>. The notification will be passed to the
     * listener after the specified delay time (in milliseconds) has passed.
     * If more matching events occur during the delay period, then they are
     * passed all together at the end of the delay period. If the listener
     * unregisters or the platform is stopped within the period then the already
     * occurred events may not be delivered.
     *
     * All implementations support this method, immutable implementations will
     * typically provide an empty implementation, they shall not throw an
     * exception.
     *
     * Implementations of which the triples change over time without add- and
     * remove-methods being called (e.g. implementations dynamically generating
     * their triples on invocation of the filter-method) may choose not to, or
     * only partially propagate their changes to the listener. They should
     * describe the behavior in the documentation of the class.
     *
     * Implementations should keep weak references to the listeners, so that a
     * listener can be garbage collected if it is no longer referenced by another
     * object.
     *
     * If delay is 0 notification will happen synchronously.
     *
     * @param listener The listener that will be notified
     * @param filter The triple filter with which triples are tested,
     *        that were part of the modification.
     * @param delay The time period after which the listener will be notified in milliseconds.
     */
    public void addGraphListener(GraphListener listener, FilterTriple filter,
            long delay);

    /**
     * Adds the specified <code>GraphListener</code> to the graph. This listener
     * will be notified, when the graph is modified and the <code>Triple</code>
     * that was part of the modification matched the specified
     * <code>FilterTriple</code>. The notification will be passed without delay.
     *
     * Same as <code>addGraphListener(listener, filter, 0)</code>.
     *
     * @param listener The listener that will be notified
     * @param filter The triple filter with which triples are tested,
     *        that were part of the modification.
     */
    public void addGraphListener(GraphListener listener, FilterTriple filter);

    /**
     * Removes the specified <code>GraphListener</code> from the graph. This
     * listener will no longer be notified, when the graph is modified.
     *
     * @param listener The listener to be removed.
     */
    public void removeGraphListener(GraphListener listener);
}
463
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/Graph.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.clerezza.commons.rdf;

import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.locks.ReadWriteLock;
import org.apache.clerezza.commons.rdf.event.FilterTriple;
import org.apache.clerezza.commons.rdf.event.GraphListener;

/**
 * A set of triples (as it doesn't allow duplicates), it does however
 * not extend {@link java.util.Set} as it doesn't inherit its
 * specification for <code>hashCode()</code> and <code>equals</code>.
 * It is possible to add a <code>GraphListener</code> to listen for modifications
 * of the triples.
 *
 * @author reto
 */
public interface Graph extends Collection<Triple> {

    /**
     * Filters triples given a pattern; a <code>null</code> argument acts as
     * a wildcard for the respective position.
     * filter(null, null, null) returns the same as iterator()
     *
     * @param subject the subject to match, or null for any subject
     * @param predicate the predicate to match, or null for any predicate
     * @param object the object to match, or null for any object
     * @return an <code>Iterator</code> over the matching triples
     */
    public Iterator<Triple> filter(BlankNodeOrIRI subject, IRI predicate,
            RDFTerm object);

    /**
     * Returns true if <code>other</code> describes the same graph and will
     * always describe the same graph as this instance, false otherwise.
     * It returns true if this == other or if it
     * is otherwise guaranteed that changes to one of the instances are
     * immediately reflected in the other or if both graphs are immutable.
     *
     * @param other the object to compare with
     * @return true if other == this
     */
    @Override
    public boolean equals(Object other);

    /**
     * Returns an ImmutableGraph describing the graph at the current point in
     * time. If <code>this</code> is an instance of ImmutableGraph this can
     * safely return <code>this</code>.
     *
     * @return the current time slice of the possibly mutable graph represented by the instance.
     */
    public ImmutableGraph getImmutableGraph();

    /**
     * The lock provided by this method allows to create read- and write-locks
     * that span multiple method calls. Having a read lock prevents other
     * threads from writing to this Graph, having a write-lock prevents other
     * threads from reading and writing. Implementations would typically
     * return a <code>java.util.concurrent.locks.ReentrantReadWriteLock</code>.
     * Immutable instances (such as instances of <code>ImmutableGraph</code>)
     * or instances used in transactions where concurrent access of the same
     * instance is not an issue may return a no-op ReadWriteLock (i.e. one
     * which returns ReadLock and WriteLock instances of which the methods do
     * not do anything)
     *
     * @return the lock of this Graph
     */
    ReadWriteLock getLock();
}
464
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/RDFTerm.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf; /** * An <a href= "http://www.w3.org/TR/rdf11-concepts/#dfn-rdf-term" >RDF-1.1 * Term</a>, as defined by <a href= "http://www.w3.org/TR/rdf11-concepts/" * >RDF-1.1 Concepts and Abstract Syntax</a>, a W3C Recommendation published on * 25 February 2014.<br> * * @see <a href= "http://www.w3.org/TR/rdf11-concepts/#dfn-rdf-term" >RDF-1.1 * Term</a> */ public interface RDFTerm { }
465
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/BlankNode.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf; /** * A Blank Node represents a resource, * but does not indicate a URI for the resource. Blank nodes act like * existentially qualified variables in first order logic. * * An <a href= "http://www.w3.org/TR/rdf11-concepts/#dfn-blank-node" >RDF-1.1 * Blank Node</a>, as defined by <a href= * "http://www.w3.org/TR/rdf11-concepts/#section-blank-nodes" >RDF-1.1 Concepts * and Abstract Syntax</a>, a W3C Recommendation published on 25 February 2014.<br> * * Note that: Blank nodes are disjoint from IRIs and literals. Otherwise, * the set of possible blank nodes is arbitrary. RDF makes no reference to any * internal structure of blank nodes. * * * @see <a href= "http://www.w3.org/TR/rdf11-concepts/#dfn-blank-node">RDF-1.1 * Blank Node</a> */ public class BlankNode implements BlankNodeOrIRI { }
466
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/ImmutableGraph.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf; /** * A graph, modeled as a set of triples. * This interface does not extend java.util.Set because of the different * identity constraints, i.e. two <code>Graph</code>s may be equal (isomorphic) * even if the set of triples are not. * * Implementations MUST be immutable and throw respective exceptions, when * add/remove-methods are called. * * @see org.apache.clerezza.rdf.core.impl.AbstractGraph * @author reto * */ public interface ImmutableGraph extends Graph { /** * Returns true if two graphs are isomorphic * * @return true if two graphs are isomorphic */ @Override public boolean equals(Object obj); /** * Return the sum of the blank-nodes independent hashes of the triples. * More precisely the hash of the triple is calculated as follows: * (hash(subject) >> 1) ^ hash(hashCode) ^ (hash(hashCode) << 1) * Where the hash-fucntion return the hashCode of the argument * for grounded arguments and 0 otherwise. * * @return hash code */ @Override public int hashCode(); }
467
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/Literal.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.clerezza.commons.rdf;

/**
 * Represents a literal value that can be a node in an RDF Graph.
 * Literals are used to identify values such as numbers and dates by
 * means of a lexical representation. There are two types of literals
 * represented by the subinterfaces {@link PlainLiteral}
 * and {@link TypedLiteral}
 *
 * @author reto
 */
public interface Literal extends RDFTerm {

    /**
     * The lexical form of this literal, represented by a <a
     * href="http://www.unicode.org/versions/latest/">Unicode string</a>.
     *
     * @return The lexical form of this literal.
     * @see <a
     *      href="http://www.w3.org/TR/rdf11-concepts/#dfn-lexical-form">RDF-1.1
     *      Literal lexical form</a>
     */
    String getLexicalForm();

    /**
     * The IRI identifying the datatype that determines how the lexical form
     * maps to a literal value.
     *
     * @return The datatype IRI for this literal.
     * @see <a
     *      href="http://www.w3.org/TR/rdf11-concepts/#dfn-datatype-iri">RDF-1.1
     *      Literal datatype IRI</a>
     */
    IRI getDataType();

    /**
     * If and only if the datatype IRI is <a
     * href="http://www.w3.org/1999/02/22-rdf-syntax-ns#langString"
     * >http://www.w3.org/1999/02/22-rdf-syntax-ns#langString</a>, the language
     * tag for this Literal is a language tag as defined by <a
     * href="http://tools.ietf.org/html/bcp47">BCP47</a>.<br>
     * If the datatype IRI is not <a
     * href="http://www.w3.org/1999/02/22-rdf-syntax-ns#langString"
     * >http://www.w3.org/1999/02/22-rdf-syntax-ns#langString</a>, this method
     * must return null.
     *
     * @return The language tag of the literal or null if no language tag is defined
     * @see <a
     *      href="http://www.w3.org/TR/rdf11-concepts/#dfn-language-tag">RDF-1.1
     *      Literal language tag</a>
     */
    public Language getLanguage();

    /**
     * Returns true if <code>obj</code> is an instance of
     * <code>Literal</code> that is term-equal with this, false otherwise
     *
     * Two literals are term-equal (the same RDF literal) if and only if the
     * two lexical forms, the two datatype IRIs, and the two language tags (if
     * any) compare equal, character by character.
     *
     * @return true if obj equals this, false otherwise.
     */
    public boolean equals(Object obj);

    /**
     * Returns the hash code of the lexical form plus the hash code of the
     * datatype plus, if the literal has a language, the hash code of the
     * language.
     *
     * @return hash code
     */
    public int hashCode();
}
468
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/BlankNodeOrIRI.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf; /** * Represents a <code>Resource</code> that is not a <code>Literal</code>. * This is a marker interface implemented by <code>UriRef</code> * and <code>BNode</code>. * * @author reto */ public interface BlankNodeOrIRI extends RDFTerm { }
469
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/IRI.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.clerezza.commons.rdf;

import java.io.Serializable;

/**
 * Represents an RDF URI Reference
 *
 * RDF URI References are defined in section 6.4 RDF URI References of
 * http://www.w3.org/TR/2004/REC-rdf-concepts-20040210/#section-Graph-URIref
 *
 * Note that an RDF URI Reference is not the same as defined by RFC3986,
 * RDF URI References support most unicode characters
 *
 * Instances of this class are immutable.
 *
 * @author reto
 */
public class IRI implements BlankNodeOrIRI, Serializable {

    /** The unicode string representation of the IRI; never null. */
    private final String unicodeString;

    /**
     * Creates an IRI from its unicode string representation.
     *
     * @param unicodeString the unicode string of the IRI, must not be null
     * @throws NullPointerException if unicodeString is null (fail fast,
     *         rather than failing later in equals/hashCode)
     */
    public IRI(String unicodeString) {
        this.unicodeString = java.util.Objects.requireNonNull(unicodeString,
                "The unicodeString of an IRI may not be null");
    }

    /**
     * @return the unicode string that produces the URI
     */
    public String getUnicodeString() {
        return unicodeString;
    }

    /**
     * Returns true iff <code>obj</code> is an <code>IRI</code> with the same
     * unicode string.
     *
     * @param obj the object to compare with
     * @return true if obj is an instance of IRI with
     *         the same unicode-string, false otherwise
     */
    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof IRI)) {
            return false;
        }
        return unicodeString.equals(((IRI) obj).getUnicodeString());
    }

    /**
     * @return 5 + the hashcode of the string
     */
    @Override
    public int hashCode() {
        int hash = 5 + unicodeString.hashCode();
        return hash;
    }

    /**
     * @return the unicode string wrapped in angle brackets, e.g.
     *         <code>&lt;http://example.org/&gt;</code>
     */
    @Override
    public String toString() {
        // simple concatenation; the StringBuilder boilerplate added nothing
        return "<" + unicodeString + ">";
    }
}
470
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/Triple.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf; /** * A structure containing a subject, a predicate, and an object. * Also known as a statement. * * @author reto */ public interface Triple { BlankNodeOrIRI getSubject(); IRI getPredicate(); RDFTerm getObject(); /** * * @param obj * @return true iff subject, predicate, and object of both triples are equal */ @Override boolean equals(Object obj); /** * The hash code is computed as follow * (subject.hashCode() >> 1) ^ predicate.hashCode() ^ object.hashCode() << 1) * * Note that the hash returned is computed including the hash of BNodes, so * it is not blank-node blind as in Graph. * * This would have to change if triple should extend Graph * * @return hash code */ @Override int hashCode(); }
471
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/Language.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf; /** * Represents a language as expressed by the RDF 4646 language tag * * @author reto */ public class Language { private String id; /** * Constructs the language tag defined by RDF 4646, normalized to lowercase. * * @param the id as defined by RDF 4646, normalized to lowercase. */ public Language(String id) { if ((id == null) || (id.equals(""))) { throw new IllegalArgumentException("A language id may not be null or empty"); } this.id = id.toLowerCase(); } @Override public boolean equals(Object other) { if (other == null) { return false; } if (other instanceof Language) { return id.equals(((Language) other).id); } else { return false; } } @Override public int hashCode() { return id.hashCode(); } @Override public String toString() { return id; } }
472
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/package-info.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Common RDF API */ package org.apache.clerezza.commons.rdf;
473
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/event/AddEvent.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.event; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.Graph; /** * This class represent a addition event that occured on a * <code>TripleCollection</code>. * * @author rbn */ public class AddEvent extends GraphEvent { public AddEvent(Graph graph, Triple triple) { super(graph, triple); } }
474
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/event/GraphEvent.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.event; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.Graph; /** * This class represent a modification event that occured on a * <code>TripleCollection</code>. A <code>GraphEvent</code> object keeps * information about this event. These information are: The <code>Triple</code> * that was part of the modification, the type of modification (addition or * removal) and the <code>TripleCollection</code> that was modified. * * @author mir */ public class GraphEvent { private Graph graph; private Triple triple; protected GraphEvent(Graph graph, Triple triple) { this.graph = graph; this.triple = triple; } /** * Returns the <code>TripleCollection</code> that was modified in the event. * @return the graph */ public Graph getGraph() { return graph; } /** * Return the <code>Triple</code> that was part of the modification. * @return the triple */ public Triple getTriple() { return triple; } }
475
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/event/GraphListener.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.event; import java.util.List; /** * A class that is interested in graph events implements this interface and * is then added as listener to a <code>ListenableTripleCollection</code> or * one of its subclasses. When the <code>ListenableTripleCollection</code> is * modified, then the <code>GraphListener</code> is notified. * * @author mir */ public interface GraphListener { /** * This method is called when a <code>ListenableTripleCollection</code> was * modified, to which this <code>GraphListener</code> was added. A * <code>List</code> containing <code>GraphEvent</code>s are passed as * argument. The list contains all events in which a triple was part of * the modification that matched the <code>FilterTriple</code> which was passed * as argument when the listener was added. * @param events */ public void graphChanged(List<GraphEvent> events); }
476
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/event/RemoveEvent.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.event; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.Graph; /** * This class represent a removal event that occured on a * <code>TripleCollection</code>. * * @author rbn */ public class RemoveEvent extends GraphEvent { public RemoveEvent(Graph graph, Triple triple) { super(graph, triple); } }
477
0
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf
Create_ds/clerezza-rdf-core/api/src/main/java/org/apache/clerezza/commons/rdf/event/FilterTriple.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.clerezza.commons.rdf.event; import org.apache.clerezza.commons.rdf.BlankNodeOrIRI; import org.apache.clerezza.commons.rdf.RDFTerm; import org.apache.clerezza.commons.rdf.Triple; import org.apache.clerezza.commons.rdf.IRI; /** * The <code>FilterTriple</code> class provides a match()-method that tests * if a <code>Triple</code> match a certain triple pattern. * * @author mir */ public class FilterTriple { private BlankNodeOrIRI subject; private IRI predicate; private RDFTerm object; /** * Creates a new <code>FilterTriple</code>. The specified subject, * predicate and object are used to test a given <code>Triple</code>. Any * of these values can be null, which acts as wildcard in the test. * * @param subject the subject. * @param predicate the predicate. * @param object the object. */ public FilterTriple (BlankNodeOrIRI subject, IRI predicate, RDFTerm object) { this.subject = subject; this.predicate = predicate; this.object = object; } /** * Returns true if the subject, predicate and object of the specified * <code>Triple</code> match the subject, predicate and object of this * <code>FilterTriple</code>. Null values in the <code>FilterTriple</code> * act as wildcards. 
* @param triple * @return */ public boolean match(Triple triple) { boolean subjectMatch, predicateMatch, objectMatch; if (this.subject == null) { subjectMatch = true; } else { subjectMatch = this.subject.equals(triple.getSubject()); } if (this.predicate == null) { predicateMatch = true; } else { predicateMatch = this.predicate.equals(triple.getPredicate()); } if (this.object == null) { objectMatch = true; } else { objectMatch = this.object.equals(triple.getObject()); } return subjectMatch && predicateMatch && objectMatch; } @Override public String toString() { return "FilterTriples: "+subject+" "+predicate+" "+object; } }
478
0
Create_ds/aws-iot-device-sdk-java-v2/samples/FleetProvisioning/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/FleetProvisioning/src/main/java/fleetprovisioning/Mqtt5FleetProvisioningSample.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package fleetprovisioning; import software.amazon.awssdk.crt.CRT; import software.amazon.awssdk.crt.CrtResource; import software.amazon.awssdk.crt.CrtRuntimeException; import software.amazon.awssdk.crt.mqtt.MqttClientConnection; import software.amazon.awssdk.crt.mqtt.MqttClientConnectionEvents; import software.amazon.awssdk.crt.mqtt.QualityOfService; import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder; import software.amazon.awssdk.iot.iotidentity.IotIdentityClient; import software.amazon.awssdk.iot.iotidentity.model.CreateCertificateFromCsrRequest; import software.amazon.awssdk.iot.iotidentity.model.CreateCertificateFromCsrResponse; import software.amazon.awssdk.iot.iotidentity.model.CreateCertificateFromCsrSubscriptionRequest; import software.amazon.awssdk.iot.iotidentity.model.CreateKeysAndCertificateRequest; import software.amazon.awssdk.iot.iotidentity.model.CreateKeysAndCertificateResponse; import software.amazon.awssdk.iot.iotidentity.model.CreateKeysAndCertificateSubscriptionRequest; import software.amazon.awssdk.iot.iotidentity.model.ErrorResponse; import software.amazon.awssdk.iot.iotidentity.model.RegisterThingRequest; import software.amazon.awssdk.iot.iotidentity.model.RegisterThingResponse; import software.amazon.awssdk.iot.iotidentity.model.RegisterThingSubscriptionRequest; import java.nio.file.Files; import java.nio.file.Paths; import java.util.HashMap; import com.google.gson.Gson; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import utils.commandlineutils.CommandLineUtils; public class Mqtt5FleetProvisioningSample { // When run normally, we want to exit nicely even if something goes wrong. // When run from CI, we want to let an exception escape which in turn causes the // exec:java task to return a non-zero exit code. 
static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); static CompletableFuture<Void> gotResponse; static IotIdentityClient iotIdentityClient; static CreateKeysAndCertificateResponse createKeysAndCertificateResponse = null; static CreateCertificateFromCsrResponse createCertificateFromCsrResponse = null; static RegisterThingResponse registerThingResponse = null; static long responseWaitTimeMs = 5000L; // 5 seconds static CommandLineUtils cmdUtils; /* * When called during a CI run, throw an exception that will escape and fail the * exec:java task * When called otherwise, print what went wrong (if anything) and just continue * (return from main) */ static void onApplicationFailure(Throwable cause) { if (isCI) { throw new RuntimeException("BasicConnect execution failure", cause); } else if (cause != null) { System.out.println("Exception encountered: " + cause.toString()); } } static void onRejectedKeys(ErrorResponse response) { System.out.println("CreateKeysAndCertificate Request rejected, errorCode: " + response.errorCode + ", errorMessage: " + response.errorMessage + ", statusCode: " + response.statusCode); gotResponse.complete(null); } static void onRejectedCsr(ErrorResponse response) { System.out.println("CreateCertificateFromCsr Request rejected, errorCode: " + response.errorCode + ", errorMessage: " + response.errorMessage + ", statusCode: " + response.statusCode); gotResponse.complete(null); } static void onRejectedRegister(ErrorResponse response) { System.out.println("RegisterThing Request rejected, errorCode: " + response.errorCode + ", errorMessage: " + response.errorMessage + ", statusCode: " + response.statusCode); gotResponse.complete(null); } static void onCreateKeysAndCertificateAccepted(CreateKeysAndCertificateResponse response) { if (response != null) { System.out.println("CreateKeysAndCertificate response certificateId: " + response.certificateId); if 
(createKeysAndCertificateResponse == null) { createKeysAndCertificateResponse = response; } else { System.out .println("CreateKeysAndCertificate response received after having already gotten a response!"); } } else { System.out.println("CreateKeysAndCertificate response is null"); } gotResponse.complete(null); } static void onCreateCertificateFromCsrResponseAccepted(CreateCertificateFromCsrResponse response) { if (response != null) { System.out.println("CreateCertificateFromCsr response certificateId: " + response.certificateId); if (createCertificateFromCsrResponse == null) { createCertificateFromCsrResponse = response; } else { System.out .println("CreateCertificateFromCsr response received after having already gotten a response!"); } } else { System.out.println("CreateCertificateFromCsr response is null"); } gotResponse.complete(null); } static void onRegisterThingAccepted(RegisterThingResponse response) { if (response != null) { System.out.println("RegisterThing response thingName: " + response.thingName); if (registerThingResponse == null) { registerThingResponse = response; } else { System.out.println("RegisterThing response received after having already gotten a response!"); } } else { System.out.println("RegisterThing response is null"); } gotResponse.complete(null); } static void onException(Exception e) { e.printStackTrace(); System.out.println("Exception occurred " + e); } public static void main(String[] args) { /** * cmdData is the arguments/input from the command line placed into a single * struct for * use in this sample. This handles all of the command line parsing, validating, * etc. * See the Utils/CommandLineUtils for more information. 
*/ CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils .getInputForIoTSample("FleetProvisioningSample", args); MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() { @Override public void onConnectionInterrupted(int errorCode) { if (errorCode != 0) { System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode)); } } @Override public void onConnectionResumed(boolean sessionPresent) { System.out.println("Connection resumed: " + (sessionPresent ? "existing session" : "clean session")); } }; MqttClientConnection connection = null; boolean exitWithError = false; try { /** * Create the MQTT connection from the builder */ AwsIotMqttConnectionBuilder builder = AwsIotMqttConnectionBuilder.newMtlsBuilderFromPath(cmdData.input_cert, cmdData.input_key); if (cmdData.input_ca != "") { builder.withCertificateAuthorityFromPath(null, cmdData.input_ca); } builder.withConnectionEventCallbacks(callbacks) .withClientId(cmdData.input_clientId) .withEndpoint(cmdData.input_endpoint) .withPort((short) cmdData.input_port) .withCleanSession(true) .withProtocolOperationTimeoutMs(60000); connection = builder.build(); builder.close(); /** * Verify the connection was created */ if (connection == null) { onApplicationFailure(new RuntimeException("MQTT connection creation failed!")); } // Create the identity client (Identity = Fleet Provisioning) iotIdentityClient = new IotIdentityClient(connection); // Connect CompletableFuture<Boolean> connected = connection.connect(); boolean sessionPresent = connected.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Connected to " + (!sessionPresent ? 
"new" : "existing") + " session!"); // Fleet Provision based on whether there is a CSR file path or not if (cmdData.input_csrPath == null) { createKeysAndCertificateWorkflow(cmdData.input_templateName, cmdData.input_templateParameters); } else { createCertificateFromCsrWorkflow(cmdData.input_templateName, cmdData.input_templateParameters, cmdData.input_csrPath); } // Disconnect CompletableFuture<Void> disconnected = connection.disconnect(); disconnected.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); } catch (Exception ex) { System.out.println("Exception encountered! " + "\n"); ex.printStackTrace(); exitWithError = true; } finally { if (connection != null) { // Close the connection now that we are completely done with it. connection.close(); } } CrtResource.waitForNoResources(); System.out.println("Sample complete!"); if (exitWithError) { System.exit(1); } else { System.exit(0); } } private static void SubscribeToRegisterThing(String input_templateName) throws Exception { RegisterThingSubscriptionRequest registerThingSubscriptionRequest = new RegisterThingSubscriptionRequest(); registerThingSubscriptionRequest.templateName = input_templateName; CompletableFuture<Integer> subscribedRegisterAccepted = iotIdentityClient.SubscribeToRegisterThingAccepted( registerThingSubscriptionRequest, QualityOfService.AT_LEAST_ONCE, Mqtt5FleetProvisioningSample::onRegisterThingAccepted, Mqtt5FleetProvisioningSample::onException); subscribedRegisterAccepted.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Subscribed to SubscribeToRegisterThingAccepted"); CompletableFuture<Integer> subscribedRegisterRejected = iotIdentityClient.SubscribeToRegisterThingRejected( registerThingSubscriptionRequest, QualityOfService.AT_LEAST_ONCE, Mqtt5FleetProvisioningSample::onRejectedRegister, Mqtt5FleetProvisioningSample::onException); subscribedRegisterRejected.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Subscribed to SubscribeToRegisterThingRejected"); } 
private static void createKeysAndCertificateWorkflow(String input_templateName, String input_templateParameters) throws Exception { CreateKeysAndCertificateSubscriptionRequest createKeysAndCertificateSubscriptionRequest = new CreateKeysAndCertificateSubscriptionRequest(); CompletableFuture<Integer> keysSubscribedAccepted = iotIdentityClient .SubscribeToCreateKeysAndCertificateAccepted( createKeysAndCertificateSubscriptionRequest, QualityOfService.AT_LEAST_ONCE, Mqtt5FleetProvisioningSample::onCreateKeysAndCertificateAccepted); keysSubscribedAccepted.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Subscribed to CreateKeysAndCertificateAccepted"); CompletableFuture<Integer> keysSubscribedRejected = iotIdentityClient .SubscribeToCreateKeysAndCertificateRejected( createKeysAndCertificateSubscriptionRequest, QualityOfService.AT_LEAST_ONCE, Mqtt5FleetProvisioningSample::onRejectedKeys); keysSubscribedRejected.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Subscribed to CreateKeysAndCertificateRejected"); // Subscribes to the register thing accepted and rejected topics SubscribeToRegisterThing(input_templateName); CompletableFuture<Integer> publishKeys = iotIdentityClient.PublishCreateKeysAndCertificate( new CreateKeysAndCertificateRequest(), QualityOfService.AT_LEAST_ONCE); gotResponse = new CompletableFuture<>(); publishKeys.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Published to CreateKeysAndCertificate"); gotResponse.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Got response at CreateKeysAndCertificate"); // Verify the response is good if (createKeysAndCertificateResponse == null) { throw new Exception("Got invalid/error createKeysAndCertificateResponse"); } gotResponse = new CompletableFuture<>(); System.out.println("RegisterThing now...."); RegisterThingRequest registerThingRequest = new RegisterThingRequest(); registerThingRequest.certificateOwnershipToken = 
createKeysAndCertificateResponse.certificateOwnershipToken; registerThingRequest.templateName = input_templateName; if (input_templateParameters != null && input_templateParameters != "") { registerThingRequest.parameters = new Gson().fromJson(input_templateParameters, HashMap.class); } CompletableFuture<Integer> publishRegister = iotIdentityClient.PublishRegisterThing( registerThingRequest, QualityOfService.AT_LEAST_ONCE); publishRegister.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Published to RegisterThing"); gotResponse.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Got response at RegisterThing"); } private static void createCertificateFromCsrWorkflow(String input_templateName, String input_templateParameters, String input_csrPath) throws Exception { CreateCertificateFromCsrSubscriptionRequest createCertificateFromCsrSubscriptionRequest = new CreateCertificateFromCsrSubscriptionRequest(); CompletableFuture<Integer> csrSubscribedAccepted = iotIdentityClient .SubscribeToCreateCertificateFromCsrAccepted( createCertificateFromCsrSubscriptionRequest, QualityOfService.AT_LEAST_ONCE, Mqtt5FleetProvisioningSample::onCreateCertificateFromCsrResponseAccepted); csrSubscribedAccepted.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Subscribed to CreateCertificateFromCsrAccepted"); CompletableFuture<Integer> csrSubscribedRejected = iotIdentityClient .SubscribeToCreateCertificateFromCsrRejected( createCertificateFromCsrSubscriptionRequest, QualityOfService.AT_LEAST_ONCE, Mqtt5FleetProvisioningSample::onRejectedCsr); csrSubscribedRejected.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Subscribed to CreateCertificateFromCsrRejected"); // Subscribes to the register thing accepted and rejected topics SubscribeToRegisterThing(input_templateName); String csrContents = new String(Files.readAllBytes(Paths.get(input_csrPath))); CreateCertificateFromCsrRequest createCertificateFromCsrRequest = new 
CreateCertificateFromCsrRequest(); createCertificateFromCsrRequest.certificateSigningRequest = csrContents; CompletableFuture<Integer> publishCsr = iotIdentityClient.PublishCreateCertificateFromCsr( createCertificateFromCsrRequest, QualityOfService.AT_LEAST_ONCE); gotResponse = new CompletableFuture<>(); publishCsr.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Published to CreateCertificateFromCsr"); gotResponse.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Got response at CreateCertificateFromCsr"); // Verify the response is good if (createCertificateFromCsrResponse == null) { throw new Exception("Got invalid/error createCertificateFromCsrResponse"); } gotResponse = new CompletableFuture<>(); System.out.println("RegisterThing now...."); RegisterThingRequest registerThingRequest = new RegisterThingRequest(); registerThingRequest.certificateOwnershipToken = createCertificateFromCsrResponse.certificateOwnershipToken; registerThingRequest.templateName = input_templateName; registerThingRequest.parameters = new Gson().fromJson(input_templateParameters, HashMap.class); CompletableFuture<Integer> publishRegister = iotIdentityClient.PublishRegisterThing( registerThingRequest, QualityOfService.AT_LEAST_ONCE); publishRegister.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Published to RegisterThing"); gotResponse.get(responseWaitTimeMs, TimeUnit.MILLISECONDS); System.out.println("Got response at RegisterThing"); } }
479
0
Create_ds/aws-iot-device-sdk-java-v2/samples/FleetProvisioning/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/FleetProvisioning/src/main/java/fleetprovisioning/FleetProvisioningSample.java
/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */
package fleetprovisioning;

import software.amazon.awssdk.crt.CRT;
import software.amazon.awssdk.crt.CrtResource;
import software.amazon.awssdk.crt.CrtRuntimeException;
import software.amazon.awssdk.crt.mqtt.MqttClientConnection;
import software.amazon.awssdk.crt.mqtt.MqttClientConnectionEvents;
import software.amazon.awssdk.crt.mqtt.QualityOfService;
import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder;
import software.amazon.awssdk.iot.iotidentity.IotIdentityClient;
import software.amazon.awssdk.iot.iotidentity.model.CreateCertificateFromCsrRequest;
import software.amazon.awssdk.iot.iotidentity.model.CreateCertificateFromCsrResponse;
import software.amazon.awssdk.iot.iotidentity.model.CreateCertificateFromCsrSubscriptionRequest;
import software.amazon.awssdk.iot.iotidentity.model.CreateKeysAndCertificateRequest;
import software.amazon.awssdk.iot.iotidentity.model.CreateKeysAndCertificateResponse;
import software.amazon.awssdk.iot.iotidentity.model.CreateKeysAndCertificateSubscriptionRequest;
import software.amazon.awssdk.iot.iotidentity.model.ErrorResponse;
import software.amazon.awssdk.iot.iotidentity.model.RegisterThingRequest;
import software.amazon.awssdk.iot.iotidentity.model.RegisterThingResponse;
import software.amazon.awssdk.iot.iotidentity.model.RegisterThingSubscriptionRequest;

import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;

import com.google.gson.Gson;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

import utils.commandlineutils.CommandLineUtils;

/**
 * Sample demonstrating AWS IoT Fleet Provisioning over MQTT.
 *
 * Two workflows are supported, selected by whether a CSR file path was given
 * on the command line:
 *  - CreateKeysAndCertificate: the service generates both the key pair and
 *    the certificate.
 *  - CreateCertificateFromCsr: the service signs a certificate from a
 *    caller-supplied Certificate Signing Request.
 * Either workflow finishes by calling RegisterThing with the provisioning
 * template named on the command line.
 */
public class FleetProvisioningSample {

    // When run normally, we want to exit nicely even if something goes wrong.
    // When run from CI, we want to let an exception escape which in turn causes the
    // exec:java task to return a non-zero exit code.
    static String ciPropValue = System.getProperty("aws.crt.ci");
    static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue);

    // Completed (with null) whenever an accepted/rejected response arrives for the
    // request currently in flight. Re-created before each request is published.
    static CompletableFuture<Void> gotResponse;
    static IotIdentityClient iotIdentityClient;

    // Responses captured by the subscription callbacks; null until a response arrives.
    static CreateKeysAndCertificateResponse createKeysAndCertificateResponse = null;
    static CreateCertificateFromCsrResponse createCertificateFromCsrResponse = null;
    static RegisterThingResponse registerThingResponse = null;

    static long responseWaitTimeMs = 5000L; // 5 seconds
    static CommandLineUtils cmdUtils;

    /*
     * When called during a CI run, throw an exception that will escape and fail the exec:java task
     * When called otherwise, print what went wrong (if anything) and just continue (return from main)
     */
    static void onApplicationFailure(Throwable cause) {
        if (isCI) {
            // Fixed copy-paste error: message previously said "BasicConnect".
            throw new RuntimeException("FleetProvisioningSample execution failure", cause);
        } else if (cause != null) {
            System.out.println("Exception encountered: " + cause.toString());
        }
    }

    /** Rejection handler for CreateKeysAndCertificate; logs and unblocks the waiter. */
    static void onRejectedKeys(ErrorResponse response) {
        System.out.println("CreateKeysAndCertificate Request rejected, errorCode: " + response.errorCode +
                ", errorMessage: " + response.errorMessage +
                ", statusCode: " + response.statusCode);
        gotResponse.complete(null);
    }

    /** Rejection handler for CreateCertificateFromCsr; logs and unblocks the waiter. */
    static void onRejectedCsr(ErrorResponse response) {
        System.out.println("CreateCertificateFromCsr Request rejected, errorCode: " + response.errorCode +
                ", errorMessage: " + response.errorMessage +
                ", statusCode: " + response.statusCode);
        gotResponse.complete(null);
    }

    /** Rejection handler for RegisterThing; logs and unblocks the waiter. */
    static void onRejectedRegister(ErrorResponse response) {
        System.out.println("RegisterThing Request rejected, errorCode: " + response.errorCode +
                ", errorMessage: " + response.errorMessage +
                ", statusCode: " + response.statusCode);
        gotResponse.complete(null);
    }

    /** Stores the first CreateKeysAndCertificate response and unblocks the waiter. */
    static void onCreateKeysAndCertificateAccepted(CreateKeysAndCertificateResponse response) {
        if (response != null) {
            System.out.println("CreateKeysAndCertificate response certificateId: " + response.certificateId);
            if (createKeysAndCertificateResponse == null) {
                createKeysAndCertificateResponse = response;
            } else {
                System.out.println("CreateKeysAndCertificate response received after having already gotten a response!");
            }
        } else {
            System.out.println("CreateKeysAndCertificate response is null");
        }
        gotResponse.complete(null);
    }

    /** Stores the first CreateCertificateFromCsr response and unblocks the waiter. */
    static void onCreateCertificateFromCsrResponseAccepted(CreateCertificateFromCsrResponse response) {
        if (response != null) {
            System.out.println("CreateCertificateFromCsr response certificateId: " + response.certificateId);
            if (createCertificateFromCsrResponse == null) {
                createCertificateFromCsrResponse = response;
            } else {
                System.out.println("CreateCertificateFromCsr response received after having already gotten a response!");
            }
        } else {
            System.out.println("CreateCertificateFromCsr response is null");
        }
        gotResponse.complete(null);
    }

    /** Stores the first RegisterThing response and unblocks the waiter. */
    static void onRegisterThingAccepted(RegisterThingResponse response) {
        if (response != null) {
            System.out.println("RegisterThing response thingName: " + response.thingName);
            if (registerThingResponse == null) {
                registerThingResponse = response;
            } else {
                System.out.println("RegisterThing response received after having already gotten a response!");
            }
        } else {
            System.out.println("RegisterThing response is null");
        }
        gotResponse.complete(null);
    }

    /** Generic subscription-error handler. */
    static void onException(Exception e) {
        e.printStackTrace();
        System.out.println("Exception occurred " + e);
    }

    public static void main(String[] args) {
        /**
         * cmdData is the arguments/input from the command line placed into a single struct for
         * use in this sample. This handles all of the command line parsing, validating, etc.
         * See the Utils/CommandLineUtils for more information.
         */
        CommandLineUtils.SampleCommandLineData cmdData =
                CommandLineUtils.getInputForIoTSample("FleetProvisioningSample", args);

        MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() {
            @Override
            public void onConnectionInterrupted(int errorCode) {
                if (errorCode != 0) {
                    System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode));
                }
            }

            @Override
            public void onConnectionResumed(boolean sessionPresent) {
                System.out.println("Connection resumed: " + (sessionPresent ? "existing session" : "clean session"));
            }
        };

        MqttClientConnection connection = null;
        boolean exitWithError = false;

        try {
            /**
             * Create the MQTT connection from the builder
             */
            AwsIotMqttConnectionBuilder builder =
                    AwsIotMqttConnectionBuilder.newMtlsBuilderFromPath(cmdData.input_cert, cmdData.input_key);
            // Fixed: was `cmdData.input_ca != ""`, which compares String references,
            // not contents, and is therefore effectively always true for a non-interned value.
            if (cmdData.input_ca != null && !cmdData.input_ca.isEmpty()) {
                builder.withCertificateAuthorityFromPath(null, cmdData.input_ca);
            }
            builder.withConnectionEventCallbacks(callbacks)
                    .withClientId(cmdData.input_clientId)
                    .withEndpoint(cmdData.input_endpoint)
                    .withPort((short) cmdData.input_port)
                    .withCleanSession(true)
                    .withProtocolOperationTimeoutMs(60000);
            connection = builder.build();
            builder.close();

            /**
             * Verify the connection was created
             */
            if (connection == null) {
                onApplicationFailure(new RuntimeException("MQTT connection creation failed!"));
            }

            // Create the identity client (Identity = Fleet Provisioning)
            iotIdentityClient = new IotIdentityClient(connection);

            // Connect
            CompletableFuture<Boolean> connected = connection.connect();
            boolean sessionPresent = connected.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
            System.out.println("Connected to " + (!sessionPresent ? "new" : "existing") + " session!");

            // Fleet Provision based on whether there is a CSR file path or not
            if (cmdData.input_csrPath == null) {
                createKeysAndCertificateWorkflow(cmdData.input_templateName, cmdData.input_templateParameters);
            } else {
                createCertificateFromCsrWorkflow(cmdData.input_templateName, cmdData.input_templateParameters,
                        cmdData.input_csrPath);
            }

            // Disconnect
            CompletableFuture<Void> disconnected = connection.disconnect();
            disconnected.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);

        } catch (Exception ex) {
            System.out.println("Exception encountered! " + "\n");
            ex.printStackTrace();
            exitWithError = true;
        } finally {
            if (connection != null) {
                // Close the connection now that we are completely done with it.
                connection.close();
            }
        }

        CrtResource.waitForNoResources();
        System.out.println("Sample complete!");

        if (exitWithError) {
            System.exit(1);
        } else {
            System.exit(0);
        }
    }

    /**
     * Subscribes to the RegisterThing accepted and rejected topics for the given
     * provisioning template. (Renamed from SubscribeToRegisterThing to follow
     * Java method-naming conventions; it is private, so no callers outside this
     * class are affected.)
     */
    private static void subscribeToRegisterThing(String input_templateName) throws Exception {
        RegisterThingSubscriptionRequest registerThingSubscriptionRequest = new RegisterThingSubscriptionRequest();
        registerThingSubscriptionRequest.templateName = input_templateName;

        CompletableFuture<Integer> subscribedRegisterAccepted = iotIdentityClient.SubscribeToRegisterThingAccepted(
                registerThingSubscriptionRequest,
                QualityOfService.AT_LEAST_ONCE,
                FleetProvisioningSample::onRegisterThingAccepted,
                FleetProvisioningSample::onException);
        subscribedRegisterAccepted.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
        System.out.println("Subscribed to SubscribeToRegisterThingAccepted");

        CompletableFuture<Integer> subscribedRegisterRejected = iotIdentityClient.SubscribeToRegisterThingRejected(
                registerThingSubscriptionRequest,
                QualityOfService.AT_LEAST_ONCE,
                FleetProvisioningSample::onRejectedRegister,
                FleetProvisioningSample::onException);
        subscribedRegisterRejected.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
        System.out.println("Subscribed to SubscribeToRegisterThingRejected");
    }

    /**
     * Full provisioning workflow where the service creates both the key pair and
     * the certificate, then registers the thing with the template.
     */
    private static void createKeysAndCertificateWorkflow(String input_templateName, String input_templateParameters)
            throws Exception {
        CreateKeysAndCertificateSubscriptionRequest createKeysAndCertificateSubscriptionRequest =
                new CreateKeysAndCertificateSubscriptionRequest();
        CompletableFuture<Integer> keysSubscribedAccepted = iotIdentityClient.SubscribeToCreateKeysAndCertificateAccepted(
                createKeysAndCertificateSubscriptionRequest,
                QualityOfService.AT_LEAST_ONCE,
                FleetProvisioningSample::onCreateKeysAndCertificateAccepted);
        keysSubscribedAccepted.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
        System.out.println("Subscribed to CreateKeysAndCertificateAccepted");

        CompletableFuture<Integer> keysSubscribedRejected = iotIdentityClient.SubscribeToCreateKeysAndCertificateRejected(
                createKeysAndCertificateSubscriptionRequest,
                QualityOfService.AT_LEAST_ONCE,
                FleetProvisioningSample::onRejectedKeys);
        keysSubscribedRejected.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
        System.out.println("Subscribed to CreateKeysAndCertificateRejected");

        // Subscribes to the register thing accepted and rejected topics
        subscribeToRegisterThing(input_templateName);

        CompletableFuture<Integer> publishKeys = iotIdentityClient.PublishCreateKeysAndCertificate(
                new CreateKeysAndCertificateRequest(),
                QualityOfService.AT_LEAST_ONCE);
        gotResponse = new CompletableFuture<>();
        publishKeys.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
        System.out.println("Published to CreateKeysAndCertificate");
        gotResponse.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
        System.out.println("Got response at CreateKeysAndCertificate");

        // Verify the response is good
        if (createKeysAndCertificateResponse == null) {
            throw new Exception("Got invalid/error createKeysAndCertificateResponse");
        }

        gotResponse = new CompletableFuture<>();
        System.out.println("RegisterThing now....");
        RegisterThingRequest registerThingRequest = new RegisterThingRequest();
        registerThingRequest.certificateOwnershipToken = createKeysAndCertificateResponse.certificateOwnershipToken;
        registerThingRequest.templateName = input_templateName;
        // Fixed: was `input_templateParameters != ""` (reference comparison).
        if (input_templateParameters != null && !input_templateParameters.isEmpty()) {
            registerThingRequest.parameters = new Gson().fromJson(input_templateParameters, HashMap.class);
        }

        CompletableFuture<Integer> publishRegister = iotIdentityClient.PublishRegisterThing(
                registerThingRequest,
                QualityOfService.AT_LEAST_ONCE);
        publishRegister.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
        System.out.println("Published to RegisterThing");
        gotResponse.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
        System.out.println("Got response at RegisterThing");
    }

    /**
     * Provisioning workflow where the caller supplies a CSR file and the service
     * signs a certificate from it, then registers the thing with the template.
     */
    private static void createCertificateFromCsrWorkflow(String input_templateName, String input_templateParameters,
            String input_csrPath) throws Exception {
        CreateCertificateFromCsrSubscriptionRequest createCertificateFromCsrSubscriptionRequest =
                new CreateCertificateFromCsrSubscriptionRequest();
        CompletableFuture<Integer> csrSubscribedAccepted = iotIdentityClient.SubscribeToCreateCertificateFromCsrAccepted(
                createCertificateFromCsrSubscriptionRequest,
                QualityOfService.AT_LEAST_ONCE,
                FleetProvisioningSample::onCreateCertificateFromCsrResponseAccepted);
        csrSubscribedAccepted.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
        System.out.println("Subscribed to CreateCertificateFromCsrAccepted");

        CompletableFuture<Integer> csrSubscribedRejected = iotIdentityClient.SubscribeToCreateCertificateFromCsrRejected(
                createCertificateFromCsrSubscriptionRequest,
                QualityOfService.AT_LEAST_ONCE,
                FleetProvisioningSample::onRejectedCsr);
        csrSubscribedRejected.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
        System.out.println("Subscribed to CreateCertificateFromCsrRejected");

        // Subscribes to the register thing accepted and rejected topics
        subscribeToRegisterThing(input_templateName);

        String csrContents = new String(Files.readAllBytes(Paths.get(input_csrPath)));
        CreateCertificateFromCsrRequest createCertificateFromCsrRequest = new CreateCertificateFromCsrRequest();
        createCertificateFromCsrRequest.certificateSigningRequest = csrContents;
        CompletableFuture<Integer> publishCsr = iotIdentityClient.PublishCreateCertificateFromCsr(
                createCertificateFromCsrRequest,
                QualityOfService.AT_LEAST_ONCE);
        gotResponse = new CompletableFuture<>();
        publishCsr.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
        System.out.println("Published to CreateCertificateFromCsr");
        gotResponse.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
        System.out.println("Got response at CreateCertificateFromCsr");

        // Verify the response is good
        if (createCertificateFromCsrResponse == null) {
            throw new Exception("Got invalid/error createCertificateFromCsrResponse");
        }

        gotResponse = new CompletableFuture<>();
        System.out.println("RegisterThing now....");
        RegisterThingRequest registerThingRequest = new RegisterThingRequest();
        registerThingRequest.certificateOwnershipToken = createCertificateFromCsrResponse.certificateOwnershipToken;
        registerThingRequest.templateName = input_templateName;
        // Guarded for consistency with createKeysAndCertificateWorkflow; previously
        // the parameters JSON was parsed unconditionally.
        if (input_templateParameters != null && !input_templateParameters.isEmpty()) {
            registerThingRequest.parameters = new Gson().fromJson(input_templateParameters, HashMap.class);
        }

        CompletableFuture<Integer> publishRegister = iotIdentityClient.PublishRegisterThing(
                registerThingRequest,
                QualityOfService.AT_LEAST_ONCE);
        publishRegister.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
        System.out.println("Published to RegisterThing");
        gotResponse.get(responseWaitTimeMs, TimeUnit.MILLISECONDS);
        System.out.println("Got response at RegisterThing");
    }
}
480
0
Create_ds/aws-iot-device-sdk-java-v2/samples/CustomKeyOpsConnect/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/CustomKeyOpsConnect/src/main/java/customkeyopsconnect/CustomKeyOpsConnect.java
/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */
package customkeyopsconnect;

import software.amazon.awssdk.crt.CRT;
import software.amazon.awssdk.crt.CrtResource;
import software.amazon.awssdk.crt.CrtRuntimeException;
import software.amazon.awssdk.crt.io.*;
import software.amazon.awssdk.crt.mqtt.*;
import software.amazon.awssdk.crt.http.HttpProxyOptions;
import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder;

import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.FileReader;
import java.security.KeyFactory;
import java.security.PrivateKey;
import java.security.Signature;
import java.security.interfaces.RSAPrivateKey;
import java.security.spec.PKCS8EncodedKeySpec;
import java.util.Base64;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;

import utils.commandlineutils.CommandLineUtils;

/**
 * Sample demonstrating an mTLS MQTT connection where the private-key operation
 * (the TLS handshake signature) is performed by application code via a
 * custom {@link TlsKeyOperationHandler}, instead of handing the key file to
 * the TLS stack.
 */
public class CustomKeyOpsConnect {

    // When run normally, we want to exit nicely even if something goes wrong
    // When run from CI, we want to let an exception escape which in turn causes the
    // exec:java task to return a non-zero exit code
    static String ciPropValue = System.getProperty("aws.crt.ci");
    static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue);

    static CommandLineUtils cmdUtils;

    /*
     * When called during a CI run, throw an exception that will escape and fail the exec:java task
     * When called otherwise, print what went wrong (if anything) and just continue (return from main)
     */
    static void onApplicationFailure(Throwable cause) {
        if (isCI) {
            // Fixed copy-paste error: message previously said "CustomKeyOpsPubSub".
            throw new RuntimeException("CustomKeyOpsConnect execution failure", cause);
        } else if (cause != null) {
            System.out.println("Exception encountered: " + cause.toString());
        }
    }

    /**
     * Handles TLS private-key operations by signing SHA-256 digests with an
     * RSA key loaded from a PKCS#8 PEM file.
     */
    static class MyKeyOperationHandler implements TlsKeyOperationHandler {

        RSAPrivateKey key;

        MyKeyOperationHandler(String keyPath) {
            key = loadPrivateKey(keyPath);
        }

        @Override
        public void performOperation(TlsKeyOperation operation) {
            try {
                // Fixed missing separator in log output.
                System.out.println("MyKeyOperationHandler.performOperation " + operation.getType().name());

                if (operation.getType() != TlsKeyOperation.Type.SIGN) {
                    throw new RuntimeException("Simple sample only handles SIGN operations");
                }
                if (operation.getSignatureAlgorithm() != TlsSignatureAlgorithm.RSA) {
                    throw new RuntimeException("Simple sample only handles RSA keys");
                }
                if (operation.getDigestAlgorithm() != TlsHashAlgorithm.SHA256) {
                    throw new RuntimeException("Simple sample only handles SHA256 digests");
                }

                // A SIGN operation's inputData is the 32bytes of the SHA-256 digest.
                // Before doing the RSA signature, we need to construct a PKCS1 v1.5 DigestInfo.
                // See https://datatracker.ietf.org/doc/html/rfc3447#section-9.2
                byte[] digest = operation.getInput();

                // These are the appropriate bytes for the SHA-256 AlgorithmIdentifier:
                // https://tools.ietf.org/html/rfc3447#page-43
                byte[] sha256DigestAlgorithm = { 0x30, 0x31, 0x30, 0x0d, 0x06, 0x09, 0x60, (byte) 0x86, 0x48,
                        0x01, 0x65, 0x03, 0x04, 0x02, 0x01, 0x05, 0x00, 0x04, 0x20 };

                ByteArrayOutputStream digestInfoStream = new ByteArrayOutputStream();
                digestInfoStream.write(sha256DigestAlgorithm);
                digestInfoStream.write(digest);
                byte[] digestInfo = digestInfoStream.toByteArray();

                // Sign the DigestInfo. "NONEwithRSA" is used because the digest (with its
                // AlgorithmIdentifier prefix) has already been computed above.
                Signature rsaSign = Signature.getInstance("NONEwithRSA");
                rsaSign.initSign(key);
                rsaSign.update(digestInfo);
                byte[] signatureBytes = rsaSign.sign();

                operation.complete(signatureBytes);

            } catch (Exception ex) {
                System.out.println("Error during key operation:" + ex);
                operation.completeExceptionally(ex);
            }
        }

        /**
         * Loads an RSA private key from a PKCS#8 PEM file.
         *
         * Adapted from: https://stackoverflow.com/a/27621696
         * You probably need to convert your private key file from PKCS#1
         * to PKCS#8 to get it working with this sample:
         *
         *   $ openssl pkcs8 -topk8 -in my-private.pem.key -out my-private-pk8.pem.key -nocrypt
         *
         * IoT Core vends keys as PKCS#1 by default,
         * but Java only seems to have this PKCS8EncodedKeySpec class.
         */
        RSAPrivateKey loadPrivateKey(String filepath) {
            try {
                /* Read the BASE64-encoded contents of the private key file */
                StringBuilder pemBase64 = new StringBuilder();
                try (BufferedReader reader = new BufferedReader(new FileReader(filepath))) {
                    String line;
                    while ((line = reader.readLine()) != null) {
                        // Strip off PEM header and footer
                        if (line.startsWith("---")) {
                            if (line.contains("RSA")) {
                                throw new RuntimeException("private key must be converted from PKCS#1 to PKCS#8");
                            }
                            continue;
                        }
                        pemBase64.append(line);
                    }
                }

                String pemBase64String = pemBase64.toString();
                byte[] der = Base64.getDecoder().decode(pemBase64String);

                /* Create PrivateKey instance */
                PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(der);
                KeyFactory keyFactory = KeyFactory.getInstance("RSA");
                PrivateKey privateKey = keyFactory.generatePrivate(keySpec);
                return (RSAPrivateKey) privateKey;

            } catch (Exception ex) {
                throw new RuntimeException(ex);
            }
        }
    }

    public static void main(String[] args) {
        /**
         * cmdData is the arguments/input from the command line placed into a single struct for
         * use in this sample. This handles all of the command line parsing, validating, etc.
         * See the Utils/CommandLineUtils for more information.
         */
        CommandLineUtils.SampleCommandLineData cmdData =
                CommandLineUtils.getInputForIoTSample("CustomKeyOpsConnect", args);

        MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() {
            @Override
            public void onConnectionInterrupted(int errorCode) {
                if (errorCode != 0) {
                    System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode));
                }
            }

            @Override
            public void onConnectionResumed(boolean sessionPresent) {
                System.out.println("Connection resumed: " + (sessionPresent ? "existing session" : "clean session"));
            }
        };

        MyKeyOperationHandler myKeyOperationHandler = new MyKeyOperationHandler(cmdData.input_key);
        TlsContextCustomKeyOperationOptions keyOperationOptions =
                new TlsContextCustomKeyOperationOptions(myKeyOperationHandler)
                        .withCertificateFilePath(cmdData.input_cert);

        try {
            /**
             * Create the MQTT connection from the builder
             */
            AwsIotMqttConnectionBuilder builder =
                    AwsIotMqttConnectionBuilder.newMtlsCustomKeyOperationsBuilder(keyOperationOptions);
            // Fixed: was `cmdData.input_ca != ""`, a String reference comparison.
            if (cmdData.input_ca != null && !cmdData.input_ca.isEmpty()) {
                builder.withCertificateAuthorityFromPath(null, cmdData.input_ca);
            }
            builder.withConnectionEventCallbacks(callbacks)
                    .withClientId(cmdData.input_clientId)
                    .withEndpoint(cmdData.input_endpoint)
                    .withPort((short) cmdData.input_port)
                    .withCleanSession(true)
                    .withProtocolOperationTimeoutMs(60000);
            // Fixed: was `cmdData.input_proxyHost != ""`, a String reference comparison.
            if (cmdData.input_proxyHost != null && !cmdData.input_proxyHost.isEmpty()
                    && cmdData.input_proxyPort > 0) {
                HttpProxyOptions proxyOptions = new HttpProxyOptions();
                proxyOptions.setHost(cmdData.input_proxyHost);
                proxyOptions.setPort(cmdData.input_proxyPort);
                builder.withHttpProxyOptions(proxyOptions);
            }
            MqttClientConnection connection = builder.build();
            builder.close();

            /**
             * Verify the connection was created
             */
            if (connection == null) {
                onApplicationFailure(new RuntimeException("MQTT connection creation failed!"));
            }

            /**
             * Connect and disconnect
             */
            CompletableFuture<Boolean> connected = connection.connect();
            try {
                boolean sessionPresent = connected.get();
                System.out.println("Connected to " + (!sessionPresent ? "new" : "existing") + " session!");
            } catch (Exception ex) {
                throw new RuntimeException("Exception occurred during connect", ex);
            }
            System.out.println("Disconnecting...");
            CompletableFuture<Void> disconnected = connection.disconnect();
            disconnected.get();
            System.out.println("Disconnected.");

            /**
             * Close the connection now that it is complete
             */
            connection.close();

        } catch (CrtRuntimeException | InterruptedException | ExecutionException ex) {
            onApplicationFailure(ex);
        }

        CrtResource.waitForNoResources();
        System.out.println("Complete!");
    }
}
481
0
Create_ds/aws-iot-device-sdk-java-v2/samples/Shadow/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/Shadow/src/main/java/shadow/Mqtt5ShadowSample.java
/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */
package shadow;

import software.amazon.awssdk.crt.CRT;
import software.amazon.awssdk.crt.CrtResource;
import software.amazon.awssdk.crt.CrtRuntimeException;
import software.amazon.awssdk.crt.mqtt.MqttClientConnection;
import software.amazon.awssdk.crt.mqtt.QualityOfService;
import software.amazon.awssdk.crt.mqtt5.packets.*;
import software.amazon.awssdk.crt.mqtt5.*;
import software.amazon.awssdk.crt.mqtt5.Mqtt5ClientOptions;
import software.amazon.awssdk.crt.mqtt5.Mqtt5Client;
import software.amazon.awssdk.iot.AwsIotMqtt5ClientBuilder;
import software.amazon.awssdk.iot.iotshadow.IotShadowClient;
import software.amazon.awssdk.iot.iotshadow.model.ErrorResponse;
import software.amazon.awssdk.iot.iotshadow.model.GetShadowRequest;
import software.amazon.awssdk.iot.iotshadow.model.GetShadowResponse;
import software.amazon.awssdk.iot.iotshadow.model.GetShadowSubscriptionRequest;
import software.amazon.awssdk.iot.iotshadow.model.ShadowDeltaUpdatedEvent;
import software.amazon.awssdk.iot.iotshadow.model.ShadowDeltaUpdatedSubscriptionRequest;
import software.amazon.awssdk.iot.iotshadow.model.ShadowState;
import software.amazon.awssdk.iot.iotshadow.model.UpdateShadowRequest;
import software.amazon.awssdk.iot.iotshadow.model.UpdateShadowResponse;
import software.amazon.awssdk.iot.iotshadow.model.UpdateShadowSubscriptionRequest;

import java.util.HashMap;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.ExecutionException;
import java.util.Scanner;

import utils.commandlineutils.CommandLineUtils;

/**
 * Sample demonstrating the AWS IoT Device Shadow service over an MQTT5 client.
 * Keeps a single shadow property ("color") in sync between a local value and
 * the thing's shadow document, taking new values either from the console or,
 * in CI, from an automated loop.
 */
public class Mqtt5ShadowSample {

    /**
     * Lifecycle callbacks for the MQTT5 client; completes {@code connectedFuture}
     * on first successful connect and {@code stoppedFuture} once the client stops.
     */
    static final class SampleLifecycleEvents implements Mqtt5ClientOptions.LifecycleEvents {
        CompletableFuture<Void> connectedFuture = new CompletableFuture<>();
        CompletableFuture<Void> stoppedFuture = new CompletableFuture<>();

        @Override
        public void onAttemptingConnect(Mqtt5Client client, OnAttemptingConnectReturn onAttemptingConnectReturn) {
            System.out.println("Mqtt5 Client: Attempting connection...");
        }

        @Override
        public void onConnectionSuccess(Mqtt5Client client, OnConnectionSuccessReturn onConnectionSuccessReturn) {
            System.out.println("Mqtt5 Client: Connection success, client ID: "
                    + onConnectionSuccessReturn.getNegotiatedSettings().getAssignedClientID());
            connectedFuture.complete(null);
        }

        @Override
        public void onConnectionFailure(Mqtt5Client client, OnConnectionFailureReturn onConnectionFailureReturn) {
            String errorString = CRT.awsErrorString(onConnectionFailureReturn.getErrorCode());
            System.out.println("Mqtt5 Client: Connection failed with error: " + errorString);
            connectedFuture.completeExceptionally(new Exception("Could not connect: " + errorString));
        }

        @Override
        public void onDisconnection(Mqtt5Client client, OnDisconnectionReturn onDisconnectionReturn) {
            System.out.println("Mqtt5 Client: Disconnected");
            DisconnectPacket disconnectPacket = onDisconnectionReturn.getDisconnectPacket();
            if (disconnectPacket != null) {
                System.out.println("\tDisconnection packet code: " + disconnectPacket.getReasonCode());
                System.out.println("\tDisconnection packet reason: " + disconnectPacket.getReasonString());
            }
        }

        @Override
        public void onStopped(Mqtt5Client client, OnStoppedReturn onStoppedReturn) {
            System.out.println("Mqtt5 Client: Stopped");
            stoppedFuture.complete(null);
        }
    }

    // When run normally, we want to get input from the console
    // When run from CI, we want to automatically make changes to the shadow
    // document
    static String ciPropValue = System.getProperty("aws.crt.ci");
    static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue);

    static String input_thingName;
    final static String SHADOW_PROPERTY = "color";
    final static String SHADOW_VALUE_DEFAULT = "off";

    static IotShadowClient shadow;
    // Last value this process believes the shadow property has; null until the
    // first reported value (or default) is established.
    static String localValue = null;
    // Completed when a response for the request currently in flight arrives.
    static CompletableFuture<Void> gotResponse;
    static CommandLineUtils cmdUtils;

    /**
     * Handles the initial GetShadow response: adopts the reported value, logs a
     * pending delta, or seeds the shadow with the default when it has no value.
     */
    static void onGetShadowAccepted(GetShadowResponse response) {
        System.out.println("Received initial shadow state");
        if (response.state != null && localValue == null) {
            gotResponse.complete(null);
            if (response.state.delta != null) {
                String value = response.state.delta.get(SHADOW_PROPERTY).toString();
                System.out.println("  Shadow delta value: " + value);
                return;
            }
            if (response.state.reported != null) {
                String value = response.state.reported.get(SHADOW_PROPERTY).toString();
                System.out.println("  Shadow reported value: " + value);
                // Initialize local value to match the reported shadow value
                localValue = value;
                return;
            }
        }
        System.out.println("  Shadow document has no value for " + SHADOW_PROPERTY + ". Setting default...");
        changeShadowValue(SHADOW_VALUE_DEFAULT);
    }

    /**
     * Handles GetShadow rejection: a 404 means no shadow document exists yet, so
     * one is created with the default value; anything else is fatal.
     */
    static void onGetShadowRejected(ErrorResponse response) {
        if (response.code == 404) {
            System.out.println("Thing has no shadow document. Creating with defaults...");
            changeShadowValue(SHADOW_VALUE_DEFAULT);
            return;
        }
        gotResponse.complete(null);
        System.out.println("GetShadow request was rejected: code: " + response.code
                + " message: " + response.message);
        System.exit(1);
    }

    /** Applies a desired-state delta from the service to the local value. */
    static void onShadowDeltaUpdated(ShadowDeltaUpdatedEvent response) {
        System.out.println("Shadow delta updated");
        if (response.state != null && response.state.containsKey(SHADOW_PROPERTY)) {
            String value = response.state.get(SHADOW_PROPERTY).toString();
            System.out.println("  Delta wants to change value to '" + value + "'. Changing local value...");
            // Null-guarded: clientToken is optional in delta events and dereferencing
            // it unconditionally could throw NullPointerException.
            if (response.clientToken != null && !response.clientToken.isEmpty()) {
                System.out.print("  ClientToken: " + response.clientToken + "\n");
            }
            changeShadowValue(value);
        } else {
            System.out.println("  Delta did not report a change in " + SHADOW_PROPERTY);
        }
    }

    /** Logs the outcome of an accepted shadow update and unblocks the waiter. */
    static void onUpdateShadowAccepted(UpdateShadowResponse response) {
        if (response.state.reported != null) {
            if (response.state.reported.containsKey(SHADOW_PROPERTY)) {
                String value = response.state.reported.get(SHADOW_PROPERTY).toString();
                System.out.println("Shadow updated, value is " + value);
            } else {
                System.out.println("Shadow updated, value is Null");
            }
        } else {
            if (response.state.reportedIsNullable) {
                System.out.println("Shadow updated, reported and desired is null");
            } else {
                System.out.println("Shadow update, data cleared");
            }
        }
        gotResponse.complete(null);
    }

    /** A rejected update is fatal for this sample. */
    static void onUpdateShadowRejected(ErrorResponse response) {
        System.out.println("Shadow update was rejected: code: " + response.code
                + " message: " + response.message);
        System.exit(2);
    }

    /**
     * Publishes an UpdateShadow request moving the shadow property to
     * {@code value}. Two magic values are recognized: "clear_shadow" wipes both
     * reported and desired state, and "null" sets the desired property to JSON
     * null while leaving reported data intact.
     *
     * @return a future completed when the publish succeeds (or already-completed
     *         when the local value is unchanged)
     */
    static CompletableFuture<Void> changeShadowValue(String value) {
        if (localValue != null) {
            if (localValue.equals(value)) {
                System.out.println("Local value is already " + value);
                CompletableFuture<Void> result = new CompletableFuture<>();
                result.complete(null);
                return result;
            }
        }

        System.out.println("Changed local value to " + value);
        localValue = value;

        System.out.println("Updating shadow value to " + value);
        // build a request to let the service know our current value and desired value,
        // and that we only want
        // to update if the version matches the version we know about
        UpdateShadowRequest request = new UpdateShadowRequest();
        request.thingName = input_thingName;
        request.state = new ShadowState();

        if (value.equalsIgnoreCase("clear_shadow")) {
            request.state.desiredIsNullable = true;
            request.state.reportedIsNullable = true;
            request.state.desired = null;
            request.state.reported = null;
        } else if (value.equalsIgnoreCase("null")) {
            // A bit of a hack - we have to set reportedNullIsValid OR desiredNullIsValid
            // so the JSON formatter will allow null, otherwise null will always be
            // be converted to "null"
            // As long as we're passing a Hashmap that is NOT assigned to null, it will not
            // clear the data - so we pass an empty HashMap to avoid clearing data we want
            // to keep
            request.state.desiredIsNullable = true;
            request.state.reportedIsNullable = false;
            // We will only clear desired, so we need to pass an empty HashMap for reported
            request.state.reported = new HashMap<String, Object>() {
                {
                }
            };
            request.state.desired = new HashMap<String, Object>() {
                {
                    put(SHADOW_PROPERTY, null);
                }
            };
        } else {
            request.state.reported = new HashMap<String, Object>() {
                {
                    put(SHADOW_PROPERTY, value);
                }
            };
            request.state.desired = new HashMap<String, Object>() {
                {
                    put(SHADOW_PROPERTY, value);
                }
            };
        }

        // Publish the request
        return shadow.PublishUpdateShadow(request, QualityOfService.AT_LEAST_ONCE).thenRun(() -> {
            System.out.println("Update request published");
        }).exceptionally((ex) -> {
            System.out.println("Update request failed: " + ex.getMessage());
            System.exit(3);
            return null;
        });
    }

    public static void main(String[] args) {
        /**
         * cmdData is the arguments/input from the command line placed into a single
         * struct for
         * use in this sample. This handles all of the command line parsing, validating,
         * etc.
         * See the Utils/CommandLineUtils for more information.
         */
        CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("Shadow", args);

        input_thingName = cmdData.input_thingName;

        try {
            /**
             * Create the MQTT5 client from the builder
             */
            SampleLifecycleEvents lifecycleEvents = new SampleLifecycleEvents();
            AwsIotMqtt5ClientBuilder builder = AwsIotMqtt5ClientBuilder.newDirectMqttBuilderWithMtlsFromPath(
                    cmdData.input_endpoint, cmdData.input_cert, cmdData.input_key);
            ConnectPacket.ConnectPacketBuilder connectProperties = new ConnectPacket.ConnectPacketBuilder();
            connectProperties.withClientId(cmdData.input_clientId);
            builder.withConnectProperties(connectProperties);
            builder.withLifeCycleEvents(lifecycleEvents);
            Mqtt5Client client = builder.build();
            builder.close();

            MqttClientConnection connection = new MqttClientConnection(client, null);

            // Create the shadow client, IotShadowClient throws MqttException
            shadow = new IotShadowClient(connection);

            // Connect
            client.start();
            try {
                lifecycleEvents.connectedFuture.get(60, TimeUnit.SECONDS);
            } catch (Exception ex) {
                throw new RuntimeException("Exception occurred during connect", ex);
            }

            /**
             * Subscribe to shadow topics
             */
            System.out.println("Subscribing to shadow delta events...");
            ShadowDeltaUpdatedSubscriptionRequest requestShadowDeltaUpdated = new ShadowDeltaUpdatedSubscriptionRequest();
            requestShadowDeltaUpdated.thingName = input_thingName;
            CompletableFuture<Integer> subscribedToDeltas = shadow.SubscribeToShadowDeltaUpdatedEvents(
                    requestShadowDeltaUpdated,
                    QualityOfService.AT_LEAST_ONCE,
                    Mqtt5ShadowSample::onShadowDeltaUpdated);
            subscribedToDeltas.get();

            System.out.println("Subscribing to update responses...");
            UpdateShadowSubscriptionRequest requestUpdateShadow = new UpdateShadowSubscriptionRequest();
            requestUpdateShadow.thingName = input_thingName;
            CompletableFuture<Integer> subscribedToUpdateAccepted = shadow.SubscribeToUpdateShadowAccepted(
                    requestUpdateShadow,
                    QualityOfService.AT_LEAST_ONCE,
                    Mqtt5ShadowSample::onUpdateShadowAccepted);
            CompletableFuture<Integer> subscribedToUpdateRejected = shadow.SubscribeToUpdateShadowRejected(
                    requestUpdateShadow,
                    QualityOfService.AT_LEAST_ONCE,
                    Mqtt5ShadowSample::onUpdateShadowRejected);
            subscribedToUpdateAccepted.get();
            subscribedToUpdateRejected.get();

            System.out.println("Subscribing to get responses...");
            GetShadowSubscriptionRequest requestGetShadow = new GetShadowSubscriptionRequest();
            requestGetShadow.thingName = input_thingName;
            CompletableFuture<Integer> subscribedToGetShadowAccepted = shadow.SubscribeToGetShadowAccepted(
                    requestGetShadow,
                    QualityOfService.AT_LEAST_ONCE,
                    Mqtt5ShadowSample::onGetShadowAccepted);
            CompletableFuture<Integer> subscribedToGetShadowRejected = shadow.SubscribeToGetShadowRejected(
                    requestGetShadow,
                    QualityOfService.AT_LEAST_ONCE,
                    Mqtt5ShadowSample::onGetShadowRejected);
            subscribedToGetShadowAccepted.get();
            subscribedToGetShadowRejected.get();

            gotResponse = new CompletableFuture<>();

            System.out.println("Requesting current shadow state...");
            GetShadowRequest getShadowRequest = new GetShadowRequest();
            getShadowRequest.thingName = input_thingName;
            CompletableFuture<Integer> publishedGetShadow = shadow.PublishGetShadow(
                    getShadowRequest,
                    QualityOfService.AT_LEAST_ONCE);
            publishedGetShadow.get();
            gotResponse.get();

            // If this is not running in CI, then take input from the console
            if (!isCI) {
                String newValue = "";
                Scanner scanner = new Scanner(System.in);
                while (true) {
                    System.out.print(SHADOW_PROPERTY + "> ");
                    System.out.flush();
                    newValue = scanner.next();
                    if (newValue.equalsIgnoreCase("quit")) {
                        break;
                    }
                    gotResponse = new CompletableFuture<>();
                    changeShadowValue(newValue).get();
                    gotResponse.get();
                }
                scanner.close();
            }
            // If this is in running in CI, then automatically update the shadow
            else {
                int messages_sent = 0;
                String message_string = "";
                while (messages_sent < 5) {
                    gotResponse = new CompletableFuture<>();
                    message_string = "Shadow_Value_" + String.valueOf(messages_sent);
                    changeShadowValue(message_string).get();
                    gotResponse.get();
                    messages_sent += 1;
                }
            }

            // Disconnect
            client.stop(null);
            try {
                lifecycleEvents.stoppedFuture.get(60, TimeUnit.SECONDS);
            } catch (Exception ex) {
                System.out.println("Exception encountered: " + ex.toString());
                System.exit(1);
            }

            /* Close the client to free memory */
            connection.close();
            client.close();

        } catch (CrtRuntimeException | InterruptedException | ExecutionException ex) {
            System.out.println("Exception encountered: " + ex.toString());
            System.exit(1);
        }

        System.out.println("Complete!");
        CrtResource.waitForNoResources();
    }
}
482
0
Create_ds/aws-iot-device-sdk-java-v2/samples/Shadow/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/Shadow/src/main/java/shadow/ShadowSample.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package shadow; import software.amazon.awssdk.crt.CRT; import software.amazon.awssdk.crt.CrtResource; import software.amazon.awssdk.crt.CrtRuntimeException; import software.amazon.awssdk.crt.mqtt.MqttClientConnection; import software.amazon.awssdk.crt.mqtt.MqttClientConnectionEvents; import software.amazon.awssdk.crt.mqtt.QualityOfService; import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder; import software.amazon.awssdk.iot.iotshadow.IotShadowClient; import software.amazon.awssdk.iot.iotshadow.model.ErrorResponse; import software.amazon.awssdk.iot.iotshadow.model.GetShadowRequest; import software.amazon.awssdk.iot.iotshadow.model.GetShadowResponse; import software.amazon.awssdk.iot.iotshadow.model.GetShadowSubscriptionRequest; import software.amazon.awssdk.iot.iotshadow.model.ShadowDeltaUpdatedEvent; import software.amazon.awssdk.iot.iotshadow.model.ShadowDeltaUpdatedSubscriptionRequest; import software.amazon.awssdk.iot.iotshadow.model.ShadowState; import software.amazon.awssdk.iot.iotshadow.model.UpdateShadowRequest; import software.amazon.awssdk.iot.iotshadow.model.UpdateShadowResponse; import software.amazon.awssdk.iot.iotshadow.model.UpdateShadowSubscriptionRequest; import java.util.HashMap; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.Scanner; import utils.commandlineutils.CommandLineUtils; public class ShadowSample { // When run normally, we want to get input from the console // When run from CI, we want to automatically make changes to the shadow document static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); static String input_thingName; final static String SHADOW_PROPERTY = "color"; final static String SHADOW_VALUE_DEFAULT = "off"; static MqttClientConnection connection; static 
IotShadowClient shadow; static String localValue = null; static CompletableFuture<Void> gotResponse; static CommandLineUtils cmdUtils; static void onGetShadowAccepted(GetShadowResponse response) { System.out.println("Received initial shadow state"); if (response.state != null && localValue == null) { gotResponse.complete(null); if (response.state.delta != null) { String value = response.state.delta.get(SHADOW_PROPERTY).toString(); System.out.println(" Shadow delta value: " + value); return; } if (response.state.reported != null) { String value = response.state.reported.get(SHADOW_PROPERTY).toString(); System.out.println(" Shadow reported value: " + value); // Initialize local value to match the reported shadow value localValue = value; return; } } System.out.println(" Shadow document has no value for " + SHADOW_PROPERTY + ". Setting default..."); changeShadowValue(SHADOW_VALUE_DEFAULT); } static void onGetShadowRejected(ErrorResponse response) { if (response.code == 404) { System.out.println("Thing has no shadow document. Creating with defaults..."); changeShadowValue(SHADOW_VALUE_DEFAULT); return; } gotResponse.complete(null); System.out.println("GetShadow request was rejected: code: " + response.code + " message: " + response.message); System.exit(1); } static void onShadowDeltaUpdated(ShadowDeltaUpdatedEvent response) { System.out.println("Shadow delta updated"); if (response.state != null && response.state.containsKey(SHADOW_PROPERTY)) { String value = response.state.get(SHADOW_PROPERTY).toString(); System.out.println(" Delta wants to change value to '" + value + "'. 
Changing local value..."); if (!response.clientToken.isEmpty()) { System.out.print(" ClientToken: " + response.clientToken + "\n"); } changeShadowValue(value); } else { System.out.println(" Delta did not report a change in " + SHADOW_PROPERTY); } } static void onUpdateShadowAccepted(UpdateShadowResponse response) { if (response.state.reported != null) { if (response.state.reported.containsKey(SHADOW_PROPERTY)) { String value = response.state.reported.get(SHADOW_PROPERTY).toString(); System.out.println("Shadow updated, value is " + value); } else { System.out.println("Shadow updated, value is Null"); } } else { if (response.state.reportedIsNullable == true) { System.out.println("Shadow updated, reported and desired is null"); } else { System.out.println("Shadow update, data cleared"); } } gotResponse.complete(null); } static void onUpdateShadowRejected(ErrorResponse response) { System.out.println("Shadow update was rejected: code: " + response.code + " message: " + response.message); System.exit(2); } static CompletableFuture<Void> changeShadowValue(String value) { if (localValue != null) { if (localValue.equals(value)) { System.out.println("Local value is already " + value); CompletableFuture<Void> result = new CompletableFuture<>(); result.complete(null); return result; } } System.out.println("Changed local value to " + value); localValue = value; System.out.println("Updating shadow value to " + value); // build a request to let the service know our current value and desired value, and that we only want // to update if the version matches the version we know about UpdateShadowRequest request = new UpdateShadowRequest(); request.thingName = input_thingName; request.state = new ShadowState(); if (value.compareToIgnoreCase("clear_shadow") == 0) { request.state.desiredIsNullable = true; request.state.reportedIsNullable = true; request.state.desired = null; request.state.reported = null; } else if (value.compareToIgnoreCase("null") == 0) { // A bit of a hack - we have 
to set reportedNullIsValid OR desiredNullIsValid // so the JSON formatter will allow null , otherwise null will always be // be converted to "null" // As long as we're passing a Hashmap that is NOT assigned to null, it will not // clear the data - so we pass an empty HashMap to avoid clearing data we want to keep request.state.desiredIsNullable = true; request.state.reportedIsNullable = false; // We will only clear desired, so we need to pass an empty HashMap for reported request.state.reported = new HashMap<String, Object>() {{}}; request.state.desired = new HashMap<String, Object>() {{ put(SHADOW_PROPERTY, null); }}; } else { request.state.reported = new HashMap<String, Object>() {{ put(SHADOW_PROPERTY, value); }}; request.state.desired = new HashMap<String, Object>() {{ put(SHADOW_PROPERTY, value); }}; } // Publish the request return shadow.PublishUpdateShadow(request, QualityOfService.AT_LEAST_ONCE).thenRun(() -> { System.out.println("Update request published"); }).exceptionally((ex) -> { System.out.println("Update request failed: " + ex.getMessage()); System.exit(3); return null; }); } public static void main(String[] args) { /** * cmdData is the arguments/input from the command line placed into a single struct for * use in this sample. This handles all of the command line parsing, validating, etc. * See the Utils/CommandLineUtils for more information. */ CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("Shadow", args); input_thingName = cmdData.input_thingName; MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() { @Override public void onConnectionInterrupted(int errorCode) { if (errorCode != 0) { System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode)); } } @Override public void onConnectionResumed(boolean sessionPresent) { System.out.println("Connection resumed: " + (sessionPresent ? 
"existing session" : "clean session")); } }; try { /** * Create the MQTT connection from the builder */ AwsIotMqttConnectionBuilder builder = AwsIotMqttConnectionBuilder.newMtlsBuilderFromPath(cmdData.input_cert, cmdData.input_key); if (cmdData.input_ca != "") { builder.withCertificateAuthorityFromPath(null, cmdData.input_ca); } builder.withConnectionEventCallbacks(callbacks) .withClientId(cmdData.input_clientId) .withEndpoint(cmdData.input_endpoint) .withPort((short)cmdData.input_port) .withCleanSession(true) .withProtocolOperationTimeoutMs(60000); MqttClientConnection connection = builder.build(); builder.close(); // Create the shadow client shadow = new IotShadowClient(connection); // Connect CompletableFuture<Boolean> connected = connection.connect(); try { boolean sessionPresent = connected.get(); System.out.println("Connected to " + (!sessionPresent ? "clean" : "existing") + " session!"); } catch (Exception ex) { throw new RuntimeException("Exception occurred during connect", ex); } /** * Subscribe to shadow topics */ System.out.println("Subscribing to shadow delta events..."); ShadowDeltaUpdatedSubscriptionRequest requestShadowDeltaUpdated = new ShadowDeltaUpdatedSubscriptionRequest(); requestShadowDeltaUpdated.thingName = input_thingName; CompletableFuture<Integer> subscribedToDeltas = shadow.SubscribeToShadowDeltaUpdatedEvents( requestShadowDeltaUpdated, QualityOfService.AT_LEAST_ONCE, ShadowSample::onShadowDeltaUpdated); subscribedToDeltas.get(); System.out.println("Subscribing to update responses..."); UpdateShadowSubscriptionRequest requestUpdateShadow = new UpdateShadowSubscriptionRequest(); requestUpdateShadow.thingName = input_thingName; CompletableFuture<Integer> subscribedToUpdateAccepted = shadow.SubscribeToUpdateShadowAccepted( requestUpdateShadow, QualityOfService.AT_LEAST_ONCE, ShadowSample::onUpdateShadowAccepted); CompletableFuture<Integer> subscribedToUpdateRejected = shadow.SubscribeToUpdateShadowRejected( requestUpdateShadow, 
QualityOfService.AT_LEAST_ONCE, ShadowSample::onUpdateShadowRejected); subscribedToUpdateAccepted.get(); subscribedToUpdateRejected.get(); System.out.println("Subscribing to get responses..."); GetShadowSubscriptionRequest requestGetShadow = new GetShadowSubscriptionRequest(); requestGetShadow.thingName = input_thingName; CompletableFuture<Integer> subscribedToGetShadowAccepted = shadow.SubscribeToGetShadowAccepted( requestGetShadow, QualityOfService.AT_LEAST_ONCE, ShadowSample::onGetShadowAccepted); CompletableFuture<Integer> subscribedToGetShadowRejected = shadow.SubscribeToGetShadowRejected( requestGetShadow, QualityOfService.AT_LEAST_ONCE, ShadowSample::onGetShadowRejected); subscribedToGetShadowAccepted.get(); subscribedToGetShadowRejected.get(); gotResponse = new CompletableFuture<>(); System.out.println("Requesting current shadow state..."); GetShadowRequest getShadowRequest = new GetShadowRequest(); getShadowRequest.thingName = input_thingName; CompletableFuture<Integer> publishedGetShadow = shadow.PublishGetShadow( getShadowRequest, QualityOfService.AT_LEAST_ONCE); publishedGetShadow.get(); gotResponse.get(); // If this is not running in CI, then take input from the console if (isCI == false) { String newValue = ""; Scanner scanner = new Scanner(System.in); while (true) { System.out.print(SHADOW_PROPERTY + "> "); System.out.flush(); newValue = scanner.next(); if (newValue.compareToIgnoreCase("quit") == 0) { break; } gotResponse = new CompletableFuture<>(); changeShadowValue(newValue).get(); gotResponse.get(); } scanner.close(); } // If this is in running in CI, then automatically update the shadow else { int messages_sent = 0; String message_string = ""; while (messages_sent < 5) { gotResponse = new CompletableFuture<>(); message_string = "Shadow_Value_" + String.valueOf(messages_sent); changeShadowValue(message_string).get(); gotResponse.get(); messages_sent += 1; } } // Disconnect CompletableFuture<Void> disconnected = connection.disconnect(); 
disconnected.get(); // Close the connection now that we are completely done with it. connection.close(); } catch (CrtRuntimeException | InterruptedException | ExecutionException ex) { System.out.println("Exception encountered: " + ex.toString()); System.exit(1); } System.out.println("Complete!"); CrtResource.waitForNoResources(); } }
483
0
Create_ds/aws-iot-device-sdk-java-v2/samples/CustomAuthorizerConnect/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/CustomAuthorizerConnect/src/main/java/customauthorizerconnect/CustomAuthorizerConnect.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package customauthorizerconnect; import software.amazon.awssdk.crt.CRT; import software.amazon.awssdk.crt.CrtResource; import software.amazon.awssdk.crt.CrtRuntimeException; import software.amazon.awssdk.crt.mqtt.MqttClientConnection; import software.amazon.awssdk.crt.mqtt.MqttClientConnectionEvents; import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.io.UnsupportedEncodingException; import utils.commandlineutils.CommandLineUtils; public class CustomAuthorizerConnect { // When run normally, we want to exit nicely even if something goes wrong // When run from CI, we want to let an exception escape which in turn causes the // exec:java task to return a non-zero exit code static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); static CommandLineUtils cmdUtils; /* * When called during a CI run, throw an exception that will escape and fail the exec:java task * When called otherwise, print what went wrong (if anything) and just continue (return from main) */ static void onApplicationFailure(Throwable cause) { if (isCI) { throw new RuntimeException("CustomAuthorizerConnect execution failure", cause); } else if (cause != null) { System.out.println("Exception encountered: " + cause.toString()); } } public static void main(String[] args) { /** * cmdData is the arguments/input from the command line placed into a single struct for * use in this sample. This handles all of the command line parsing, validating, etc. * See the Utils/CommandLineUtils for more information. 
*/ CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("CustomAuthorizerConnect", args); MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() { @Override public void onConnectionInterrupted(int errorCode) { if (errorCode != 0) { System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode)); } } @Override public void onConnectionResumed(boolean sessionPresent) { System.out.println("Connection resumed: " + (sessionPresent ? "existing session" : "clean session")); } }; try { /** * Create the MQTT connection from the builder */ AwsIotMqttConnectionBuilder builder = AwsIotMqttConnectionBuilder.newDefaultBuilder(); builder.withConnectionEventCallbacks(callbacks) .withClientId(cmdData.input_clientId) .withEndpoint(cmdData.input_endpoint) .withCleanSession(true) .withProtocolOperationTimeoutMs(60000); builder.withCustomAuthorizer( cmdData.input_customAuthUsername, cmdData.input_customAuthorizerName, cmdData.input_customAuthorizerSignature, cmdData.input_customAuthPassword, cmdData.input_customAuthorizerTokenKeyName, cmdData.input_customAuthorizerTokenValue); MqttClientConnection connection = builder.build(); builder.close(); /** * Verify the connection was created */ if (connection == null) { onApplicationFailure(new RuntimeException("MQTT connection creation (through custom authorizer) failed!")); } /** * Connect and disconnect */ CompletableFuture<Boolean> connected = connection.connect(); try { boolean sessionPresent = connected.get(); System.out.println("Connected to " + (!sessionPresent ? "new" : "existing") + " session!"); } catch (Exception ex) { throw new RuntimeException("Exception occurred during connect", ex); } System.out.println("Disconnecting..."); CompletableFuture<Void> disconnected = connection.disconnect(); disconnected.get(); System.out.println("Disconnected."); // Close the connection now that we are completely done with it. 
connection.close(); } catch (CrtRuntimeException | UnsupportedEncodingException | InterruptedException | ExecutionException ex) { onApplicationFailure(ex); } CrtResource.waitForNoResources(); System.out.println("Complete!"); } }
484
0
Create_ds/aws-iot-device-sdk-java-v2/samples/X509CredentialsProviderConnect/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/X509CredentialsProviderConnect/src/main/java/x509credentialsproviderconnect/X509CredentialsProviderConnect.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package x509credentialsproviderconnect; import software.amazon.awssdk.crt.CRT; import software.amazon.awssdk.crt.CrtResource; import software.amazon.awssdk.crt.CrtRuntimeException; import software.amazon.awssdk.crt.mqtt.MqttClientConnection; import software.amazon.awssdk.crt.mqtt.MqttClientConnectionEvents; import software.amazon.awssdk.crt.http.HttpProxyOptions; import software.amazon.awssdk.crt.io.TlsContextOptions; import software.amazon.awssdk.crt.io.ClientTlsContext; import software.amazon.awssdk.crt.auth.credentials.X509CredentialsProvider; import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder; import java.util.concurrent.ExecutionException; import java.util.concurrent.CompletableFuture; import utils.commandlineutils.CommandLineUtils; public class X509CredentialsProviderConnect { // When run normally, we want to exit nicely even if something goes wrong // When run from CI, we want to let an exception escape which in turn causes the // exec:java task to return a non-zero exit code static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); static CommandLineUtils cmdUtils; /* * When called during a CI run, throw an exception that will escape and fail the exec:java task * When called otherwise, print what went wrong (if anything) and just continue (return from main) */ static void onApplicationFailure(Throwable cause) { if (isCI) { throw new RuntimeException("BasicConnect execution failure", cause); } else if (cause != null) { System.out.println("Exception encountered: " + cause.toString()); } } public static void main(String[] args) { /** * cmdData is the arguments/input from the command line placed into a single struct for * use in this sample. This handles all of the command line parsing, validating, etc. * See the Utils/CommandLineUtils for more information. 
*/ CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("x509CredentialsProviderConnect", args); MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() { @Override public void onConnectionInterrupted(int errorCode) { if (errorCode != 0) { System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode)); } } @Override public void onConnectionResumed(boolean sessionPresent) { System.out.println("Connection resumed: " + (sessionPresent ? "existing session" : "clean session")); } }; try { /** * Build the MQTT connection using the builder */ // ============================== AwsIotMqttConnectionBuilder builder = AwsIotMqttConnectionBuilder.newMtlsBuilderFromPath(null, null); if (cmdData.input_ca != "") { builder.withCertificateAuthorityFromPath(null, cmdData.input_ca); } builder.withConnectionEventCallbacks(callbacks) .withClientId(cmdData.input_clientId) .withEndpoint(cmdData.input_endpoint) .withPort((short)cmdData.input_port) .withCleanSession(true) .withProtocolOperationTimeoutMs(60000); HttpProxyOptions proxyOptions = null; if (cmdData.input_proxyHost != "" && cmdData.input_proxyPort > 0) { proxyOptions = new HttpProxyOptions(); proxyOptions.setHost(cmdData.input_proxyHost); proxyOptions.setPort(cmdData.input_proxyPort); builder.withHttpProxyOptions(proxyOptions); } builder.withWebsockets(true); builder.withWebsocketSigningRegion(cmdData.input_signingRegion); TlsContextOptions x509TlsOptions = TlsContextOptions.createWithMtlsFromPath(cmdData.input_x509Cert, cmdData.input_x509Key); if (cmdData.input_x509Ca != null) { x509TlsOptions.withCertificateAuthorityFromPath(null, cmdData.input_x509Ca); } ClientTlsContext x509TlsContext = new ClientTlsContext(x509TlsOptions); X509CredentialsProvider.X509CredentialsProviderBuilder x509builder = new X509CredentialsProvider.X509CredentialsProviderBuilder() .withTlsContext(x509TlsContext) .withEndpoint(cmdData.input_x509Endpoint) 
.withRoleAlias(cmdData.input_x509Role) .withThingName(cmdData.input_x509ThingName) .withProxyOptions(proxyOptions); X509CredentialsProvider provider = x509builder.build(); builder.withWebsocketCredentialsProvider(provider); MqttClientConnection connection = builder.build(); builder.close(); provider.close(); if (connection == null) { onApplicationFailure(new RuntimeException("MQTT connection creation failed!")); } // ============================== /** * Connect and disconnect */ CompletableFuture<Boolean> connected = connection.connect(); try { boolean sessionPresent = connected.get(); System.out.println("Connected to " + (!sessionPresent ? "new" : "existing") + " session!"); } catch (Exception ex) { throw new RuntimeException("Exception occurred during connect", ex); } System.out.println("Disconnecting..."); CompletableFuture<Void> disconnected = connection.disconnect(); disconnected.get(); System.out.println("Disconnected."); /** * Close the connection now that it is complete */ connection.close(); } catch (CrtRuntimeException | InterruptedException | ExecutionException ex) { onApplicationFailure(ex); } CrtResource.waitForNoResources(); System.out.println("Complete!"); } }
485
0
Create_ds/aws-iot-device-sdk-java-v2/samples/BasicConnect/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/BasicConnect/src/main/java/basicconnect/BasicConnect.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package basicconnect; import software.amazon.awssdk.crt.CRT; import software.amazon.awssdk.crt.CrtResource; import software.amazon.awssdk.crt.CrtRuntimeException; import software.amazon.awssdk.crt.mqtt.MqttClientConnection; import software.amazon.awssdk.crt.mqtt.MqttClientConnectionEvents; import software.amazon.awssdk.crt.http.HttpProxyOptions; import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder; import java.util.concurrent.ExecutionException; import java.util.concurrent.CompletableFuture; import utils.commandlineutils.CommandLineUtils; public class BasicConnect { // When run normally, we want to exit nicely even if something goes wrong. // When run from CI, we want to let an exception escape which in turn causes the // exec:java task to return a non-zero exit code. static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); static CommandLineUtils cmdUtils; /* * When called during a CI run, throw an exception that will escape and fail the exec:java task * When called otherwise, print what went wrong (if anything) and just continue (return from main) */ static void onApplicationFailure(Throwable cause) { if (isCI) { throw new RuntimeException("BasicConnect execution failure", cause); } else if (cause != null) { System.out.println("Exception encountered: " + cause.toString()); } } public static void main(String[] args) { /** * cmdData is the arguments/input from the command line placed into a single struct for * use in this sample. This handles all of the command line parsing, validating, etc. * See the Utils/CommandLineUtils for more information. 
*/ CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("BasicConnect", args); MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() { @Override public void onConnectionInterrupted(int errorCode) { if (errorCode != 0) { System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode)); } } @Override public void onConnectionResumed(boolean sessionPresent) { System.out.println("Connection resumed: " + (sessionPresent ? "existing session" : "clean session")); } }; try { /** * Create the MQTT connection from the builder */ AwsIotMqttConnectionBuilder builder = AwsIotMqttConnectionBuilder.newMtlsBuilderFromPath(cmdData.input_cert, cmdData.input_key); if (cmdData.input_ca != "") { builder.withCertificateAuthorityFromPath(null, cmdData.input_ca); } builder.withConnectionEventCallbacks(callbacks) .withClientId(cmdData.input_clientId) .withEndpoint(cmdData.input_endpoint) .withPort((short)cmdData.input_port) .withCleanSession(true) .withProtocolOperationTimeoutMs(60000); if (cmdData.input_proxyHost != "" && cmdData.input_proxyPort > 0) { HttpProxyOptions proxyOptions = new HttpProxyOptions(); proxyOptions.setHost(cmdData.input_proxyHost); proxyOptions.setPort(cmdData.input_proxyPort); builder.withHttpProxyOptions(proxyOptions); } MqttClientConnection connection = builder.build(); builder.close(); /** * Verify the connection was created */ if (connection == null) { onApplicationFailure(new RuntimeException("MQTT connection creation failed!")); } /** * Connect and disconnect */ CompletableFuture<Boolean> connected = connection.connect(); try { boolean sessionPresent = connected.get(); System.out.println("Connected to " + (!sessionPresent ? 
"new" : "existing") + " session!"); } catch (Exception ex) { throw new RuntimeException("Exception occurred during connect", ex); } System.out.println("Disconnecting..."); CompletableFuture<Void> disconnected = connection.disconnect(); disconnected.get(); System.out.println("Disconnected."); /** * Close the connection now that it is complete */ connection.close(); } catch (CrtRuntimeException | InterruptedException | ExecutionException ex) { onApplicationFailure(ex); } CrtResource.waitForNoResources(); System.out.println("Complete!"); } }
486
0
Create_ds/aws-iot-device-sdk-java-v2/samples/GreengrassIPC/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/GreengrassIPC/src/main/java/greengrass/GreengrassIPC.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ /** * This sample uses AWS IoT Greengrass V2 to publish messages from the Greengrass device to the * AWS IoT MQTT broker. * * This sample can be deployed as a Greengrass V2 component and it will publish 10 MQTT messages * over the course of 10 seconds. The IPC integration with Greengrass V2 allows this code to run * without additional IoT certificates or secrets, because it directly communicates with the * Greengrass core running on the device. As such, to run this sample you need Greengrass Core running. * * For more information, see the samples README.md file at: * https://github.com/aws/aws-iot-device-sdk-python-v2/tree/main/samples */ package greengrass; import java.nio.charset.StandardCharsets; import java.time.*; import java.time.format.DateTimeFormatter; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import software.amazon.awssdk.aws.greengrass.GreengrassCoreIPCClientV2; import software.amazon.awssdk.aws.greengrass.model.PublishToIoTCoreRequest; import software.amazon.awssdk.aws.greengrass.model.PublishToIoTCoreResponse; import software.amazon.awssdk.aws.greengrass.model.QOS; class GreengrassIPC { // When run normally, we want to exit nicely even if something goes wrong // When run from CI, we want to let an exception escape which in turn causes the // exec:java task to return a non-zero exit code static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); // Some constants for the payload data we send via IPC static double payloadBatteryStateCharge = 42.5; static double payloadLocationLongitude = 48.15743; static double payloadLocationLatitude = 11.57549; // The number of IPC messages to send static int sampleMessageCount = 10; static int sampleMessagesSent = 0; /* * When called during a CI run, throw an exception that will escape 
and fail the exec:java task * When called otherwise, print what went wrong (if anything) and just continue (return from main) */ static void onApplicationFailure(Throwable cause) { if (isCI) { throw new RuntimeException("BasicPubSub execution failure", cause); } else if (cause != null) { System.out.println("Exception encountered: " + cause.toString()); } } /** * A simple helper function to print a message when running locally (will not print in CI) */ static void logMessage(String message) { if (!isCI) { System.out.println(message); } } /** * A helper function to generate a JSON payload to send via IPC to simplify/separate sample code. */ public static String getIpcPayloadString() { // Get the current time as a formatted string String timestamp = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH:mm:ss").format(LocalDateTime.now()); // Construct a JSON string with the data StringBuilder builder = new StringBuilder(); builder.append("{"); builder.append("\"timestamp\":\"" + timestamp + "\","); builder.append("\"battery_state_of_charge\":" + payloadBatteryStateCharge + ","); builder.append("\"location\": {"); builder.append("\"longitude\":" + payloadLocationLongitude + ","); builder.append("\"latitude\":" + payloadLocationLatitude); builder.append("}"); builder.append("}"); return builder.toString(); } public static void main(String[] args) { logMessage("Greengrass IPC sample start"); // Create the Greengrass IPC client GreengrassCoreIPCClientV2 ipcClient = null; try { ipcClient = GreengrassCoreIPCClientV2.builder().build(); } catch (Exception ex) { logMessage("Failed to create Greengrass IPC client!"); onApplicationFailure(ex); System.exit(-1); } if (ipcClient == null) { logMessage("Failed to create Greengrass IPC client!"); onApplicationFailure(new Throwable("Error - IPC client not initialized!")); System.exit(-1); } // Create the topic name String topicNameFromEnv = System.getenv("AWS_IOT_THING_NAME"); if (topicNameFromEnv == null) { logMessage("Could not get IoT Thing 
name from AWS_IOT_THING_NAME. Using name 'TestThing'..."); topicNameFromEnv = "TestThing"; } String topicName = String.format("my/iot/%s/telementry", topicNameFromEnv); // Create the IPC request, except the payload. The payload will be created right before sending. PublishToIoTCoreRequest publishRequest = new PublishToIoTCoreRequest(); publishRequest.setQos(QOS.AT_LEAST_ONCE); publishRequest.setTopicName(topicName); try { logMessage("Will attempt to send " + sampleMessageCount + " IPC publishes to IoT Core"); while (sampleMessagesSent < sampleMessageCount) { logMessage("Sending message " + sampleMessagesSent++ + "..."); // Get the new IPC payload publishRequest.withPayload(getIpcPayloadString().getBytes(StandardCharsets.UTF_8)); CompletableFuture<PublishToIoTCoreResponse> publishFuture = ipcClient.publishToIoTCoreAsync(publishRequest); // Try to send the IPC message try { publishFuture.get(60, TimeUnit.SECONDS); logMessage("Successfully published IPC message to IoT Core"); } catch (Exception ex) { logMessage("Failed to publish IPC message to IoT Core"); } // Sleep for a second Thread.sleep(1000); } logMessage("All publishes sent. Finishing sample..."); ipcClient.close(); } catch (Exception ex) { logMessage("Something in Greengrass IPC sample failed by throwing an exception! Shutting down sample..."); onApplicationFailure(ex); try { ipcClient.close(); } catch (Exception closeEx) { onApplicationFailure(closeEx); } logMessage("Greengrass IPC sample finished with error"); System.exit(-1); } logMessage("Greengrass IPC sample finished"); System.exit(0); } }
487
0
Create_ds/aws-iot-device-sdk-java-v2/samples/Pkcs12Connect/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/Pkcs12Connect/src/main/java/pkcs12connect/Pkcs12Connect.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package pkcs12connect; import software.amazon.awssdk.crt.CRT; import software.amazon.awssdk.crt.CrtResource; import software.amazon.awssdk.crt.CrtRuntimeException; import software.amazon.awssdk.crt.mqtt.MqttClientConnection; import software.amazon.awssdk.crt.mqtt.MqttClientConnectionEvents; import software.amazon.awssdk.crt.http.HttpProxyOptions; import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder; import java.util.concurrent.ExecutionException; import java.util.concurrent.CompletableFuture; import utils.commandlineutils.CommandLineUtils; public class Pkcs12Connect { // When run normally, we want to exit nicely even if something goes wrong. // When run from CI, we want to let an exception escape which in turn causes the // exec:java task to return a non-zero exit code. static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); static CommandLineUtils cmdUtils; /* * When called during a CI run, throw an exception that will escape and fail the exec:java task * When called otherwise, print what went wrong (if anything) and just continue (return from main) */ static void onApplicationFailure(Throwable cause) { if (isCI) { throw new RuntimeException("Pkcs12Connect execution failure", cause); } else if (cause != null) { System.out.println("Exception encountered: " + cause.toString()); } } public static void main(String[] args) { /** * cmdData is the arguments/input from the command line placed into a single struct for * use in this sample. This handles all of the command line parsing, validating, etc. * See the Utils/CommandLineUtils for more information. 
*/ CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("Pkcs12Connect", args); MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() { @Override public void onConnectionInterrupted(int errorCode) { if (errorCode != 0) { System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode)); } } @Override public void onConnectionResumed(boolean sessionPresent) { System.out.println("Connection resumed: " + (sessionPresent ? "existing session" : "clean session")); } }; try { /** * Create the MQTT connection from the builder */ AwsIotMqttConnectionBuilder builder = AwsIotMqttConnectionBuilder.newMtlsPkcs12Builder(cmdData.input_pkcs12File, cmdData.input_pkcs12Password); if (cmdData.input_ca != "") { builder.withCertificateAuthorityFromPath(null, cmdData.input_ca); } builder.withConnectionEventCallbacks(callbacks) .withClientId(cmdData.input_clientId) .withEndpoint(cmdData.input_endpoint) .withPort((short)cmdData.input_port) .withCleanSession(true) .withProtocolOperationTimeoutMs(60000); MqttClientConnection connection = builder.build(); builder.close(); /** * Verify the connection was created */ if (connection == null) { onApplicationFailure(new RuntimeException("MQTT connection creation failed!")); } /** * Connect and disconnect */ CompletableFuture<Boolean> connected = connection.connect(); try { boolean sessionPresent = connected.get(); System.out.println("Connected to " + (!sessionPresent ? 
"new" : "existing") + " session!"); } catch (Exception ex) { throw new RuntimeException("Exception occurred during connect", ex); } System.out.println("Disconnecting..."); CompletableFuture<Void> disconnected = connection.disconnect(); disconnected.get(); System.out.println("Disconnected."); /** * Close the connection now that it is complete */ connection.close(); } catch (CrtRuntimeException | InterruptedException | ExecutionException ex) { onApplicationFailure(ex); } CrtResource.waitForNoResources(); System.out.println("Complete!"); } }
488
0
Create_ds/aws-iot-device-sdk-java-v2/samples/Utils/CommandLineUtils/utils
Create_ds/aws-iot-device-sdk-java-v2/samples/Utils/CommandLineUtils/utils/commandlineutils/CommandLineUtils.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package utils.commandlineutils; import java.util.*; import java.util.concurrent.ExecutionException; import java.util.concurrent.CompletableFuture; import java.io.UnsupportedEncodingException; import software.amazon.awssdk.crt.*; import software.amazon.awssdk.crt.io.*; import software.amazon.awssdk.crt.mqtt.*; import software.amazon.awssdk.crt.mqtt5.*; import software.amazon.awssdk.crt.mqtt5.packets.*; import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder; import software.amazon.awssdk.iot.AwsIotMqtt5ClientBuilder; import software.amazon.awssdk.crt.http.HttpProxyOptions; import software.amazon.awssdk.crt.auth.credentials.X509CredentialsProvider; import software.amazon.awssdk.crt.auth.credentials.CognitoCredentialsProvider; import software.amazon.awssdk.crt.Log; import software.amazon.awssdk.crt.Log.LogLevel; public class CommandLineUtils { private String programName; private final HashMap<String, CommandLineOption> registeredCommands = new HashMap<>(); private List<String> commandArguments; private boolean isCI; /** * Functions for registering and command line arguments */ public void registerProgramName(String newProgramName) { programName = newProgramName; } public void registerCommand(CommandLineOption option) { if (registeredCommands.containsKey(option.commandName)) { System.out.println("Cannot register command: " + option.commandName + ". 
Command already registered"); return; } registeredCommands.put(option.commandName, option); } public void registerCommand(String commandName, String exampleInput, String helpOutput) { registerCommand(new CommandLineOption(commandName, exampleInput, helpOutput)); } public void removeCommand(String commandName) { registeredCommands.remove(commandName); } public void updateCommandHelp(String commandName, String newCommandHelp) { if (registeredCommands.containsKey(commandName)) { registeredCommands.get(commandName).helpOutput = newCommandHelp; } } public void sendArguments(String[] arguments) { // Automatically register the help command registerCommand(m_cmd_help, "", "Prints this message"); commandArguments = Arrays.asList(arguments); // Automatically check for help and print if present if (hasCommand(m_cmd_help)) { printHelp(); if (isCI == true) { throw new RuntimeException("Help argument called"); } else { System.exit(-1); } } } public boolean hasCommand(String command) { return commandArguments.contains("--" + command); } public String getCommand(String command) { for (Iterator<String> iter = commandArguments.iterator(); iter.hasNext();) { String value = iter.next(); if (Objects.equals(value,"--" + command)) { if (iter.hasNext()) { return iter.next(); } else { System.out.println("Error - found command but at end of arguments!\n"); return ""; } } } return ""; } public String getCommandOrDefault(String command, String commandDefault) { if (commandArguments.contains("--" + command)) { return getCommand(command); } return commandDefault; } public String getCommandRequired(String command) { if (commandArguments.contains("--" + command)) { return getCommand(command); } printHelp(); System.out.println("Missing required argument: --" + command + "\n"); if (isCI == true) { throw new RuntimeException("Missing required argument"); } else { System.exit(-1); } return ""; } public String getCommandRequired(String command, String commandAlt){ if(commandArguments.contains("--" + 
commandAlt)){ return getCommand(commandAlt); } return getCommandRequired(command); } public void printHelp() { System.out.println("Usage:"); String messageOne = programName; for (String commandName : registeredCommands.keySet()) { messageOne += " --" + commandName + " " + registeredCommands.get(commandName).exampleInput; } System.out.println(messageOne + "\n"); for (String commandName : registeredCommands.keySet()) { messageOne += " --" + commandName + " " + registeredCommands.get(commandName).exampleInput; System.out.println("* " + commandName + "\t\t" + registeredCommands.get(commandName).helpOutput); } } public void determineIfCI() { String ciPropValue = System.getProperty("aws.crt.ci"); isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); } /** * Helper functions for registering commands */ public void addCommonLoggingCommands() { registerCommand(m_cmd_verbosity, "<str>", "The amount of detail in the logging output of the sample." + " Options: 'Fatal', 'Error', 'Warn', 'Info', 'Debug', 'Trace' or 'None' (optional, default='None')."); registerCommand(m_cmd_log_destination, "<str>", "Where logging should be routed to." + " Options: 'Stdout', 'Stderr', 'File' (optional, default='Stderr')."); registerCommand(m_cmd_log_file_name, "<str>", "File name to save logging to." 
+ " (optional, default='log.txt')."); } public void addClientIdAndPort() { registerCommand(m_cmd_client_id, "<int>", "Client id to use (optional, default='test-*')."); registerCommand(m_cmd_port, "<int>", "Port to connect to on the endpoint (optional, default='8883')."); } public void addCommonMQTTCommands() { registerCommand(m_cmd_endpoint, "<str>", "The endpoint of the mqtt server, not including a port."); registerCommand(m_cmd_ca_file, "<path>", "Path to AmazonRootCA1.pem (optional, system trust store used by default)."); } public void addCommonProxyCommands() { registerCommand(m_cmd_proxy_host, "<str>", "Websocket proxy host to use (optional, required if --proxy_port is set)."); registerCommand(m_cmd_proxy_port, "<int>", "Websocket proxy port to use (optional, default=8080, required if --proxy_host is set)."); } public void addCommonX509Commands() { registerCommand( m_cmd_x509_role, "<str>", "Role alias to use with the x509 credentials provider (required for x509)"); registerCommand(m_cmd_x509_endpoint, "<str>", "The credentials endpoint to fetch x509 credentials from (required for x509)"); registerCommand( m_cmd_x509_thing_name, "<str>", "Thing name to fetch x509 credentials on behalf of (required for x509)"); registerCommand( m_cmd_x509_cert_file, "<path>", "Path to the IoT thing certificate used in fetching x509 credentials (required for x509)"); registerCommand( m_cmd_x509_key_file, "<path>", "Path to the IoT thing private key used in fetching x509 credentials (required for x509)"); registerCommand( m_cmd_x509_ca_file, "<path>", "Path to the root certificate used in fetching x509 credentials (required for x509)"); } public void addCommonTopicMessageCommands() { registerCommand(m_cmd_message, "<str>", "The message to send in the payload (optional, default='Hello world!')"); registerCommand(m_cmd_topic, "<str>", "Topic to publish, subscribe to. 
(optional, default='test/topic')"); registerCommand(m_cmd_count, "<int>", "Number of messages to publish (optional, default='10')."); } public void addKeyAndCertCommands() { registerCommand(m_cmd_key_file, "<path>", "Path to your key in PEM format."); registerCommand(m_cmd_cert_file, "<path>", "Path to your client certificate in PEM format."); } /** * Helper functions for parsing commands */ private void parseCommonLoggingCommands(SampleCommandLineData returnData){ String verbosity = getCommandOrDefault(m_cmd_verbosity, "None"); String log_destination = getCommandOrDefault(m_cmd_log_destination, "Stderr"); String log_file_name = getCommandOrDefault(m_cmd_log_file_name, "log.txt"); if(verbosity != "None"){ switch (log_destination) { case "Stderr": Log.initLoggingToStderr(LogLevel.valueOf(verbosity)); break; case "Stdout": Log.initLoggingToStdout(LogLevel.valueOf(verbosity)); break; case "File": Log.initLoggingToFile(LogLevel.valueOf(verbosity), log_file_name); break; default: break; } } } private void parseCommonMQTTCommands(SampleCommandLineData returnData) { returnData.input_endpoint = getCommandRequired(m_cmd_endpoint); returnData.input_ca = getCommandOrDefault(m_cmd_ca_file, ""); } private void parseKeyAndCertCommands(SampleCommandLineData returnData) { returnData.input_cert = getCommandRequired(m_cmd_cert_file); returnData.input_key = getCommandRequired(m_cmd_key_file); } private void parseClientIdAndPort(SampleCommandLineData returnData) { returnData.input_clientId = getCommandOrDefault(m_cmd_client_id, "test-" + UUID.randomUUID().toString()); returnData.input_port = Integer.parseInt(getCommandOrDefault(m_cmd_port, "8883")); } private void parseCommonTopicMessageCommands(SampleCommandLineData returnData) { if (isCI == true) { returnData.input_topic = getCommandOrDefault(m_cmd_topic, "test/topic/" + UUID.randomUUID().toString()); returnData.input_message = getCommandOrDefault(m_cmd_message, "Hello World!"); } else { returnData.input_topic = 
getCommandOrDefault(m_cmd_topic, "test/topic"); returnData.input_message = getCommandOrDefault(m_cmd_message, "Hello World!"); } returnData.input_count = Integer.parseInt(getCommandOrDefault(m_cmd_count, "10")); } private void parseCommonProxyCommands(SampleCommandLineData returnData) { returnData.input_proxyHost = getCommandOrDefault(m_cmd_proxy_host, ""); returnData.input_proxyPort = Integer.parseInt(getCommandOrDefault(m_cmd_proxy_port, "0")); } private void parseCommonX509Commands(SampleCommandLineData returnData) { returnData.input_x509Endpoint = getCommandRequired(m_cmd_x509_endpoint); returnData.input_x509Role = getCommandRequired(m_cmd_x509_role); returnData.input_x509ThingName = getCommandRequired(m_cmd_x509_thing_name); returnData.input_x509Cert = getCommandRequired(m_cmd_x509_cert_file); returnData.input_x509Key = getCommandRequired(m_cmd_x509_key_file); returnData.input_x509Ca = getCommandOrDefault(m_cmd_x509_ca_file, null); } /** * Functions to register commands on a per-sample basis, as well as getting a struct containing all the data */ public class SampleCommandLineData { // General use public String input_endpoint; public String input_cert; public String input_key; public String input_ca; public String input_clientId; public int input_port; // Proxy public String input_proxyHost; public int input_proxyPort; // PubSub public String input_topic; public String input_message; public int input_count; // Websockets public String input_signingRegion; // Cognito public String input_cognitoIdentity; // Custom auth public String input_customAuthUsername; public String input_customAuthorizerName; public String input_customAuthorizerSignature; public String input_customAuthPassword; public String input_customAuthorizerTokenKeyName; public String input_customAuthorizerTokenValue; // Fleet provisioning public String input_templateName; public String input_templateParameters; public String input_csrPath; // Services (Shadow, Jobs, Greengrass, etc) public String 
input_thingName; public String input_mode; // Java Keystore public String input_keystore; public String input_keystorePassword; public String input_keystoreFormat; public String input_certificateAlias; public String input_certificatePassword; // Shared Subscription public String input_groupIdentifier; // PKCS#11 public String input_pkcs11LibPath; public String input_pkcs11UserPin; public String input_pkcs11TokenLabel; public Long input_pkcs11SlotId; public String input_pkcs11KeyLabel; // Raw Connect public String input_username; public String input_password; public String input_protocolName; public List<String> input_authParams; // X509 public String input_x509Endpoint; public String input_x509Role; public String input_x509ThingName; public String input_x509Cert; public String input_x509Key; public String input_x509Ca; // PKCS12 public String input_pkcs12File; public String input_pkcs12Password; // Greengrass Basic Discovery public Boolean inputPrintDiscoverRespOnly; } public SampleCommandLineData parseSampleInputBasicConnect(String[] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); addCommonProxyCommands(); addKeyAndCertCommands(); addClientIdAndPort(); sendArguments(args); SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); parseCommonProxyCommands(returnData); parseKeyAndCertCommands(returnData); parseClientIdAndPort(returnData); return returnData; } public SampleCommandLineData parseSampleInputPubSub(String [] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); addCommonTopicMessageCommands(); addKeyAndCertCommands(); addCommonProxyCommands(); addClientIdAndPort(); sendArguments(args); SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); parseKeyAndCertCommands(returnData); parseCommonTopicMessageCommands(returnData); parseCommonProxyCommands(returnData); 
parseClientIdAndPort(returnData); return returnData; } public SampleCommandLineData parseSampleInputCognitoConnect(String [] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); registerCommand(m_cmd_signing_region, "<str>", "AWS IoT service region."); registerCommand(m_cmd_client_id, "<int>", "Client id to use (optional, default='test-*')."); registerCommand(m_cmd_cognito_identity, "<str>", "The Cognito identity ID to use to connect via Cognito"); sendArguments(args); SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); returnData.input_signingRegion = getCommandRequired(m_cmd_signing_region, m_cmd_region); returnData.input_clientId = getCommandOrDefault(m_cmd_client_id, "test-" + UUID.randomUUID().toString()); returnData.input_cognitoIdentity = getCommandRequired(m_cmd_cognito_identity); return returnData; } public SampleCommandLineData parseSampleInputCustomAuthorizerConnect(String [] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); registerCommand(m_cmd_client_id, "<int>", "Client id to use (optional, default='test-*')."); registerCommand(m_cmd_custom_auth_username, "<str>", "Username for connecting to custom authorizer (optional, default=null)."); registerCommand(m_cmd_custom_auth_authorizer_name, "<str>", "Name of custom authorizer (optional, default=null)."); registerCommand(m_cmd_custom_auth_authorizer_signature, "<str>", "Signature passed when connecting to custom authorizer (optional, default=null)."); registerCommand(m_cmd_custom_auth_password, "<str>", "Password for connecting to custom authorizer (optional, default=null)."); registerCommand(m_cmd_custom_auth_token_key_name, "<str>", "Key used to extract the custom authorizer token (optional, default=null)."); registerCommand(m_cmd_custom_auth_token_value, "<str>", "The opaque token value for the custom authorizer (optional, default=null)."); sendArguments(args); SampleCommandLineData returnData = 
new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); returnData.input_clientId = getCommandOrDefault(m_cmd_client_id, "test-" + UUID.randomUUID().toString()); returnData.input_customAuthUsername = getCommandOrDefault(m_cmd_custom_auth_username, null); returnData.input_customAuthorizerName = getCommandOrDefault(m_cmd_custom_auth_authorizer_name, null); returnData.input_customAuthorizerSignature = getCommandOrDefault(m_cmd_custom_auth_authorizer_signature, null); returnData.input_customAuthPassword = getCommandOrDefault(m_cmd_custom_auth_password, null); returnData.input_customAuthorizerTokenKeyName = getCommandOrDefault(m_cmd_custom_auth_token_key_name, null); returnData.input_customAuthorizerTokenValue = getCommandOrDefault(m_cmd_custom_auth_token_value, null); return returnData; } public SampleCommandLineData parseSampleInputCustomKeyOpsConnect(String [] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); addKeyAndCertCommands(); addClientIdAndPort(); sendArguments(args); SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); parseKeyAndCertCommands(returnData); parseClientIdAndPort(returnData); return returnData; } public SampleCommandLineData parseSampleInputFleetProvisioning(String [] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); addKeyAndCertCommands(); addClientIdAndPort(); registerCommand(m_cmd_fleet_template_name, "<str>", "Provisioning template name."); registerCommand(m_cmd_fleet_template_parameters, "<json>", "Provisioning template parameters."); registerCommand(m_cmd_fleet_template_csr, "<path>", "Path to the CSR file (optional)."); sendArguments(args); SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); parseKeyAndCertCommands(returnData); parseClientIdAndPort(returnData); returnData.input_templateName = 
getCommandRequired(m_cmd_fleet_template_name); returnData.input_templateParameters = getCommandRequired(m_cmd_fleet_template_parameters); returnData.input_csrPath = getCommandOrDefault(m_cmd_fleet_template_csr, null); return returnData; } public SampleCommandLineData parseSampleInputGreengrassDiscovery(String [] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); removeCommand(m_cmd_endpoint); addKeyAndCertCommands(); addCommonProxyCommands(); registerCommand(m_cmd_region, "<str>", "AWS IoT service region (optional, default='us-east-1')."); registerCommand(m_cmd_thing_name, "<str>", "The name of the IoT thing."); registerCommand(m_cmd_topic, "<str>", "Topic to subscribe/publish to (optional, default='test/topic')."); registerCommand(m_cmd_mode, "<str>", "Mode options: 'both', 'publish', or 'subscribe' (optional, default='both')."); registerCommand(m_cmd_print_discover_resp_only, "<str>", "Exists the sample after printing the discovery result (optional, default='False')"); sendArguments(args); SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseKeyAndCertCommands(returnData); returnData.input_ca = getCommandOrDefault(m_cmd_ca_file, null); returnData.input_thingName = getCommandRequired(m_cmd_thing_name); returnData.input_signingRegion = getCommandRequired(m_cmd_region, m_cmd_signing_region); returnData.input_topic = getCommandOrDefault(m_cmd_topic, "test/topic"); returnData.input_mode = getCommandOrDefault(m_cmd_mode, "Hello World!"); returnData.input_proxyHost = getCommandOrDefault(m_cmd_proxy_host, ""); returnData.input_proxyPort = Integer.parseInt(getCommandOrDefault(m_cmd_proxy_port, "0")); returnData.inputPrintDiscoverRespOnly = hasCommand(m_cmd_print_discover_resp_only); return returnData; } public SampleCommandLineData parseSampleInputKeystoreConnect(String [] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); addCommonProxyCommands(); addClientIdAndPort(); 
registerCommand(m_cmd_javakeystore_path, "<file>", "The path to the Java keystore to use"); registerCommand(m_cmd_javakeystore_password, "<str>", "The password for the Java keystore"); registerCommand(m_cmd_javakeystore_format, "<str>", "The format of the Java keystore (optional, default='PKCS12')"); registerCommand(m_cmd_javakeystore_certificate, "<str>", "The certificate alias to use to access the key and certificate in the Java keystore"); registerCommand(m_cmd_javakeystore_key_password, "<str>", "The password associated with the key and certificate in the Java keystore"); sendArguments(args); SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); parseCommonProxyCommands(returnData); parseClientIdAndPort(returnData); returnData.input_keystore = getCommandRequired(m_cmd_javakeystore_path); returnData.input_keystorePassword = getCommandRequired(m_cmd_javakeystore_password); returnData.input_keystoreFormat = getCommandOrDefault(m_cmd_javakeystore_format, "PKCS12"); returnData.input_certificateAlias = getCommandRequired(m_cmd_javakeystore_certificate); returnData.input_certificatePassword = getCommandRequired(m_cmd_javakeystore_key_password); return returnData; } public SampleCommandLineData parseSampleInputJobs(String[] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); addKeyAndCertCommands(); addClientIdAndPort(); registerCommand(m_cmd_thing_name, "<str>", "The name of the IoT thing."); sendArguments(args); SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); parseKeyAndCertCommands(returnData); parseClientIdAndPort(returnData); returnData.input_thingName = getCommandRequired(m_cmd_thing_name); return returnData; } public SampleCommandLineData parseSampleInputMqtt5PubSub(String [] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); addCommonTopicMessageCommands(); 
addKeyAndCertCommands(); addCommonProxyCommands(); addClientIdAndPort(); registerCommand(m_cmd_signing_region, "<string>", "Websocket region to use (will use websockets to connect if defined)."); sendArguments(args); SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); parseCommonTopicMessageCommands(returnData); parseKeyAndCertCommands(returnData); parseCommonProxyCommands(returnData); parseClientIdAndPort(returnData); returnData.input_signingRegion = getCommandOrDefault(m_cmd_signing_region, null); return returnData; } public SampleCommandLineData parseSampleInputMqtt5SharedSubscription(String [] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); addCommonTopicMessageCommands(); addKeyAndCertCommands(); addCommonProxyCommands(); addClientIdAndPort(); registerCommand(m_cmd_group_identifier, "<string>", "The group identifier to use in the shared subscription (optional, default='java-sample')"); sendArguments(args); SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); parseCommonTopicMessageCommands(returnData); parseKeyAndCertCommands(returnData); parseCommonProxyCommands(returnData); parseClientIdAndPort(returnData); returnData.input_groupIdentifier = getCommandOrDefault(m_cmd_group_identifier, "java-sample"); return returnData; } public SampleCommandLineData parseSampleInputPkcs11Connect(String [] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); addClientIdAndPort(); registerCommand(m_cmd_cert_file, "<path>", "Path to your client certificate in PEM format."); registerCommand(m_cmd_pkcs11_lib, "<path>", "Path to PKCS#11 library."); registerCommand(m_cmd_pkcs11_pin, "<int>", "User PIN for logging into PKCS#11 token."); registerCommand(m_cmd_pkcs11_token, "<str>", "Label of PKCS#11 token to use (optional)."); registerCommand(m_cmd_pkcs11_slot, "<int>", "Slot ID containing 
PKCS#11 token to use (optional)."); registerCommand(m_cmd_pkcs11_key, "<str>", "Label of private key on the PKCS#11 token (optional)."); sendArguments(args); SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); parseClientIdAndPort(returnData); returnData.input_cert = getCommandRequired(m_cmd_cert_file); returnData.input_pkcs11LibPath = getCommandRequired(m_cmd_pkcs11_lib); returnData.input_pkcs11UserPin = getCommandRequired(m_cmd_pkcs11_pin); returnData.input_pkcs11TokenLabel = getCommandOrDefault(m_cmd_pkcs11_token, ""); returnData.input_pkcs11SlotId = null; if (hasCommand(m_cmd_pkcs11_slot)) { returnData.input_pkcs11SlotId = Long.parseLong(getCommandOrDefault(m_cmd_pkcs11_slot, "-1")); } returnData.input_pkcs11KeyLabel = getCommandOrDefault(m_cmd_pkcs11_key, ""); return returnData; } public SampleCommandLineData parseSampleInputShadow(String [] args) { // Shadow and Jobs use the same inputs currently return parseSampleInputJobs(args); } public SampleCommandLineData parseSampleInputWebsocketConnect(String [] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); addCommonProxyCommands(); registerCommand(m_cmd_signing_region, "<str>", "AWS IoT service region."); registerCommand(m_cmd_client_id, "<int>", "Client id to use (optional, default='test-*')."); registerCommand(m_cmd_port, "<int>", "Port to connect to on the endpoint (optional, default='443')."); sendArguments(args); SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); parseCommonProxyCommands(returnData); returnData.input_signingRegion = getCommandRequired(m_cmd_signing_region, m_cmd_region); returnData.input_clientId = getCommandOrDefault(m_cmd_client_id, "test-" + UUID.randomUUID().toString()); returnData.input_port = Integer.parseInt(getCommandOrDefault(m_cmd_port, "443")); return returnData; } public SampleCommandLineData 
parseSampleInputWindowsCertConnect(String [] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); addClientIdAndPort(); registerCommand(m_cmd_cert_file, "<str>", "Path to certificate in Windows cert store. " + "e.g. \"CurrentUser\\MY\\6ac133ac58f0a88b83e9c794eba156a98da39b4c\""); sendArguments(args); SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); parseClientIdAndPort(returnData); returnData.input_cert = getCommandRequired(m_cmd_cert_file); return returnData; } public SampleCommandLineData parseSampleInputX509Connect(String [] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); addCommonProxyCommands(); addCommonX509Commands(); addClientIdAndPort(); registerCommand(m_cmd_signing_region, "<str>", "AWS IoT service region."); sendArguments(args); /** * Gather the input from the command line */ SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); parseCommonProxyCommands(returnData); parseCommonX509Commands(returnData); returnData.input_signingRegion = getCommandRequired(m_cmd_signing_region, m_cmd_region); returnData.input_clientId = getCommandOrDefault(m_cmd_client_id, "test-" + UUID.randomUUID().toString()); returnData.input_port = Integer.parseInt(getCommandOrDefault(m_cmd_port, "443")); return returnData; } public SampleCommandLineData parseSampleInputPkcs12Connect(String[] args) { addCommonLoggingCommands(); addCommonMQTTCommands(); addClientIdAndPort(); registerCommand(m_cmd_pkcs12_file, "<path>", "Path to your client PKCS12 certificate."); registerCommand(m_cmd_pkcs12_password, "<path>", "Path to your client certificate in PEM format."); sendArguments(args); SampleCommandLineData returnData = new SampleCommandLineData(); parseCommonLoggingCommands(returnData); parseCommonMQTTCommands(returnData); parseClientIdAndPort(returnData); returnData.input_pkcs12File = 
// NOTE(review): this chunk begins mid-method. The statements below are the tail of
// parseSampleInputPkcs12Connect(), whose opening lines are outside this view — left verbatim.
getCommandRequired(m_cmd_pkcs12_file); returnData.input_pkcs12Password = getCommandRequired(m_cmd_pkcs12_password); return returnData; }

    /**
     * Based on the sample string: sets up the arguments, parses the arguments,
     * and returns the command line data all in one go.
     *
     * @param sampleName name of the sample being run; selects which per-sample
     *                   parser is invoked (must match one of the known names below)
     * @param args       raw command line arguments forwarded from the sample's main()
     * @return the parsed {@code SampleCommandLineData} for the chosen sample
     * @throws RuntimeException if {@code sampleName} matches none of the known samples
     */
    public static SampleCommandLineData getInputForIoTSample(String sampleName, String[] args) {
        CommandLineUtils cmdUtils = new CommandLineUtils();
        cmdUtils.registerProgramName(sampleName);
        // Detects the aws.crt.ci system property so samples can fail hard in CI.
        cmdUtils.determineIfCI();
        // Dispatch to the sample-specific parser. Each branch registers that
        // sample's commands and validates its required inputs.
        if (sampleName.equals("BasicConnect")) {
            return cmdUtils.parseSampleInputBasicConnect(args);
        } else if (sampleName.equals("PubSub")) {
            return cmdUtils.parseSampleInputPubSub(args);
        } else if (sampleName.equals("CognitoConnect")) {
            return cmdUtils.parseSampleInputCognitoConnect(args);
        } else if (sampleName.equals("CustomAuthorizerConnect")) {
            return cmdUtils.parseSampleInputCustomAuthorizerConnect(args);
        } else if (sampleName.equals("CustomKeyOpsConnect")) {
            return cmdUtils.parseSampleInputCustomKeyOpsConnect(args);
        } else if (sampleName.equals("FleetProvisioningSample")) {
            return cmdUtils.parseSampleInputFleetProvisioning(args);
        } else if (sampleName.equals("BasicDiscovery")) {
            return cmdUtils.parseSampleInputGreengrassDiscovery(args);
        } else if (sampleName.equals("JavaKeystoreConnect")) {
            return cmdUtils.parseSampleInputKeystoreConnect(args);
        } else if (sampleName.equals("Jobs")) {
            return cmdUtils.parseSampleInputJobs(args);
        } else if (sampleName.equals("Mqtt5PubSub")) {
            return cmdUtils.parseSampleInputMqtt5PubSub(args);
        } else if (sampleName.equals("Mqtt5SharedSubscription")) {
            return cmdUtils.parseSampleInputMqtt5SharedSubscription(args);
        } else if (sampleName.equals("Pkcs11Connect")) {
            return cmdUtils.parseSampleInputPkcs11Connect(args);
        } else if (sampleName.equals("Shadow")) {
            return cmdUtils.parseSampleInputShadow(args);
        } else if (sampleName.equals("WebsocketConnect")) {
            return cmdUtils.parseSampleInputWebsocketConnect(args);
        } else if (sampleName.equals("WindowsCertConnect")) {
            return cmdUtils.parseSampleInputWindowsCertConnect(args);
        } else if (sampleName.equals("x509CredentialsProviderConnect")) {
            return cmdUtils.parseSampleInputX509Connect(args);
        } else if (sampleName.equals("Pkcs12Connect")) {
            return cmdUtils.parseSampleInputPkcs12Connect(args);
        } else {
            throw new RuntimeException("Unknown sample name!");
        }
    }

    /**
     * Constants for commonly used/needed commands.
     * Each constant is the command line flag name (without the leading dashes)
     * under which the corresponding value is registered and looked up.
     */
    // Logging
    private static final String m_cmd_log_destination = "log_destination";
    private static final String m_cmd_log_file_name = "log_file_name";
    private static final String m_cmd_verbosity = "verbosity";
    // Core connection inputs (endpoint, mTLS files, client identity)
    private static final String m_cmd_endpoint = "endpoint";
    private static final String m_cmd_ca_file = "ca_file";
    private static final String m_cmd_cert_file = "cert";
    private static final String m_cmd_key_file = "key";
    private static final String m_cmd_client_id = "client_id";
    private static final String m_cmd_port = "port";
    // HTTP proxy
    private static final String m_cmd_proxy_host = "proxy_host";
    private static final String m_cmd_proxy_port = "proxy_port";
    private static final String m_cmd_signing_region = "signing_region";
    // X.509 credentials provider
    private static final String m_cmd_x509_endpoint = "x509_endpoint";
    private static final String m_cmd_x509_role = "x509_role_alias";
    private static final String m_cmd_x509_thing_name = "x509_thing_name";
    private static final String m_cmd_x509_cert_file = "x509_cert";
    private static final String m_cmd_x509_key_file = "x509_key";
    private static final String m_cmd_x509_ca_file = "x509_ca_file";
    // PKCS#11 (note: m_cmd_pkcs11_cert shares the "cert" flag with m_cmd_cert_file)
    private static final String m_cmd_pkcs11_lib = "pkcs11_lib";
    private static final String m_cmd_pkcs11_cert = "cert";
    private static final String m_cmd_pkcs11_pin = "pin";
    private static final String m_cmd_pkcs11_token = "token_label";
    private static final String m_cmd_pkcs11_slot = "slot_id";
    private static final String m_cmd_pkcs11_key = "key_label";
    // Pub/sub payloads
    private static final String m_cmd_message = "message";
    private static final String m_cmd_topic = "topic";
    private static final String m_cmd_help = "help";
    // Custom authorizer
    private static final String m_cmd_custom_auth_username = "custom_auth_username";
    private static final String m_cmd_custom_auth_authorizer_name = "custom_auth_authorizer_name";
    private static final String m_cmd_custom_auth_authorizer_signature = "custom_auth_authorizer_signature";
    private static final String m_cmd_custom_auth_password = "custom_auth_password";
    private static final String m_cmd_custom_auth_token_key_name = "custom_auth_token_key_name";
    private static final String m_cmd_custom_auth_token_value = "custom_auth_token_value";
    // Java keystore
    private static final String m_cmd_javakeystore_path = "keystore";
    private static final String m_cmd_javakeystore_password = "keystore_password";
    private static final String m_cmd_javakeystore_format = "keystore_format";
    private static final String m_cmd_javakeystore_certificate = "certificate_alias";
    private static final String m_cmd_javakeystore_key_password = "certificate_password";
    // Cognito
    private static final String m_cmd_cognito_identity = "cognito_identity";
    private static final String m_cmd_count = "count";
    // Fleet provisioning
    private static final String m_cmd_fleet_template_name = "template_name";
    private static final String m_cmd_fleet_template_parameters = "template_parameters";
    private static final String m_cmd_fleet_template_csr = "csr";
    // Misc sample inputs
    private static final String m_cmd_thing_name = "thing_name";
    private static final String m_cmd_mode = "mode";
    private static final String m_cmd_group_identifier = "group_identifier";
    private static final String m_cmd_username = "username";
    private static final String m_cmd_password = "password";
    private static final String m_cmd_protocol = "protocol";
    private static final String m_cmd_auth_params = "auth_params";
    // PKCS#12
    private static final String m_cmd_pkcs12_file = "pkcs12_file";
    private static final String m_cmd_pkcs12_password = "pkcs12_password";
    private static final String m_cmd_region = "region";
    private static final String m_cmd_print_discover_resp_only = "print_discover_resp_only";
}

/**
 * Simple value holder for a single registered command line option:
 * its flag name, an example of expected input, and the help text shown
 * for it. Package-private; used only by CommandLineUtils.
 */
class CommandLineOption {
    public String commandName;   // flag name (without leading dashes)
    public String exampleInput;  // example value shown in help output
    public String helpOutput;    // human-readable description of the option

    CommandLineOption(String name, String example, String help) {
        commandName = name;
        exampleInput = example;
        helpOutput = help;
    }
}
489
0
Create_ds/aws-iot-device-sdk-java-v2/samples/BasicPubSub/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/BasicPubSub/src/main/java/pubsub/PubSub.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package pubsub; import software.amazon.awssdk.crt.CRT; import software.amazon.awssdk.crt.CrtResource; import software.amazon.awssdk.crt.CrtRuntimeException; import software.amazon.awssdk.crt.http.HttpProxyOptions; import software.amazon.awssdk.crt.mqtt.MqttClientConnection; import software.amazon.awssdk.crt.mqtt.MqttClientConnectionEvents; import software.amazon.awssdk.crt.mqtt.MqttMessage; import software.amazon.awssdk.crt.mqtt.QualityOfService; import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder; import java.nio.charset.StandardCharsets; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import utils.commandlineutils.CommandLineUtils; public class PubSub { // When run normally, we want to exit nicely even if something goes wrong // When run from CI, we want to let an exception escape which in turn causes the // exec:java task to return a non-zero exit code static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); static CommandLineUtils cmdUtils; /* * When called during a CI run, throw an exception that will escape and fail the exec:java task * When called otherwise, print what went wrong (if anything) and just continue (return from main) */ static void onApplicationFailure(Throwable cause) { if (isCI) { throw new RuntimeException("BasicPubSub execution failure", cause); } else if (cause != null) { System.out.println("Exception encountered: " + cause.toString()); } } public static void main(String[] args) { /** * cmdData is the arguments/input from the command line placed into a single struct for * use in this sample. This handles all of the command line parsing, validating, etc. * See the Utils/CommandLineUtils for more information. 
*/ CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("PubSub", args); MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() { @Override public void onConnectionInterrupted(int errorCode) { if (errorCode != 0) { System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode)); } } @Override public void onConnectionResumed(boolean sessionPresent) { System.out.println("Connection resumed: " + (sessionPresent ? "existing session" : "clean session")); } }; try { /** * Create the MQTT connection from the builder */ AwsIotMqttConnectionBuilder builder = AwsIotMqttConnectionBuilder.newMtlsBuilderFromPath(cmdData.input_cert, cmdData.input_key); if (cmdData.input_ca != "") { builder.withCertificateAuthorityFromPath(null, cmdData.input_ca); } builder.withConnectionEventCallbacks(callbacks) .withClientId(cmdData.input_clientId) .withEndpoint(cmdData.input_endpoint) .withPort((short)cmdData.input_port) .withCleanSession(true) .withProtocolOperationTimeoutMs(60000); if (cmdData.input_proxyHost != "" && cmdData.input_proxyPort > 0) { HttpProxyOptions proxyOptions = new HttpProxyOptions(); proxyOptions.setHost(cmdData.input_proxyHost); proxyOptions.setPort(cmdData.input_proxyPort); builder.withHttpProxyOptions(proxyOptions); } MqttClientConnection connection = builder.build(); builder.close(); // Connect the MQTT client CompletableFuture<Boolean> connected = connection.connect(); try { boolean sessionPresent = connected.get(); System.out.println("Connected to " + (!sessionPresent ? 
"new" : "existing") + " session!"); } catch (Exception ex) { throw new RuntimeException("Exception occurred during connect", ex); } // Subscribe to the topic CountDownLatch countDownLatch = new CountDownLatch(cmdData.input_count); CompletableFuture<Integer> subscribed = connection.subscribe(cmdData.input_topic, QualityOfService.AT_LEAST_ONCE, (message) -> { String payload = new String(message.getPayload(), StandardCharsets.UTF_8); System.out.println("MESSAGE: " + payload); countDownLatch.countDown(); }); subscribed.get(); // Publish to the topic int count = 0; while (count++ < cmdData.input_count) { CompletableFuture<Integer> published = connection.publish(new MqttMessage(cmdData.input_topic, cmdData.input_message.getBytes(), QualityOfService.AT_LEAST_ONCE, false)); published.get(); Thread.sleep(1000); } countDownLatch.await(); // Disconnect CompletableFuture<Void> disconnected = connection.disconnect(); disconnected.get(); // Close the connection now that we are completely done with it. connection.close(); } catch (CrtRuntimeException | InterruptedException | ExecutionException ex) { onApplicationFailure(ex); } CrtResource.waitForNoResources(); System.out.println("Complete!"); } }
490
0
Create_ds/aws-iot-device-sdk-java-v2/samples/CognitoConnect/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/CognitoConnect/src/main/java/cognitoconnect/CognitoConnect.java
/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */

package cognitoconnect;

import software.amazon.awssdk.crt.CRT;
import software.amazon.awssdk.crt.CrtResource;
import software.amazon.awssdk.crt.CrtRuntimeException;
import software.amazon.awssdk.crt.mqtt.MqttClientConnection;
import software.amazon.awssdk.crt.mqtt.MqttClientConnectionEvents;
import software.amazon.awssdk.crt.http.HttpProxyOptions;
import software.amazon.awssdk.crt.auth.credentials.CognitoCredentialsProvider;
import software.amazon.awssdk.crt.io.ClientBootstrap;
import software.amazon.awssdk.crt.io.TlsContextOptions;
import software.amazon.awssdk.crt.io.ClientTlsContext;
import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;

import utils.commandlineutils.CommandLineUtils;

/**
 * Sample: connect to AWS IoT Core over MQTT-over-websockets, signing the
 * websocket handshake with credentials obtained from a Cognito identity,
 * then disconnect.
 */
public class CognitoConnect {

    // When run normally, we want to exit nicely even if something goes wrong.
    // When run from CI, we want to let an exception escape, which in turn causes
    // the exec:java task to return a non-zero exit code.
    static String ciPropValue = System.getProperty("aws.crt.ci");
    static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue);

    static CommandLineUtils cmdUtils;

    /*
     * When called during a CI run, throw an exception that will escape and fail
     * the exec:java task. When called otherwise, print what went wrong (if
     * anything) and just continue (return from main).
     */
    static void onApplicationFailure(Throwable cause) {
        if (isCI) {
            throw new RuntimeException("CognitoConnect execution failure", cause);
        } else if (cause != null) {
            System.out.println("Exception encountered: " + cause.toString());
        }
    }

    public static void main(String[] args) {
        /**
         * cmdData is the arguments/input from the command line placed into a single
         * struct for use in this sample. This handles all of the command line
         * parsing, validating, etc. See the Utils/CommandLineUtils for more information.
         */
        CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("CognitoConnect", args);

        MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() {
            @Override
            public void onConnectionInterrupted(int errorCode) {
                if (errorCode != 0) {
                    System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode));
                }
            }

            @Override
            public void onConnectionResumed(boolean sessionPresent) {
                System.out.println("Connection resumed: " + (sessionPresent ? "existing session" : "clean session"));
            }
        };

        try {
            /**
             * Creates a connection using Cognito credentials.
             *
             * Note: This sample and code assumes that you are using a Cognito identity
             * in the same region as you pass to "--signing_region".
             * If not, you may need to adjust the Cognito endpoint in the cmdUtils.
             * See https://docs.aws.amazon.com/general/latest/gr/cognito_identity.html
             * for all Cognito endpoints.
             */
            // =================================
            // No cert/key paths: auth happens via the websocket signing below.
            AwsIotMqttConnectionBuilder builder = AwsIotMqttConnectionBuilder.newMtlsBuilderFromPath(null, null);
            builder.withConnectionEventCallbacks(callbacks)
                .withClientId(cmdData.input_clientId)
                .withEndpoint(cmdData.input_endpoint)
                .withCleanSession(true)
                .withProtocolOperationTimeoutMs(60000);
            builder.withWebsockets(true);
            builder.withWebsocketSigningRegion(cmdData.input_signingRegion);

            // Build a Cognito credentials provider pointed at the regional
            // Cognito Identity endpoint for the signing region.
            CognitoCredentialsProvider.CognitoCredentialsProviderBuilder cognitoBuilder =
                new CognitoCredentialsProvider.CognitoCredentialsProviderBuilder();
            String cognitoEndpoint = "cognito-identity." + cmdData.input_signingRegion + ".amazonaws.com";
            cognitoBuilder.withEndpoint(cognitoEndpoint).withIdentity(cmdData.input_cognitoIdentity);
            cognitoBuilder.withClientBootstrap(ClientBootstrap.getOrCreateStaticDefault());
            TlsContextOptions cognitoTlsContextOptions = TlsContextOptions.createDefaultClient();
            ClientTlsContext cognitoTlsContext = new ClientTlsContext(cognitoTlsContextOptions);
            // The options object is no longer needed once the context is created.
            cognitoTlsContextOptions.close();
            cognitoBuilder.withTlsContext(cognitoTlsContext);
            CognitoCredentialsProvider cognitoCredentials = cognitoBuilder.build();
            builder.withWebsocketCredentialsProvider(cognitoCredentials);

            MqttClientConnection connection = builder.build();
            // CRT resources are reference counted: the connection keeps what it
            // needs alive, so release our local references in creation order.
            builder.close();
            cognitoCredentials.close();
            cognitoTlsContext.close();
            if (connection == null) {
                onApplicationFailure(new RuntimeException("MQTT connection creation failed!"));
            }
            // =================================

            /**
             * Connect and disconnect
             */
            CompletableFuture<Boolean> connected = connection.connect();
            try {
                boolean sessionPresent = connected.get();
                System.out.println("Connected to " + (!sessionPresent ? "new" : "existing") + " session!");
            } catch (Exception ex) {
                throw new RuntimeException("Exception occurred during connect", ex);
            }
            System.out.println("Disconnecting...");
            CompletableFuture<Void> disconnected = connection.disconnect();
            disconnected.get();
            System.out.println("Disconnected.");

            /**
             * Close the connection now that it is complete
             */
            connection.close();
        } catch (CrtRuntimeException | InterruptedException | ExecutionException ex) {
            onApplicationFailure(ex);
        }

        CrtResource.waitForNoResources();
        System.out.println("Complete!");
    }
}
491
0
Create_ds/aws-iot-device-sdk-java-v2/samples/JavaKeystoreConnect/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/JavaKeystoreConnect/src/main/java/javakeystoreconnect/JavaKeystoreConnect.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package javakeystoreconnect; import software.amazon.awssdk.crt.CRT; import software.amazon.awssdk.crt.CrtResource; import software.amazon.awssdk.crt.CrtRuntimeException; import software.amazon.awssdk.crt.mqtt.MqttClientConnection; import software.amazon.awssdk.crt.mqtt.MqttClientConnectionEvents; import software.amazon.awssdk.crt.http.HttpProxyOptions; import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder; import java.util.concurrent.ExecutionException; import java.util.concurrent.CompletableFuture; import utils.commandlineutils.CommandLineUtils; public class JavaKeystoreConnect { // When run normally, we want to exit nicely even if something goes wrong // When run from CI, we want to let an exception escape which in turn causes the // exec:java task to return a non-zero exit code static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); static CommandLineUtils cmdUtils; /* * When called during a CI run, throw an exception that will escape and fail the exec:java task * When called otherwise, print what went wrong (if anything) and just continue (return from main) */ static void onApplicationFailure(Throwable cause) { if (isCI) { throw new RuntimeException("JavaKeystoreConnect execution failure", cause); } else if (cause != null) { System.out.println("Exception encountered: " + cause.toString()); } } public static void main(String[] args) { /** * cmdData is the arguments/input from the command line placed into a single struct for * use in this sample. This handles all of the command line parsing, validating, etc. * See the Utils/CommandLineUtils for more information. 
*/ CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("JavaKeystoreConnect", args); MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() { @Override public void onConnectionInterrupted(int errorCode) { if (errorCode != 0) { System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode)); } } @Override public void onConnectionResumed(boolean sessionPresent) { System.out.println("Connection resumed: " + (sessionPresent ? "existing session" : "clean session")); } }; try { /** * Create the MQTT connection from the builder */ java.security.KeyStore keyStore; try { keyStore = java.security.KeyStore.getInstance(cmdData.input_keystoreFormat); } catch (java.security.KeyStoreException ex) { throw new CrtRuntimeException("Could not get instance of Java keystore with format " + cmdData.input_keystoreFormat); } try (java.io.FileInputStream fileInputStream = new java.io.FileInputStream(cmdData.input_keystore)) { keyStore.load(fileInputStream, cmdData.input_keystorePassword.toCharArray()); } catch (java.io.FileNotFoundException ex) { throw new CrtRuntimeException("Could not open Java keystore file"); } catch (java.io.IOException | java.security.NoSuchAlgorithmException | java.security.cert.CertificateException ex) { throw new CrtRuntimeException("Could not load Java keystore"); } AwsIotMqttConnectionBuilder builder = AwsIotMqttConnectionBuilder.newJavaKeystoreBuilder( keyStore, cmdData.input_certificateAlias, cmdData.input_certificatePassword); if (cmdData.input_ca != "") { builder.withCertificateAuthorityFromPath(null, cmdData.input_ca); } builder.withConnectionEventCallbacks(callbacks) .withClientId(cmdData.input_clientId) .withEndpoint(cmdData.input_endpoint) .withPort((short)cmdData.input_port) .withCleanSession(true) .withProtocolOperationTimeoutMs(60000); if (cmdData.input_proxyHost != "" && cmdData.input_proxyPort > 0) { HttpProxyOptions proxyOptions = new HttpProxyOptions(); 
proxyOptions.setHost(cmdData.input_proxyHost); proxyOptions.setPort(cmdData.input_proxyPort); builder.withHttpProxyOptions(proxyOptions); } MqttClientConnection connection = builder.build(); builder.close(); /** * Verify the connection was created */ if (connection == null) { onApplicationFailure(new RuntimeException("MQTT connection creation failed!")); } /** * Connect and disconnect */ CompletableFuture<Boolean> connected = connection.connect(); try { boolean sessionPresent = connected.get(); System.out.println("Connected to " + (!sessionPresent ? "new" : "existing") + " session!"); } catch (Exception ex) { throw new RuntimeException("Exception occurred during connect", ex); } System.out.println("Disconnecting..."); CompletableFuture<Void> disconnected = connection.disconnect(); disconnected.get(); System.out.println("Disconnected."); /** * Close the connection now that it is complete */ connection.close(); } catch (CrtRuntimeException | InterruptedException | ExecutionException ex) { onApplicationFailure(ex); } CrtResource.waitForNoResources(); System.out.println("Complete!"); } }
492
0
Create_ds/aws-iot-device-sdk-java-v2/samples/WebsocketConnect/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/WebsocketConnect/src/main/java/websocketconnect/WebsocketConnect.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package websocketconnect; import software.amazon.awssdk.crt.CRT; import software.amazon.awssdk.crt.CrtResource; import software.amazon.awssdk.crt.CrtRuntimeException; import software.amazon.awssdk.crt.mqtt.MqttClientConnection; import software.amazon.awssdk.crt.mqtt.MqttClientConnectionEvents; import software.amazon.awssdk.crt.http.HttpProxyOptions; import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder; import java.util.concurrent.ExecutionException; import java.util.concurrent.CompletableFuture; import utils.commandlineutils.CommandLineUtils; public class WebsocketConnect { // When run normally, we want to exit nicely even if something goes wrong // When run from CI, we want to let an exception escape which in turn causes the // exec:java task to return a non-zero exit code static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); static CommandLineUtils cmdUtils; /* * When called during a CI run, throw an exception that will escape and fail the exec:java task * When called otherwise, print what went wrong (if anything) and just continue (return from main) */ static void onApplicationFailure(Throwable cause) { if (isCI) { throw new RuntimeException("WebsocketConnect execution failure", cause); } else if (cause != null) { System.out.println("Exception encountered: " + cause.toString()); } } public static void main(String[] args) { /** * cmdData is the arguments/input from the command line placed into a single struct for * use in this sample. This handles all of the command line parsing, validating, etc. * See the Utils/CommandLineUtils for more information. 
*/ CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("WebsocketConnect", args); MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() { @Override public void onConnectionInterrupted(int errorCode) { if (errorCode != 0) { System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode)); } } @Override public void onConnectionResumed(boolean sessionPresent) { System.out.println("Connection resumed: " + (sessionPresent ? "existing session" : "clean session")); } }; try { /** * Create the MQTT connection from the builder */ AwsIotMqttConnectionBuilder builder = AwsIotMqttConnectionBuilder.newMtlsBuilderFromPath(null, null); if (cmdData.input_ca != "") { builder.withCertificateAuthorityFromPath(null, cmdData.input_ca); } builder.withConnectionEventCallbacks(callbacks) .withClientId(cmdData.input_clientId) .withEndpoint(cmdData.input_endpoint) .withPort((short)cmdData.input_port) .withCleanSession(true) .withProtocolOperationTimeoutMs(60000); if (cmdData.input_proxyHost != "" && cmdData.input_proxyPort > 0) { HttpProxyOptions proxyOptions = new HttpProxyOptions(); proxyOptions.setHost(cmdData.input_proxyHost); proxyOptions.setPort(cmdData.input_proxyPort); builder.withHttpProxyOptions(proxyOptions); } builder.withWebsockets(true); builder.withWebsocketSigningRegion(cmdData.input_signingRegion); MqttClientConnection connection = builder.build(); builder.close(); /** * Verify the connection was created */ if (connection == null) { onApplicationFailure(new RuntimeException("MQTT connection creation failed!")); } /** * Connect and disconnect */ CompletableFuture<Boolean> connected = connection.connect(); try { boolean sessionPresent = connected.get(); System.out.println("Connected to " + (!sessionPresent ? 
"new" : "existing") + " session!"); } catch (Exception ex) { throw new RuntimeException("Exception occurred during connect", ex); } System.out.println("Disconnecting..."); CompletableFuture<Void> disconnected = connection.disconnect(); disconnected.get(); System.out.println("Disconnected."); // Close the connection now that we are completely done with it. connection.close(); } catch (CrtRuntimeException | InterruptedException | ExecutionException ex) { onApplicationFailure(ex); } CrtResource.waitForNoResources(); System.out.println("Complete!"); } }
493
0
Create_ds/aws-iot-device-sdk-java-v2/samples/WindowsCertConnect/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/WindowsCertConnect/src/main/java/windowscertconnect/WindowsCertConnect.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package windowscertconnect; import software.amazon.awssdk.crt.*; import software.amazon.awssdk.crt.io.*; import software.amazon.awssdk.crt.mqtt.*; import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import utils.commandlineutils.CommandLineUtils; public class WindowsCertConnect { // When run normally, we want to exit nicely even if something goes wrong // When run from CI, we want to let an exception escape which in turn causes the // exec:java task to return a non-zero exit code static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); static CommandLineUtils cmdUtils; /* * When called during a CI run, throw an exception that will escape and fail the * exec:java task When called otherwise, print what went wrong (if anything) and * just continue (return from main) */ static void onApplicationFailure(Throwable cause) { if (isCI) { throw new RuntimeException("execution failure", cause); } else if (cause != null) { System.out.println("Exception encountered: " + cause.toString()); } } public static void main(String[] args) { /** * cmdData is the arguments/input from the command line placed into a single struct for * use in this sample. This handles all of the command line parsing, validating, etc. * See the Utils/CommandLineUtils for more information. 
*/ CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("WindowsCertConnect", args); MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() { @Override public void onConnectionInterrupted(int errorCode) { if (errorCode != 0) { System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode)); } } @Override public void onConnectionResumed(boolean sessionPresent) { System.out.println("Connection resumed: " + (sessionPresent ? "existing session" : "clean session")); } }; try { AwsIotMqttConnectionBuilder builder = AwsIotMqttConnectionBuilder.newMtlsWindowsCertStorePathBuilder(cmdData.input_cert); if (cmdData.input_ca != "") { builder.withCertificateAuthorityFromPath(null, cmdData.input_ca); } builder.withConnectionEventCallbacks(callbacks) .withClientId(cmdData.input_clientId) .withEndpoint(cmdData.input_endpoint) .withPort((short)cmdData.input_port) .withCleanSession(true) .withProtocolOperationTimeoutMs(60000); MqttClientConnection connection = builder.build(); builder.close(); /** * Verify the connection was created */ if (connection == null) { onApplicationFailure(new RuntimeException("MQTT connection creation failed!")); } /** * Connect and disconnect */ CompletableFuture<Boolean> connected = connection.connect(); try { boolean sessionPresent = connected.get(); System.out.println("Connected to " + (!sessionPresent ? "new" : "existing") + " session!"); } catch (Exception ex) { throw new RuntimeException("Exception occurred during connect", ex); } System.out.println("Disconnecting..."); CompletableFuture<Void> disconnected = connection.disconnect(); disconnected.get(); System.out.println("Disconnected."); /** * Close the connection now that it is complete */ connection.close(); } catch (CrtRuntimeException | InterruptedException | ExecutionException ex) { onApplicationFailure(ex); } CrtResource.waitForNoResources(); System.out.println("Complete!"); } }
494
0
Create_ds/aws-iot-device-sdk-java-v2/samples/Pkcs11Connect/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/Pkcs11Connect/src/main/java/pkcs11connect/Pkcs11Connect.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package pkcs11connect; import software.amazon.awssdk.crt.*; import software.amazon.awssdk.crt.io.*; import software.amazon.awssdk.crt.mqtt.*; import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import utils.commandlineutils.CommandLineUtils; public class Pkcs11Connect { // When run normally, we want to exit nicely even if something goes wrong // When run from CI, we want to let an exception escape which in turn causes the // exec:java task to return a non-zero exit code static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); static CommandLineUtils cmdUtils; /* * When called during a CI run, throw an exception that will escape and fail the * exec:java task When called otherwise, print what went wrong (if anything) and * just continue (return from main) */ static void onApplicationFailure(Throwable cause) { if (isCI) { throw new RuntimeException("Pkcs11Connect execution failure", cause); } else if (cause != null) { System.out.println("Exception encountered: " + cause.toString()); } } public static void main(String[] args) { /** * cmdData is the arguments/input from the command line placed into a single struct for * use in this sample. This handles all of the command line parsing, validating, etc. * See the Utils/CommandLineUtils for more information. 
*/ CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("Pkcs11Connect", args); MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() { @Override public void onConnectionInterrupted(int errorCode) { if (errorCode != 0) { System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode)); } } @Override public void onConnectionResumed(boolean sessionPresent) { System.out.println("Connection resumed: " + (sessionPresent ? "existing session" : "clean session")); } }; // Load PKCS#11 library try (Pkcs11Lib pkcs11Lib = new Pkcs11Lib(cmdData.input_pkcs11LibPath); TlsContextPkcs11Options pkcs11Options = new TlsContextPkcs11Options(pkcs11Lib)) { pkcs11Options.withCertificateFilePath(cmdData.input_cert); pkcs11Options.withUserPin(cmdData.input_pkcs11UserPin); // Pass arguments to help find the correct PKCS#11 token, // and the private key on that token. You don't need to pass // any of these arguments if your PKCS#11 device only has one // token, or the token only has one private key. But if there // are multiple tokens, or multiple keys to choose from, you // must narrow down which one should be used. 
if (cmdData.input_pkcs11TokenLabel != null && cmdData.input_pkcs11TokenLabel != "") { pkcs11Options.withTokenLabel(cmdData.input_pkcs11TokenLabel); } if (cmdData.input_pkcs11SlotId != null) { pkcs11Options.withSlotId(cmdData.input_pkcs11SlotId); } if (cmdData.input_pkcs11KeyLabel != null && cmdData.input_pkcs11KeyLabel != "") { pkcs11Options.withPrivateKeyObjectLabel(cmdData.input_pkcs11KeyLabel); } try (AwsIotMqttConnectionBuilder builder = AwsIotMqttConnectionBuilder .newMtlsPkcs11Builder(pkcs11Options)) { if (cmdData.input_ca != null && cmdData.input_ca != "") { builder.withCertificateAuthorityFromPath(null, cmdData.input_ca); } builder.withConnectionEventCallbacks(callbacks) .withClientId(cmdData.input_clientId) .withEndpoint(cmdData.input_endpoint) .withPort((short) cmdData.input_port) .withCleanSession(true) .withProtocolOperationTimeoutMs(60000); try (MqttClientConnection connection = builder.build()) { CompletableFuture<Boolean> connected = connection.connect(); try { boolean sessionPresent = connected.get(); System.out.println("Connected to " + (!sessionPresent ? "new" : "existing") + " session!"); } catch (Exception ex) { throw new RuntimeException("Exception occurred during connect", ex); } System.out.println("Disconnecting..."); CompletableFuture<Void> disconnected = connection.disconnect(); disconnected.get(); System.out.println("Disconnected."); // Close the connection now that we are completely done with it. connection.close(); } } catch (CrtRuntimeException | InterruptedException | ExecutionException ex) { onApplicationFailure(ex); } } CrtResource.waitForNoResources(); System.out.println("Complete!"); } }
495
0
Create_ds/aws-iot-device-sdk-java-v2/samples/Jobs/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/Jobs/src/main/java/jobs/JobsSample.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package jobs; import software.amazon.awssdk.crt.CRT; import software.amazon.awssdk.crt.CrtResource; import software.amazon.awssdk.crt.CrtRuntimeException; import software.amazon.awssdk.crt.mqtt.MqttClientConnection; import software.amazon.awssdk.crt.mqtt.MqttClientConnectionEvents; import software.amazon.awssdk.crt.mqtt.QualityOfService; import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder; import software.amazon.awssdk.iot.iotjobs.IotJobsClient; import software.amazon.awssdk.iot.iotjobs.model.DescribeJobExecutionRequest; import software.amazon.awssdk.iot.iotjobs.model.DescribeJobExecutionResponse; import software.amazon.awssdk.iot.iotjobs.model.DescribeJobExecutionSubscriptionRequest; import software.amazon.awssdk.iot.iotjobs.model.GetPendingJobExecutionsRequest; import software.amazon.awssdk.iot.iotjobs.model.GetPendingJobExecutionsResponse; import software.amazon.awssdk.iot.iotjobs.model.GetPendingJobExecutionsSubscriptionRequest; import software.amazon.awssdk.iot.iotjobs.model.JobExecutionSummary; import software.amazon.awssdk.iot.iotjobs.model.JobStatus; import software.amazon.awssdk.iot.iotjobs.model.RejectedError; import software.amazon.awssdk.iot.iotjobs.model.StartNextJobExecutionResponse; import software.amazon.awssdk.iot.iotjobs.model.StartNextPendingJobExecutionRequest; import software.amazon.awssdk.iot.iotjobs.model.StartNextPendingJobExecutionSubscriptionRequest; import software.amazon.awssdk.iot.iotjobs.model.UpdateJobExecutionRequest; import software.amazon.awssdk.iot.iotjobs.model.UpdateJobExecutionSubscriptionRequest; import java.util.LinkedList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import utils.commandlineutils.CommandLineUtils; public class JobsSample { // When run normally, we want to check for jobs and process them // When run from CI, we 
want to just check for jobs static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); static CompletableFuture<Void> gotResponse; static List<String> availableJobs = new LinkedList<>(); static String currentJobId; static long currentExecutionNumber = 0; static int currentVersionNumber = 0; static CommandLineUtils cmdUtils; static void onRejectedError(RejectedError error) { System.out.println("Request rejected: " + error.code.toString() + ": " + error.message); System.exit(1); } static void onGetPendingJobExecutionsAccepted(GetPendingJobExecutionsResponse response) { System.out.println("Pending Jobs: " + (response.queuedJobs.size() + response.inProgressJobs.size() == 0 ? "none" : "")); for (JobExecutionSummary job : response.inProgressJobs) { availableJobs.add(job.jobId); System.out.println(" In Progress: " + job.jobId + " @ " + job.lastUpdatedAt.toString()); } for (JobExecutionSummary job : response.queuedJobs) { availableJobs.add(job.jobId); System.out.println(" " + job.jobId + " @ " + job.lastUpdatedAt.toString()); } gotResponse.complete(null); } static void onDescribeJobExecutionAccepted(DescribeJobExecutionResponse response) { System.out.println("Describe Job: " + response.execution.jobId + " version: " + response.execution.versionNumber); if (response.execution.jobDocument != null) { response.execution.jobDocument.forEach((key, value) -> { System.out.println(" " + key + ": " + value); }); } gotResponse.complete(null); } static void onStartNextPendingJobExecutionAccepted(StartNextJobExecutionResponse response) { System.out.println("Start Job: " + response.execution.jobId); currentJobId = response.execution.jobId; currentExecutionNumber = response.execution.executionNumber; currentVersionNumber = response.execution.versionNumber; gotResponse.complete(null); } public static void main(String[] args) { /** * cmdData is the arguments/input from the command line placed into a single struct 
for * use in this sample. This handles all of the command line parsing, validating, etc. * See the Utils/CommandLineUtils for more information. */ CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("Jobs", args); MqttClientConnectionEvents callbacks = new MqttClientConnectionEvents() { @Override public void onConnectionInterrupted(int errorCode) { if (errorCode != 0) { System.out.println("Connection interrupted: " + errorCode + ": " + CRT.awsErrorString(errorCode)); } } @Override public void onConnectionResumed(boolean sessionPresent) { System.out.println("Connection resumed: " + (sessionPresent ? "existing session" : "clean session")); } }; try { /** * Create the MQTT connection from the builder */ AwsIotMqttConnectionBuilder builder = AwsIotMqttConnectionBuilder.newMtlsBuilderFromPath(cmdData.input_cert, cmdData.input_key); if (cmdData.input_ca != "") { builder.withCertificateAuthorityFromPath(null, cmdData.input_ca); } builder.withConnectionEventCallbacks(callbacks) .withClientId(cmdData.input_clientId) .withEndpoint(cmdData.input_endpoint) .withPort((short)cmdData.input_port) .withCleanSession(true) .withProtocolOperationTimeoutMs(60000); MqttClientConnection connection = builder.build(); builder.close(); IotJobsClient jobs = new IotJobsClient(connection); CompletableFuture<Boolean> connected = connection.connect(); try { boolean sessionPresent = connected.get(); System.out.println("Connected to " + (!sessionPresent ? 
"new" : "existing") + " session!"); } catch (Exception ex) { throw new RuntimeException("Exception occurred during connect", ex); } { gotResponse = new CompletableFuture<>(); GetPendingJobExecutionsSubscriptionRequest subscriptionRequest = new GetPendingJobExecutionsSubscriptionRequest(); subscriptionRequest.thingName = cmdData.input_thingName; CompletableFuture<Integer> subscribed = jobs.SubscribeToGetPendingJobExecutionsAccepted( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, JobsSample::onGetPendingJobExecutionsAccepted); try { subscribed.get(); System.out.println("Subscribed to GetPendingJobExecutionsAccepted"); } catch (Exception ex) { throw new RuntimeException("Failed to subscribe to GetPendingJobExecutions", ex); } subscribed = jobs.SubscribeToGetPendingJobExecutionsRejected( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, JobsSample::onRejectedError); subscribed.get(); System.out.println("Subscribed to GetPendingJobExecutionsRejected"); GetPendingJobExecutionsRequest publishRequest = new GetPendingJobExecutionsRequest(); publishRequest.thingName = cmdData.input_thingName; CompletableFuture<Integer> published = jobs.PublishGetPendingJobExecutions( publishRequest, QualityOfService.AT_LEAST_ONCE); try { published.get(); gotResponse.get(); } catch (Exception ex) { throw new RuntimeException("Exception occurred during publish", ex); } } if (availableJobs.isEmpty()) { System.out.println("No jobs queued, no further work to do"); // If sample is running in CI, there should be at least one job if (isCI == true) { throw new RuntimeException("No jobs queued in CI! 
At least one job should be queued!"); } } for (String jobId : availableJobs) { gotResponse = new CompletableFuture<>(); DescribeJobExecutionSubscriptionRequest subscriptionRequest = new DescribeJobExecutionSubscriptionRequest(); subscriptionRequest.thingName = cmdData.input_thingName; subscriptionRequest.jobId = jobId; jobs.SubscribeToDescribeJobExecutionAccepted( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, JobsSample::onDescribeJobExecutionAccepted); jobs.SubscribeToDescribeJobExecutionRejected( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, JobsSample::onRejectedError); DescribeJobExecutionRequest publishRequest = new DescribeJobExecutionRequest(); publishRequest.thingName = cmdData.input_thingName; publishRequest.jobId = jobId; publishRequest.includeJobDocument = true; publishRequest.executionNumber = 1L; jobs.PublishDescribeJobExecution(publishRequest, QualityOfService.AT_LEAST_ONCE); gotResponse.get(); } // If sample is not running in CI, then process the available jobs. 
if (isCI == false) { for (int jobIdx = 0; jobIdx < availableJobs.size(); ++jobIdx) { { gotResponse = new CompletableFuture<>(); // Start the next pending job StartNextPendingJobExecutionSubscriptionRequest subscriptionRequest = new StartNextPendingJobExecutionSubscriptionRequest(); subscriptionRequest.thingName = cmdData.input_thingName; jobs.SubscribeToStartNextPendingJobExecutionAccepted( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, JobsSample::onStartNextPendingJobExecutionAccepted); jobs.SubscribeToStartNextPendingJobExecutionRejected( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, JobsSample::onRejectedError); StartNextPendingJobExecutionRequest publishRequest = new StartNextPendingJobExecutionRequest(); publishRequest.thingName = cmdData.input_thingName; publishRequest.stepTimeoutInMinutes = 15L; jobs.PublishStartNextPendingJobExecution(publishRequest, QualityOfService.AT_LEAST_ONCE); gotResponse.get(); } { // Update the service to let it know we're executing gotResponse = new CompletableFuture<>(); UpdateJobExecutionSubscriptionRequest subscriptionRequest = new UpdateJobExecutionSubscriptionRequest(); subscriptionRequest.thingName = cmdData.input_thingName; subscriptionRequest.jobId = currentJobId; jobs.SubscribeToUpdateJobExecutionAccepted( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, (response) -> { System.out.println("Marked job " + currentJobId + " IN_PROGRESS"); gotResponse.complete(null); }); jobs.SubscribeToUpdateJobExecutionRejected( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, JobsSample::onRejectedError); UpdateJobExecutionRequest publishRequest = new UpdateJobExecutionRequest(); publishRequest.thingName = cmdData.input_thingName; publishRequest.jobId = currentJobId; publishRequest.executionNumber = currentExecutionNumber; publishRequest.status = JobStatus.IN_PROGRESS; publishRequest.expectedVersion = currentVersionNumber++; jobs.PublishUpdateJobExecution(publishRequest, QualityOfService.AT_LEAST_ONCE); 
gotResponse.get(); } // Fake doing something Thread.sleep(1000); { // Update the service to let it know we're done gotResponse = new CompletableFuture<>(); UpdateJobExecutionSubscriptionRequest subscriptionRequest = new UpdateJobExecutionSubscriptionRequest(); subscriptionRequest.thingName = cmdData.input_thingName; subscriptionRequest.jobId = currentJobId; jobs.SubscribeToUpdateJobExecutionAccepted( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, (response) -> { System.out.println("Marked job " + currentJobId + " SUCCEEDED"); gotResponse.complete(null); }); jobs.SubscribeToUpdateJobExecutionRejected( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, JobsSample::onRejectedError); UpdateJobExecutionRequest publishRequest = new UpdateJobExecutionRequest(); publishRequest.thingName = cmdData.input_thingName; publishRequest.jobId = currentJobId; publishRequest.executionNumber = currentExecutionNumber; publishRequest.status = JobStatus.SUCCEEDED; publishRequest.expectedVersion = currentVersionNumber++; jobs.PublishUpdateJobExecution(publishRequest, QualityOfService.AT_LEAST_ONCE); gotResponse.get(); } } } CompletableFuture<Void> disconnected = connection.disconnect(); disconnected.get(); // Close the connection now that we are completely done with it. connection.close(); } catch (CrtRuntimeException | InterruptedException | ExecutionException ex) { System.out.println("Exception encountered: " + ex.toString()); } CrtResource.waitForNoResources(); System.out.println("Complete!"); } }
496
0
Create_ds/aws-iot-device-sdk-java-v2/samples/Jobs/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/Jobs/src/main/java/jobs/Mqtt5JobsSample.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package jobs; import software.amazon.awssdk.crt.CRT; import software.amazon.awssdk.crt.CrtResource; import software.amazon.awssdk.crt.CrtRuntimeException; import software.amazon.awssdk.crt.mqtt.QualityOfService; import software.amazon.awssdk.crt.mqtt.MqttClientConnection; import software.amazon.awssdk.crt.mqtt5.packets.*; import software.amazon.awssdk.crt.mqtt5.*; import software.amazon.awssdk.crt.mqtt5.Mqtt5ClientOptions; import software.amazon.awssdk.crt.mqtt5.Mqtt5Client; import software.amazon.awssdk.iot.AwsIotMqtt5ClientBuilder; import software.amazon.awssdk.iot.iotjobs.IotJobsClient; import software.amazon.awssdk.iot.iotjobs.model.DescribeJobExecutionRequest; import software.amazon.awssdk.iot.iotjobs.model.DescribeJobExecutionResponse; import software.amazon.awssdk.iot.iotjobs.model.DescribeJobExecutionSubscriptionRequest; import software.amazon.awssdk.iot.iotjobs.model.GetPendingJobExecutionsRequest; import software.amazon.awssdk.iot.iotjobs.model.GetPendingJobExecutionsResponse; import software.amazon.awssdk.iot.iotjobs.model.GetPendingJobExecutionsSubscriptionRequest; import software.amazon.awssdk.iot.iotjobs.model.JobExecutionSummary; import software.amazon.awssdk.iot.iotjobs.model.JobStatus; import software.amazon.awssdk.iot.iotjobs.model.RejectedError; import software.amazon.awssdk.iot.iotjobs.model.StartNextJobExecutionResponse; import software.amazon.awssdk.iot.iotjobs.model.StartNextPendingJobExecutionRequest; import software.amazon.awssdk.iot.iotjobs.model.StartNextPendingJobExecutionSubscriptionRequest; import software.amazon.awssdk.iot.iotjobs.model.UpdateJobExecutionRequest; import software.amazon.awssdk.iot.iotjobs.model.UpdateJobExecutionSubscriptionRequest; import java.util.LinkedList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import 
java.util.concurrent.TimeUnit; import utils.commandlineutils.CommandLineUtils; public class Mqtt5JobsSample { // When run normally, we want to check for jobs and process them // When run from CI, we want to just check for jobs static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = true; static CompletableFuture<Void> gotResponse; static List<String> availableJobs = new LinkedList<>(); static String currentJobId; static long currentExecutionNumber = 0; static int currentVersionNumber = 0; static CommandLineUtils cmdUtils; static final class SampleLifecycleEvents implements Mqtt5ClientOptions.LifecycleEvents { CompletableFuture<Void> connectedFuture = new CompletableFuture<>(); CompletableFuture<Void> stoppedFuture = new CompletableFuture<>(); @Override public void onAttemptingConnect(Mqtt5Client client, OnAttemptingConnectReturn onAttemptingConnectReturn) { System.out.println("Mqtt5 Client: Attempting connection..."); } @Override public void onConnectionSuccess(Mqtt5Client client, OnConnectionSuccessReturn onConnectionSuccessReturn) { System.out.println("Mqtt5 Client: Connection success, client ID: " + onConnectionSuccessReturn.getNegotiatedSettings().getAssignedClientID()); System.out.println("Connected to " + (!onConnectionSuccessReturn.getConnAckPacket().getSessionPresent() ? 
"new" : "existing") + " session!"); connectedFuture.complete(null); } @Override public void onConnectionFailure(Mqtt5Client client, OnConnectionFailureReturn onConnectionFailureReturn) { String errorString = CRT.awsErrorString(onConnectionFailureReturn.getErrorCode()); System.out.println("Mqtt5 Client: Connection failed with error: " + errorString); connectedFuture.completeExceptionally(new Exception("Could not connect: " + errorString)); } @Override public void onDisconnection(Mqtt5Client client, OnDisconnectionReturn onDisconnectionReturn) { System.out.println("Mqtt5 Client: Disconnected"); DisconnectPacket disconnectPacket = onDisconnectionReturn.getDisconnectPacket(); if (disconnectPacket != null) { System.out.println("\tDisconnection packet code: " + disconnectPacket.getReasonCode()); System.out.println("\tDisconnection packet reason: " + disconnectPacket.getReasonString()); } } @Override public void onStopped(Mqtt5Client client, OnStoppedReturn onStoppedReturn) { System.out.println("Mqtt5 Client: Stopped"); stoppedFuture.complete(null); } } static void onRejectedError(RejectedError error) { System.out.println("Request rejected: " + error.code.toString() + ": " + error.message); System.exit(1); } static void onGetPendingJobExecutionsAccepted(GetPendingJobExecutionsResponse response) { System.out.println( "Pending Jobs: " + (response.queuedJobs.size() + response.inProgressJobs.size() == 0 ? 
"none" : "")); for (JobExecutionSummary job : response.inProgressJobs) { availableJobs.add(job.jobId); System.out.println(" In Progress: " + job.jobId + " @ " + job.lastUpdatedAt.toString()); } for (JobExecutionSummary job : response.queuedJobs) { availableJobs.add(job.jobId); System.out.println(" " + job.jobId + " @ " + job.lastUpdatedAt.toString()); } gotResponse.complete(null); } static void onDescribeJobExecutionAccepted(DescribeJobExecutionResponse response) { System.out .println("Describe Job: " + response.execution.jobId + " version: " + response.execution.versionNumber); if (response.execution.jobDocument != null) { response.execution.jobDocument.forEach((key, value) -> { System.out.println(" " + key + ": " + value); }); } gotResponse.complete(null); } static void onStartNextPendingJobExecutionAccepted(StartNextJobExecutionResponse response) { System.out.println("Start Job: " + response.execution.jobId); currentJobId = response.execution.jobId; currentExecutionNumber = response.execution.executionNumber; currentVersionNumber = response.execution.versionNumber; gotResponse.complete(null); } public static void main(String[] args) { /** * cmdData is the arguments/input from the command line placed into a single * struct for * use in this sample. This handles all of the command line parsing, validating, * etc. * See the Utils/CommandLineUtils for more information. 
*/ CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("Jobs", args); try { /** * Create the MQTT5 client from the builder */ SampleLifecycleEvents lifecycleEvents = new SampleLifecycleEvents(); AwsIotMqtt5ClientBuilder builder = AwsIotMqtt5ClientBuilder.newDirectMqttBuilderWithMtlsFromPath( cmdData.input_endpoint, cmdData.input_cert, cmdData.input_key); ConnectPacket.ConnectPacketBuilder connectProperties = new ConnectPacket.ConnectPacketBuilder(); connectProperties.withClientId(cmdData.input_clientId); builder.withConnectProperties(connectProperties); builder.withLifeCycleEvents(lifecycleEvents); Mqtt5Client client = builder.build(); builder.close(); MqttClientConnection connection = new MqttClientConnection(client, null); // Create the job client, IotJobsClient throws MqttException IotJobsClient jobs = new IotJobsClient(connection); // Connect client.start(); try { lifecycleEvents.connectedFuture.get(60, TimeUnit.SECONDS); } catch (Exception ex) { throw new RuntimeException("Exception occurred during connect", ex); } { gotResponse = new CompletableFuture<>(); GetPendingJobExecutionsSubscriptionRequest subscriptionRequest = new GetPendingJobExecutionsSubscriptionRequest(); subscriptionRequest.thingName = cmdData.input_thingName; CompletableFuture<Integer> subscribed = jobs.SubscribeToGetPendingJobExecutionsAccepted( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, Mqtt5JobsSample::onGetPendingJobExecutionsAccepted); try { subscribed.get(); System.out.println("Subscribed to GetPendingJobExecutionsAccepted"); } catch (Exception ex) { throw new RuntimeException("Failed to subscribe to GetPendingJobExecutions", ex); } subscribed = jobs.SubscribeToGetPendingJobExecutionsRejected( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, Mqtt5JobsSample::onRejectedError); subscribed.get(); System.out.println("Subscribed to GetPendingJobExecutionsRejected"); GetPendingJobExecutionsRequest publishRequest = new 
GetPendingJobExecutionsRequest(); publishRequest.thingName = cmdData.input_thingName; CompletableFuture<Integer> published = jobs.PublishGetPendingJobExecutions( publishRequest, QualityOfService.AT_LEAST_ONCE); try { published.get(); gotResponse.get(); } catch (Exception ex) { throw new RuntimeException("Exception occurred during publish", ex); } } if (availableJobs.isEmpty()) { System.out.println("No jobs queued, no further work to do"); // If sample is running in CI, there should be at least one job if (isCI == true) { throw new RuntimeException("No jobs queued in CI! At least one job should be queued!"); } } for (String jobId : availableJobs) { gotResponse = new CompletableFuture<>(); DescribeJobExecutionSubscriptionRequest subscriptionRequest = new DescribeJobExecutionSubscriptionRequest(); subscriptionRequest.thingName = cmdData.input_thingName; subscriptionRequest.jobId = jobId; jobs.SubscribeToDescribeJobExecutionAccepted( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, Mqtt5JobsSample::onDescribeJobExecutionAccepted); jobs.SubscribeToDescribeJobExecutionRejected( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, Mqtt5JobsSample::onRejectedError); DescribeJobExecutionRequest publishRequest = new DescribeJobExecutionRequest(); publishRequest.thingName = cmdData.input_thingName; publishRequest.jobId = jobId; publishRequest.includeJobDocument = true; publishRequest.executionNumber = 1L; jobs.PublishDescribeJobExecution(publishRequest, QualityOfService.AT_LEAST_ONCE); gotResponse.get(); } // If sample is not running in CI, then process the available jobs. 
if (isCI == false) { for (int jobIdx = 0; jobIdx < availableJobs.size(); ++jobIdx) { { gotResponse = new CompletableFuture<>(); // Start the next pending job StartNextPendingJobExecutionSubscriptionRequest subscriptionRequest = new StartNextPendingJobExecutionSubscriptionRequest(); subscriptionRequest.thingName = cmdData.input_thingName; jobs.SubscribeToStartNextPendingJobExecutionAccepted( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, Mqtt5JobsSample::onStartNextPendingJobExecutionAccepted); jobs.SubscribeToStartNextPendingJobExecutionRejected( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, Mqtt5JobsSample::onRejectedError); StartNextPendingJobExecutionRequest publishRequest = new StartNextPendingJobExecutionRequest(); publishRequest.thingName = cmdData.input_thingName; publishRequest.stepTimeoutInMinutes = 15L; jobs.PublishStartNextPendingJobExecution(publishRequest, QualityOfService.AT_LEAST_ONCE); gotResponse.get(); } { // Update the service to let it know we're executing gotResponse = new CompletableFuture<>(); UpdateJobExecutionSubscriptionRequest subscriptionRequest = new UpdateJobExecutionSubscriptionRequest(); subscriptionRequest.thingName = cmdData.input_thingName; subscriptionRequest.jobId = currentJobId; jobs.SubscribeToUpdateJobExecutionAccepted( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, (response) -> { System.out.println("Marked job " + currentJobId + " IN_PROGRESS"); gotResponse.complete(null); }); jobs.SubscribeToUpdateJobExecutionRejected( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, Mqtt5JobsSample::onRejectedError); UpdateJobExecutionRequest publishRequest = new UpdateJobExecutionRequest(); publishRequest.thingName = cmdData.input_thingName; publishRequest.jobId = currentJobId; publishRequest.executionNumber = currentExecutionNumber; publishRequest.status = JobStatus.IN_PROGRESS; publishRequest.expectedVersion = currentVersionNumber++; jobs.PublishUpdateJobExecution(publishRequest, QualityOfService.AT_LEAST_ONCE); 
gotResponse.get(); } // Fake doing something Thread.sleep(1000); { // Update the service to let it know we're done gotResponse = new CompletableFuture<>(); UpdateJobExecutionSubscriptionRequest subscriptionRequest = new UpdateJobExecutionSubscriptionRequest(); subscriptionRequest.thingName = cmdData.input_thingName; subscriptionRequest.jobId = currentJobId; jobs.SubscribeToUpdateJobExecutionAccepted( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, (response) -> { System.out.println("Marked job " + currentJobId + " SUCCEEDED"); gotResponse.complete(null); }); jobs.SubscribeToUpdateJobExecutionRejected( subscriptionRequest, QualityOfService.AT_LEAST_ONCE, Mqtt5JobsSample::onRejectedError); UpdateJobExecutionRequest publishRequest = new UpdateJobExecutionRequest(); publishRequest.thingName = cmdData.input_thingName; publishRequest.jobId = currentJobId; publishRequest.executionNumber = currentExecutionNumber; publishRequest.status = JobStatus.SUCCEEDED; publishRequest.expectedVersion = currentVersionNumber++; jobs.PublishUpdateJobExecution(publishRequest, QualityOfService.AT_LEAST_ONCE); gotResponse.get(); } } } // Disconnect client.stop(null); try { lifecycleEvents.stoppedFuture.get(60, TimeUnit.SECONDS); } catch (Exception ex) { System.out.println("Exception encountered: " + ex.toString()); System.exit(1); } /* Close the client to free memory */ connection.close(); client.close(); } catch (CrtRuntimeException | InterruptedException | ExecutionException ex) { System.out.println("Exception encountered: " + ex.toString()); } CrtResource.waitForNoResources(); System.out.println("Complete!"); } }
497
0
Create_ds/aws-iot-device-sdk-java-v2/samples/Greengrass/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/Greengrass/src/main/java/greengrass/BasicDiscovery.java
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ package greengrass; import software.amazon.awssdk.crt.CrtResource; import software.amazon.awssdk.crt.CrtRuntimeException; import software.amazon.awssdk.crt.http.HttpProxyOptions; import software.amazon.awssdk.crt.io.*; import software.amazon.awssdk.crt.mqtt.*; import software.amazon.awssdk.iot.AwsIotMqttConnectionBuilder; import software.amazon.awssdk.iot.discovery.DiscoveryClient; import software.amazon.awssdk.iot.discovery.DiscoveryClientConfig; import software.amazon.awssdk.iot.discovery.model.ConnectivityInfo; import software.amazon.awssdk.iot.discovery.model.DiscoverResponse; import software.amazon.awssdk.iot.discovery.model.GGCore; import software.amazon.awssdk.iot.discovery.model.GGGroup; import java.io.File; import java.nio.charset.StandardCharsets; import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import static software.amazon.awssdk.iot.discovery.DiscoveryClient.TLS_EXT_ALPN; import utils.commandlineutils.CommandLineUtils; public class BasicDiscovery { // When run normally, we want to exit nicely even if something goes wrong. // When run from CI, we want to let an exception escape which in turn causes the // exec:java task to return a non-zero exit code. static String ciPropValue = System.getProperty("aws.crt.ci"); static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue); // Needed to access command line input data in getClientFromDiscovery static String input_thingName; static String input_certPath; static String input_keyPath; static CommandLineUtils cmdUtils; public static void main(String[] args) { /** * cmdData is the arguments/input from the command line placed into a single struct for * use in this sample. This handles all of the command line parsing, validating, etc. 
* See the Utils/CommandLineUtils for more information. */ CommandLineUtils.SampleCommandLineData cmdData = CommandLineUtils.getInputForIoTSample("BasicDiscovery", args); input_thingName = cmdData.input_thingName; input_certPath = cmdData.input_cert; input_keyPath = cmdData.input_key; try(final TlsContextOptions tlsCtxOptions = TlsContextOptions.createWithMtlsFromPath(cmdData.input_cert, cmdData.input_key)) { if(TlsContextOptions.isAlpnSupported()) { tlsCtxOptions.withAlpnList(TLS_EXT_ALPN); } if(cmdData.input_ca != null) { tlsCtxOptions.overrideDefaultTrustStoreFromPath(null, cmdData.input_ca); } HttpProxyOptions proxyOptions = null; if (cmdData.input_proxyHost != null && cmdData.input_proxyPort > 0) { proxyOptions = new HttpProxyOptions(); proxyOptions.setHost(cmdData.input_proxyHost); proxyOptions.setPort(cmdData.input_proxyPort); } try ( final SocketOptions socketOptions = new SocketOptions(); final DiscoveryClientConfig discoveryClientConfig = new DiscoveryClientConfig(tlsCtxOptions, socketOptions, cmdData.input_signingRegion, 1, proxyOptions); final DiscoveryClient discoveryClient = new DiscoveryClient(discoveryClientConfig)) { DiscoverResponse response = discoveryClient.discover(input_thingName).get(60, TimeUnit.SECONDS); if (isCI) { System.out.println("Received a greengrass discovery result! 
Not showing result in CI for possible data sensitivity."); } else { printGreengrassGroupList(response.getGGGroups(), ""); } if (cmdData.inputPrintDiscoverRespOnly == false) { try (final MqttClientConnection connection = getClientFromDiscovery(discoveryClient)) { if ("subscribe".equals(cmdData.input_mode) || "both".equals(cmdData.input_mode)) { final CompletableFuture<Integer> subFuture = connection.subscribe(cmdData.input_topic, QualityOfService.AT_MOST_ONCE, message -> { System.out.println(String.format("Message received on topic %s: %s", message.getTopic(), new String(message.getPayload(), StandardCharsets.UTF_8))); }); subFuture.get(); } final Scanner scanner = new Scanner(System.in); while (true) { String input = null; if ("publish".equals(cmdData.input_mode) || "both".equals(cmdData.input_mode)) { System.out.println("Enter the message you want to publish to topic " + cmdData.input_topic + " and press Enter. " + "Type 'exit' or 'quit' to exit this program: "); input = scanner.nextLine(); } if ("exit".equals(input) || "quit".equals(input)) { System.out.println("Terminating..."); break; } if ("publish".equals(cmdData.input_mode) || "both".equals(cmdData.input_mode)) { final CompletableFuture<Integer> publishResult = connection.publish(new MqttMessage(cmdData.input_topic, input.getBytes(StandardCharsets.UTF_8), QualityOfService.AT_MOST_ONCE, false)); Integer result = publishResult.get(); } } } } } } catch (CrtRuntimeException | InterruptedException | ExecutionException | TimeoutException ex) { System.out.println("Exception thrown: " + ex.toString()); ex.printStackTrace(); } CrtResource.waitForNoResources(); System.out.println("Complete!"); } private static void printGreengrassGroupList(List<GGGroup> groupList, String prefix) { for (int i = 0; i < groupList.size(); i++) { GGGroup group = groupList.get(i); System.out.println(prefix + "Group ID: " + group.getGGGroupId()); printGreengrassCoreList(group.getCores(), " "); } } private static void 
printGreengrassCoreList(List<GGCore> coreList, String prefix) { for (int i = 0; i < coreList.size(); i++) { GGCore core = coreList.get(i); System.out.println(prefix + "Thing ARN: " + core.getThingArn()); printGreengrassConnectivityList(core.getConnectivity(), prefix + " "); } } private static void printGreengrassConnectivityList(List<ConnectivityInfo> connectivityList, String prefix) { for (int i = 0; i < connectivityList.size(); i++) { ConnectivityInfo connectivityInfo = connectivityList.get(i); System.out.println(prefix + "Connectivity ID: " + connectivityInfo.getId()); System.out.println(prefix + "Connectivity Host Address: " + connectivityInfo.getHostAddress()); System.out.println(prefix + "Connectivity Port: " + connectivityInfo.getPortNumber()); } } private static MqttClientConnection getClientFromDiscovery(final DiscoveryClient discoveryClient ) throws ExecutionException, InterruptedException { final CompletableFuture<DiscoverResponse> futureResponse = discoveryClient.discover(input_thingName); final DiscoverResponse response = futureResponse.get(); if(response.getGGGroups() != null) { final Optional<GGGroup> groupOpt = response.getGGGroups().stream().findFirst(); if(groupOpt.isPresent()) { final GGGroup group = groupOpt.get(); final GGCore core = group.getCores().stream().findFirst().get(); for (ConnectivityInfo connInfo : core.getConnectivity()) { final String dnsOrIp = connInfo.getHostAddress(); final Integer port = connInfo.getPortNumber(); System.out.println(String.format("Connecting to group ID %s, with thing arn %s, using endpoint %s:%d", group.getGGGroupId(), core.getThingArn(), dnsOrIp, port)); final AwsIotMqttConnectionBuilder connectionBuilder = AwsIotMqttConnectionBuilder.newMtlsBuilderFromPath(input_certPath, input_keyPath) .withClientId(input_thingName) .withPort(port.shortValue()) .withEndpoint(dnsOrIp) .withConnectionEventCallbacks(new MqttClientConnectionEvents() { @Override public void onConnectionInterrupted(int errorCode) { 
System.out.println("Connection interrupted: " + errorCode); } @Override public void onConnectionResumed(boolean sessionPresent) { System.out.println("Connection resumed!"); } }); if (group.getCAs() != null) { connectionBuilder.withCertificateAuthority(group.getCAs().get(0)); } try (MqttClientConnection connection = connectionBuilder.build()) { if (connection.connect().get()) { System.out.println("Session resumed"); } else { System.out.println("Started a clean session"); } /* This lets the connection escape the try block without getting cleaned up */ connection.addRef(); return connection; } catch (Exception e) { System.out.println(String.format("Connection failed with exception %s", e.toString())); } } throw new RuntimeException("ThingName " + input_thingName + " could not connect to the green grass core using any of the endpoint connectivity options"); } } throw new RuntimeException("ThingName " + input_thingName + " does not have a Greengrass group/core configuration"); } }
498
0
Create_ds/aws-iot-device-sdk-java-v2/samples/Mqtt5/SharedSubscription/src/main/java
Create_ds/aws-iot-device-sdk-java-v2/samples/Mqtt5/SharedSubscription/src/main/java/sharedsubscription/SharedSubscription.java
/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */

package mqtt5.sharedsubscription;

import software.amazon.awssdk.crt.CRT;
import software.amazon.awssdk.crt.CrtResource;
import software.amazon.awssdk.crt.CrtRuntimeException;
import software.amazon.awssdk.crt.mqtt5.*;
import software.amazon.awssdk.crt.mqtt5.Mqtt5ClientOptions.LifecycleEvents;
import software.amazon.awssdk.crt.mqtt5.packets.*;
import software.amazon.awssdk.iot.AwsIotMqtt5ClientBuilder;

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import utils.commandlineutils.CommandLineUtils;

/**
 * MQTT5 shared-subscription sample: one publisher client and two subscriber
 * clients share a "$share/<group>/<topic>" subscription so the broker
 * load-balances publishes between the two subscribers.
 */
public class SharedSubscription {

    /**
     * When run normally, we want to exit nicely even if something goes wrong.
     * When run from CI, we want to let an exception escape which in turn causes the
     * exec:java task to return a non-zero exit code.
     */
    static String ciPropValue = System.getProperty("aws.crt.ci");
    static boolean isCI = ciPropValue != null && Boolean.valueOf(ciPropValue);

    /* Used for command line processing */
    static CommandLineUtils cmdUtils;

    /*
     * When called during a CI run, throw an exception that will escape and fail the exec:java task.
     * When called otherwise, print what went wrong (if anything) and just continue (return from main).
     */
    static void onApplicationFailure(Throwable cause) {
        if (isCI) {
            throw new RuntimeException("Mqtt5 SharedSubscription: execution failure", cause);
        } else if (cause != null) {
            System.out.println("Exception encountered: " + cause.toString());
        }
    }

    /**
     * The interface that contains the functions invoked when the MQTT5 client has a life-cycle event
     * (connect, disconnect, etc) that can be reacted to.
     */
    static final class SampleLifecycleEvents implements Mqtt5ClientOptions.LifecycleEvents {
        SampleMqtt5Client sampleClient;
        // Completed (possibly exceptionally) by the connection callbacks below.
        CompletableFuture<Void> connectedFuture = new CompletableFuture<>();
        CompletableFuture<Void> stoppedFuture = new CompletableFuture<>();

        SampleLifecycleEvents(SampleMqtt5Client client) {
            sampleClient = client;
            if (sampleClient == null) {
                System.out.println("Invalid sample client passed to SampleLifecycleEvents");
            }
        }

        @Override
        public void onAttemptingConnect(Mqtt5Client client, OnAttemptingConnectReturn onAttemptingConnectReturn) {
            System.out.println("[" + sampleClient.name + "]: Attempting connection...");
        }

        @Override
        public void onConnectionSuccess(Mqtt5Client client, OnConnectionSuccessReturn onConnectionSuccessReturn) {
            System.out.println("[" + sampleClient.name + "]: Connection success, client ID: "
                + onConnectionSuccessReturn.getNegotiatedSettings().getAssignedClientID());
            connectedFuture.complete(null);
        }

        @Override
        public void onConnectionFailure(Mqtt5Client client, OnConnectionFailureReturn onConnectionFailureReturn) {
            String errorString = CRT.awsErrorString(onConnectionFailureReturn.getErrorCode());
            System.out.println("[" + sampleClient.name + "]: Connection failed with error: " + errorString);
            connectedFuture.completeExceptionally(new Exception("Could not connect: " + errorString));
        }

        @Override
        public void onDisconnection(Mqtt5Client client, OnDisconnectionReturn onDisconnectionReturn) {
            System.out.println("[" + sampleClient.name + "]: Disconnected");
            DisconnectPacket disconnectPacket = onDisconnectionReturn.getDisconnectPacket();
            if (disconnectPacket != null) {
                System.out.println("\tDisconnection packet code: " + disconnectPacket.getReasonCode());
                System.out.println("\tDisconnection packet reason: " + disconnectPacket.getReasonString());
                if (disconnectPacket.getReasonCode() == DisconnectPacket.DisconnectReasonCode.SHARED_SUBSCRIPTIONS_NOT_SUPPORTED) {
                    /* Stop the client, which will interrupt the subscription and stop the sample */
                    client.stop(null);
                }
            }
        }

        @Override
        public void onStopped(Mqtt5Client client, OnStoppedReturn onStoppedReturn) {
            System.out.println("[" + sampleClient.name + "]: Stopped");
            stoppedFuture.complete(null);
        }
    }

    /**
     * The interface that contains the functions invoked when the MQTT5 client gets a message/publish
     * on a topic the MQTT5 client has subscribed to.
     */
    static final class SamplePublishEvents implements Mqtt5ClientOptions.PublishEvents {
        SampleMqtt5Client sampleClient;

        SamplePublishEvents(SampleMqtt5Client client) {
            sampleClient = client;
        }

        @Override
        public void onMessageReceived(Mqtt5Client client, PublishReturn publishReturn) {
            // Only attribute the publish when the callback's client matches this wrapper's client.
            if (sampleClient != null && sampleClient.client == client) {
                System.out.println("[" + sampleClient.name + "] Received a publish");
            }
            PublishPacket publishPacket = publishReturn.getPublishPacket();
            if (publishPacket != null) {
                System.out.println("\tPublish received on topic: " + publishPacket.getTopic());
                System.out.println("\tMessage: " + new String(publishPacket.getPayload()));
                List<UserProperty> packetProperties = publishPacket.getUserProperties();
                if (packetProperties != null) {
                    for (int i = 0; i < packetProperties.size(); i++) {
                        UserProperty property = packetProperties.get(i);
                        System.out.println("\t\twith UserProperty: (" + property.key + ", " + property.value + ")");
                    }
                }
            }
        }
    }

    /**
     * For the purposes of this sample, we need to associate certain variables with a particular MQTT5 client
     * and to do so we use this class to hold all the data for a particular client used in the sample.
     */
    static final class SampleMqtt5Client {
        Mqtt5Client client;
        String name;
        SamplePublishEvents publishEvents;
        SampleLifecycleEvents lifecycleEvents;

        /**
         * Creates a MQTT5 client using direct MQTT5 via mTLS with the passed input data.
         *
         * @param input_endpoint   IoT endpoint to connect to
         * @param input_cert       path to the client certificate
         * @param input_key        path to the client private key
         * @param input_ca         optional path to a CA file (may be null or empty to skip)
         * @param input_clientId   MQTT client ID to use
         * @param input_clientName human-readable name used in log output
         * @return the wrapped client, or {@code null} if client creation failed
         */
        public static SampleMqtt5Client createMqtt5Client(
            String input_endpoint, String input_cert, String input_key, String input_ca,
            String input_clientId, String input_clientName) {

            SampleMqtt5Client sampleClient = new SampleMqtt5Client();
            SamplePublishEvents publishEvents = new SamplePublishEvents(sampleClient);
            SampleLifecycleEvents lifecycleEvents = new SampleLifecycleEvents(sampleClient);

            Mqtt5Client client;
            try {
                AwsIotMqtt5ClientBuilder builder =
                    AwsIotMqtt5ClientBuilder.newDirectMqttBuilderWithMtlsFromPath(input_endpoint, input_cert, input_key);
                ConnectPacket.ConnectPacketBuilder connectProperties = new ConnectPacket.ConnectPacketBuilder();
                connectProperties.withClientId(input_clientId);
                builder.withConnectProperties(connectProperties);
                /* BUGFIX: the original compared strings with `!=`, which tests object
                 * identity, not content. Use a null-safe emptiness check instead. */
                if (input_ca != null && !input_ca.isEmpty()) {
                    builder.withCertificateAuthorityFromPath(null, input_ca);
                }
                builder.withLifeCycleEvents(lifecycleEvents);
                builder.withPublishEvents(publishEvents);
                client = builder.build();
                builder.close();
            } catch (CrtRuntimeException ex) {
                System.out.println("Client creation failed!");
                return null;
            }

            sampleClient.client = client;
            sampleClient.name = input_clientName;
            sampleClient.publishEvents = publishEvents;
            sampleClient.lifecycleEvents = lifecycleEvents;
            return sampleClient;
        }
    }

    public static void main(String[] args) {
        /**
         * cmdData is the arguments/input from the command line placed into a single struct for
         * use in this sample. This handles all of the command line parsing, validating, etc.
         * See the Utils/CommandLineUtils for more information.
         */
        CommandLineUtils.SampleCommandLineData cmdData =
            CommandLineUtils.getInputForIoTSample("Mqtt5SharedSubscription", args);

        /* Construct the shared topic */
        String input_sharedTopic = "$share/" + cmdData.input_groupIdentifier + "/" + cmdData.input_topic;

        /* This sample uses a publisher and two subscribers */
        SampleMqtt5Client publisher = null;
        SampleMqtt5Client subscriberOne = null;
        SampleMqtt5Client subscriberTwo = null;

        try {
            /* Create the MQTT5 clients: one publisher and two subscribers */
            publisher = SampleMqtt5Client.createMqtt5Client(
                cmdData.input_endpoint, cmdData.input_cert, cmdData.input_key, cmdData.input_ca,
                cmdData.input_clientId + '1', "Publisher");
            subscriberOne = SampleMqtt5Client.createMqtt5Client(
                cmdData.input_endpoint, cmdData.input_cert, cmdData.input_key, cmdData.input_ca,
                cmdData.input_clientId + '2', "Subscriber One");
            subscriberTwo = SampleMqtt5Client.createMqtt5Client(
                cmdData.input_endpoint, cmdData.input_cert, cmdData.input_key, cmdData.input_ca,
                cmdData.input_clientId + '3', "Subscriber Two");

            /* ROBUSTNESS: createMqtt5Client returns null on failure; fail with a clear
             * message rather than a NullPointerException on the first start() call. */
            if (publisher == null || subscriberOne == null || subscriberTwo == null) {
                throw new RuntimeException("Failed to create one or more MQTT5 clients");
            }

            /* Connect all the clients */
            publisher.client.start();
            publisher.lifecycleEvents.connectedFuture.get(60, TimeUnit.SECONDS);
            System.out.println("[" + publisher.name + "]: Connected");
            subscriberOne.client.start();
            subscriberOne.lifecycleEvents.connectedFuture.get(60, TimeUnit.SECONDS);
            System.out.println("[" + subscriberOne.name + "]: Connected");
            subscriberTwo.client.start();
            subscriberTwo.lifecycleEvents.connectedFuture.get(60, TimeUnit.SECONDS);
            System.out.println("[" + subscriberTwo.name + "]: Connected");

            /* Subscribe to the shared topic on the two subscribers */
            SubscribePacket.SubscribePacketBuilder subscribeBuilder = new SubscribePacket.SubscribePacketBuilder();
            subscribeBuilder.withSubscription(input_sharedTopic, QOS.AT_LEAST_ONCE, false, false,
                SubscribePacket.RetainHandlingType.DONT_SEND);
            subscriberOne.client.subscribe(subscribeBuilder.build()).get(60, TimeUnit.SECONDS);
            System.out.println(
                "[" + subscriberOne.name + "]: Subscribed to topic '" + cmdData.input_topic
                + "' in shared subscription group '" + cmdData.input_groupIdentifier + "'.");
            System.out.println("[" + subscriberOne.name + "]: Full subscribed topic is '" + input_sharedTopic + "'.");
            subscriberTwo.client.subscribe(subscribeBuilder.build()).get(60, TimeUnit.SECONDS);
            System.out.println(
                "[" + subscriberTwo.name + "]: Subscribed to topic '" + cmdData.input_topic
                + "' in shared subscription group '" + cmdData.input_groupIdentifier + "'.");
            System.out.println("[" + subscriberTwo.name + "]: Full subscribed topic is '" + input_sharedTopic + "'.");

            /* Publish using the publisher client */
            PublishPacket.PublishPacketBuilder publishBuilder = new PublishPacket.PublishPacketBuilder();
            publishBuilder.withTopic(cmdData.input_topic).withQOS(QOS.AT_LEAST_ONCE);
            int count = 0;
            if (cmdData.input_count > 0) {
                while (count++ < cmdData.input_count) {
                    publishBuilder.withPayload(
                        ("\"" + cmdData.input_message + ": " + String.valueOf(count) + "\"").getBytes());
                    publisher.client.publish(publishBuilder.build()).get(60, TimeUnit.SECONDS);
                    System.out.println("[" + publisher.name + "]: Sent publish");
                    Thread.sleep(1000);
                }
                /* Wait 5 seconds to let the last publish go out before unsubscribing */
                Thread.sleep(5000);
            } else {
                System.out.println("Skipping publishing messages due to message count being zero...");
            }

            /* Unsubscribe from the shared topic on the two subscribers */
            UnsubscribePacket.UnsubscribePacketBuilder unsubscribeBuilder = new UnsubscribePacket.UnsubscribePacketBuilder();
            unsubscribeBuilder.withSubscription(input_sharedTopic);
            subscriberOne.client.unsubscribe(unsubscribeBuilder.build()).get(60, TimeUnit.SECONDS);
            System.out.println(
                "[" + subscriberOne.name + "]: Unsubscribed to topic '" + cmdData.input_topic
                + "' in shared subscription group '" + cmdData.input_groupIdentifier + "'.");
            System.out.println("[" + subscriberOne.name + "]: Full unsubscribed topic is '" + input_sharedTopic + "'.");
            subscriberTwo.client.unsubscribe(unsubscribeBuilder.build()).get(60, TimeUnit.SECONDS);
            System.out.println(
                "[" + subscriberTwo.name + "]: Unsubscribed to topic '" + cmdData.input_topic
                + "' in shared subscription group '" + cmdData.input_groupIdentifier + "'.");
            System.out.println("[" + subscriberTwo.name + "]: Full unsubscribed topic is '" + input_sharedTopic + "'.");

            /* Disconnect all the clients */
            publisher.client.stop(null);
            publisher.lifecycleEvents.stoppedFuture.get(60, TimeUnit.SECONDS);
            System.out.println("[" + publisher.name + "]: Fully stopped");
            subscriberOne.client.stop(null);
            subscriberOne.lifecycleEvents.stoppedFuture.get(60, TimeUnit.SECONDS);
            System.out.println("[" + subscriberOne.name + "]: Fully stopped");
            subscriberTwo.client.stop(null);
            subscriberTwo.lifecycleEvents.stoppedFuture.get(60, TimeUnit.SECONDS);
            System.out.println("[" + subscriberTwo.name + "]: Fully stopped");

        } catch (Exception ex) {
            /* Something bad happened, abort and report! */
            onApplicationFailure(ex);
        } finally {
            /* Close all the MQTT5 clients to make sure no native memory is leaked */
            if (publisher != null && publisher.client != null) {
                publisher.client.close();
            }
            if (subscriberOne != null && subscriberOne.client != null) {
                subscriberOne.client.close();
            }
            if (subscriberTwo != null && subscriberTwo.client != null) {
                subscriberTwo.client.close();
            }
            CrtResource.waitForNoResources();
        }
        System.out.println("Complete!");
    }
}
499