index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/dataset/QueryableTcProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.dataset;
import org.apache.clerezza.IRI;
/**
 * Extends the TcProvider interface for providers that support SPARQL queries
 * in addition to plain graph access.
 */
public interface QueryableTcProvider extends TcProvider {
/**
 * Executes any SPARQL query. The concrete type of the result object varies
 * depending on the query form (SELECT, CONSTRUCT/DESCRIBE, ASK).
 *
 * @param query
 *            the SPARQL query to execute
 * @param defaultGraphUri
 *            the default ImmutableGraph against which to execute the query
 *            if no FROM clause is present
 * @return the resulting ResultSet, ImmutableGraph or Boolean value
 */
public Object executeSparqlQuery(String query, IRI defaultGraphUri);
}
| 300 |
0 | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/dataset/EntityUndeletableException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.dataset;
import org.apache.clerezza.IRI;
/**
 * Thrown on an attempt to delete an entity with a provider that
 * supports the delete operation when the specified entity cannot be deleted.
 *
 * @author reto
 */
public class EntityUndeletableException extends RuntimeException {

    // stable serialization id, recommended for Serializable subclasses
    private static final long serialVersionUID = 1L;

    // name of the undeletable entity; set once in the constructor
    private final IRI entityName;

    /**
     * Creates an exception indicating that the entity with the specified name
     * cannot be deleted.
     *
     * @param entityName the name of the entity which is undeletable
     */
    public EntityUndeletableException(IRI entityName) {
        super("This entity is undeletable: " + entityName);
        this.entityName = entityName;
    }

    /**
     * @return the name of the entity which is undeletable
     */
    public IRI getEntityName() {
        return entityName;
    }
}
| 301 |
0 | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/dataset/TcProviderMultiplexer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.dataset;
import org.apache.clerezza.Graph;
import org.apache.clerezza.IRI;
import org.apache.clerezza.ImmutableGraph;
import java.lang.ref.WeakReference;
import java.util.*;
/**
 * Makes a set of WeightedTcProvider appear as one TcProvider. It delegates
 * requests to the WeightedTcProvider with the highest weight that can serve
 * them.
 *
 * @author reto
 */
public class TcProviderMultiplexer implements TcProvider {

    /** Providers ordered descending by weight (see WeightedProviderComparator). */
    protected SortedSet<WeightedTcProvider> providerList = new TreeSet<>(
            new WeightedProviderComparator());

    /**
     * Mapping of graph names to cached Graph holders. Makes sure that per URI
     * only one instance of the Graph is used, otherwise the locks in the
     * <code>Graph</code>s would have no effect between different instances and
     * concurrency issues could occur.
     */
    private final Map<IRI, MGraphHolder> mGraphCache =
            Collections.synchronizedMap(new HashMap<IRI, MGraphHolder>());

    /**
     * Flag to indicate whether mgraphs should be cached for faster access. By
     * default caching is enabled for backward compatibility.
     */
    private boolean isCachingEnabled = true;

    /**
     * Registers a provider.
     *
     * @param provider the provider to be registered
     */
    public void addWeightedTcProvider(WeightedTcProvider provider) {
        providerList.add(provider);
        updateGraphCache(provider, true);
    }

    /**
     * Unregisters a provider.
     *
     * @param provider the provider to be deregistered
     */
    public void removeWeightedTcProvider(
            WeightedTcProvider provider) {
        providerList.remove(provider);
        updateGraphCache(provider, false);
    }

    /**
     * Subclasses overwrite this method to be notified when a new
     * ImmutableGraph is available (either because it has been created or being
     * provided by a newly added WeightedTcProvider). The default implementation
     * does nothing.
     *
     * @param name the name of the ImmutableGraph that appeared
     */
    protected void graphAppears(IRI name) {
    }

    /**
     * Subclasses overwrite this method to be notified when a new
     * Graph is available (either because it has been created or being
     * provided by a newly added WeightedTcProvider). The default implementation
     * does nothing.
     *
     * @param name the name of the Graph that appeared
     */
    protected void mGraphAppears(IRI name) {
    }

    /**
     * Subclasses overwrite this method to be notified when a Graph is
     * no longer available (either because it has been deleted or because its
     * WeightedTcProvider was removed). The default implementation does nothing.
     *
     * For implementational reasons this may also be invoked for names of
     * Graphs not previously registered.
     *
     * @param name the name of the Graph that disappeared
     */
    protected void tcDisappears(IRI name) {
    }

    /**
     * Updates the graph cache AFTER a <code>provider</code> was
     * bound or unbound.
     * This method also takes care of the appear/disappear notifications for
     * provided triple collections based on the weight of all affected
     * providers.
     *
     * @param provider
     *            the provider that was added or removed
     * @param providerAdded
     *            <code>true</code> if <code>provider</code> was added to
     *            the provider list, <code>false</code> if it was removed
     */
    private void updateGraphCache(WeightedTcProvider provider,
            boolean providerAdded) {
        Set<IRI> uriSet = provider.listGraphs();
        if (!(uriSet == null || uriSet.isEmpty())) {
            if (providerAdded) {
                weightedProviderAdded(provider, uriSet);
            } else {
                weightedProviderRemoved(provider, uriSet);
            }
        }
    }

    private void weightedProviderAdded(WeightedTcProvider newProvider,
            Set<IRI> newProvidedUris) {
        Set<WeightedTcProvider> lowerWeightedProviderList = getLowerWeightedProvider(newProvider);
        if (isCachingEnabled()) {
            for (IRI name : newProvidedUris) {
                final MGraphHolder holder = mGraphCache.get(name);
                if ((holder != null) && (holder.getWeightedTcProvider() != null)) {
                    if (lowerWeightedProviderList.contains(holder.getWeightedTcProvider())) {
                        // the new provider shadows the cached, lower-weighted one
                        tcDisappears(name);
                        mGraphCache.remove(name);
                    } else {
                        // cached entry comes from a higher-weighted provider; keep it
                        continue;
                    }
                }
                Graph triples = newProvider.getGraph(name);
                // BUG FIX: the former check "triples instanceof Graph" was always
                // true (getGraph returns Graph), so immutable graphs were cached
                // and announced as mutable and graphAppears was never called.
                // Distinguish via ImmutableGraph, consistent with getGraph(IRI).
                if (triples instanceof ImmutableGraph) {
                    graphAppears(name);
                } else {
                    mGraphCache.put(name, new MGraphHolder(newProvider, ensureLockable(triples)));
                    mGraphAppears(name);
                }
            }
        }
    }

    /** Returns the providers that rank below newProvider in the ordered list. */
    private Set<WeightedTcProvider> getLowerWeightedProvider(
            WeightedTcProvider newProvider) {
        boolean referenceProviderPassed = false;
        Set<WeightedTcProvider> lowerWeightedProviderList = new HashSet<WeightedTcProvider>();
        for (WeightedTcProvider weightedProvider : providerList) {
            if (referenceProviderPassed) {
                lowerWeightedProviderList.add(weightedProvider);
            } else if (newProvider.equals(weightedProvider)) {
                referenceProviderPassed = true;
            }
        }
        return lowerWeightedProviderList;
    }

    private void weightedProviderRemoved(WeightedTcProvider oldProvider,
            Set<IRI> oldProvidedUris) {
        for (IRI name : oldProvidedUris) {
            final MGraphHolder holder = mGraphCache.get(name);
            if ((holder != null) && (holder.getWeightedTcProvider() != null)
                    && holder.getWeightedTcProvider().equals(oldProvider)) {
                tcDisappears(name);
                mGraphCache.remove(name);
                if (isCachingEnabled()) {
                    // check if another WeightedTcProvider has the Graph,
                    // and if so re-announce it.
                    for (WeightedTcProvider provider : providerList) {
                        try {
                            Graph triples = provider.getGraph(name);
                            // BUG FIX: as in weightedProviderAdded, the former
                            // "instanceof Graph" check was always true; use
                            // ImmutableGraph to pick the right notification.
                            if (triples instanceof ImmutableGraph) {
                                graphAppears(name);
                            } else {
                                mGraphCache.put(name, new MGraphHolder(provider, ensureLockable(triples)));
                                mGraphAppears(name);
                            }
                            break;
                        } catch (NoSuchEntityException e) {
                            // this provider doesn't have it; try the next one
                        }
                    }
                }
            }
        }
    }

    @Override
    public ImmutableGraph getImmutableGraph(IRI name) throws NoSuchEntityException {
        for (TcProvider provider : providerList) {
            try {
                return provider.getImmutableGraph(name);
            } catch (NoSuchEntityException e) {
                //we do nothing and try our luck with the next provider
            } catch (IllegalArgumentException e) {
                //we do nothing and try our luck with the next provider
            }
        }
        throw new NoSuchEntityException(name);
    }

    @Override
    public Graph getMGraph(IRI name)
            throws NoSuchEntityException {
        // double-checked read so concurrent callers share one cached instance
        Graph result = getMGraphFromCache(name);
        if (result == null) {
            synchronized (this) {
                result = getMGraphFromCache(name);
                if (result == null) {
                    result = getUnsecuredMGraphAndAddToCache(name);
                }
            }
        }
        return result;
    }

    private Graph getMGraphFromCache(IRI name) {
        MGraphHolder holder = mGraphCache.get(name);
        if (holder == null) {
            return null;
        }
        // may be null if the WeakReference was cleared
        return holder.getMGraph();
    }

    private Graph getUnsecuredMGraphAndAddToCache(IRI name)
            throws NoSuchEntityException {
        for (WeightedTcProvider provider : providerList) {
            try {
                Graph providedMGraph = provider.getMGraph(name);
                Graph result = ensureLockable(providedMGraph);
                if (isCachingEnabled()) {
                    mGraphCache.put(name, new MGraphHolder(
                            provider, result));
                }
                return result;
            } catch (NoSuchEntityException e) {
                //we do nothing and try our luck with the next provider
            } catch (IllegalArgumentException e) {
                //we do nothing and try our luck with the next provider
            }
        }
        throw new NoSuchEntityException(name);
    }

    @Override
    public Graph getGraph(IRI name)
            throws NoSuchEntityException {
        Graph result;
        for (WeightedTcProvider provider : providerList) {
            try {
                result = provider.getGraph(name);
                if (result instanceof ImmutableGraph) {
                    return result;
                } else {
                    // This is to ensure the Graph gets added to the cache
                    return getMGraph(name);
                }
            } catch (NoSuchEntityException e) {
                //we do nothing and try our luck with the next provider
            } catch (IllegalArgumentException e) {
                //we do nothing and try our luck with the next provider
            }
        }
        throw new NoSuchEntityException(name);
    }

    @Override
    public Graph createGraph(IRI name)
            throws UnsupportedOperationException {
        for (WeightedTcProvider provider : providerList) {
            try {
                Graph result = provider.createGraph(name);
                // unregisters a possible ImmutableGraph or Graph service under this name
                // provided by a WeightedTcProvider with a lower weight.
                tcDisappears(name);
                if (isCachingEnabled()) {
                    mGraphCache.put(name, new MGraphHolder(provider, null));
                }
                mGraphAppears(name);
                return result;
            } catch (UnsupportedOperationException e) {
                //we do nothing and try our luck with the next provider
            } catch (IllegalArgumentException e) {
                //we do nothing and try our luck with the next provider
            }
        }
        throw new UnsupportedOperationException(
                "No provider could create Graph.");
    }

    @Override
    public ImmutableGraph createImmutableGraph(IRI name, Graph triples) {
        for (WeightedTcProvider provider : providerList) {
            try {
                ImmutableGraph result = provider.createImmutableGraph(name, triples);
                // unregisters a possible ImmutableGraph or Graph service under this name
                // provided by a WeightedTcProvider with a lower weight.
                tcDisappears(name);
                if (isCachingEnabled()) {
                    mGraphCache.put(name, new MGraphHolder(provider, null));
                }
                graphAppears(name);
                return result;
            } catch (UnsupportedOperationException e) {
                //we do nothing and try our luck with the next provider
            } catch (IllegalArgumentException e) {
                //we do nothing and try our luck with the next provider
            }
        }
        throw new UnsupportedOperationException(
                "No provider could create ImmutableGraph.");
    }

    @Override
    public void deleteGraph(IRI name) {
        for (TcProvider provider : providerList) {
            try {
                provider.deleteGraph(name);
                // evict the cache entry only if it was served by this provider
                final MGraphHolder holder = mGraphCache.get(name);
                if ((holder != null)
                        && (holder.getWeightedTcProvider() != null)
                        && holder.getWeightedTcProvider().equals(provider)) {
                    tcDisappears(name);
                    mGraphCache.remove(name);
                }
                return;
            } catch (UnsupportedOperationException e) {
                // we do nothing and try our luck with the next provider
            } catch (NoSuchEntityException e) {
                //we do nothing and try our luck with the next provider
            } catch (IllegalArgumentException e) {
                //we do nothing and try our luck with the next provider
            }
        }
        // this throws a NoSuchEntityException if the ImmutableGraph doesn't exist
        getGraph(name);
        // the entity exists but cannot be deleted
        throw new UnsupportedOperationException(
                "No provider could delete the entity.");
    }

    @Override
    public Set<IRI> getNames(ImmutableGraph graph) {
        // union of the names under which any provider knows this graph
        Set<IRI> result = new HashSet<IRI>();
        for (TcProvider provider : providerList) {
            result.addAll(provider.getNames(graph));
        }
        return result;
    }

    @Override
    public Set<IRI> listGraphs() {
        Set<IRI> result = new HashSet<IRI>();
        for (TcProvider provider : providerList) {
            result.addAll(provider.listGraphs());
        }
        return result;
    }

    @Override
    public Set<IRI> listMGraphs() {
        Set<IRI> result = new HashSet<IRI>();
        for (TcProvider provider : providerList) {
            result.addAll(provider.listMGraphs());
        }
        return result;
    }

    @Override
    public Set<IRI> listImmutableGraphs() {
        Set<IRI> result = new HashSet<IRI>();
        for (TcProvider provider : providerList) {
            result.addAll(provider.listImmutableGraphs());
        }
        return result;
    }

    private Graph ensureLockable(Graph providedMGraph) {
        // Graphs are always lockable now; kept as an extension point
        return providedMGraph;
    }

    /**
     * Holds a weakly referenced Graph together with the WeightedTcProvider
     * that supplied it. The WeakReference lets unused graphs be collected.
     */
    private static class MGraphHolder {

        private final WeightedTcProvider tcProvider;
        private final WeakReference<Graph> mGraphReference;

        MGraphHolder(WeightedTcProvider tcProvider, Graph graph) {
            this.tcProvider = tcProvider;
            this.mGraphReference = new WeakReference<Graph>(graph);
        }

        Graph getMGraph() {
            return this.mGraphReference.get();
        }

        WeightedTcProvider getWeightedTcProvider() {
            return this.tcProvider;
        }
    }

    //methods for debugging / monitoring
    public SortedSet<WeightedTcProvider> getProviderList() {
        return providerList;
    }

    public boolean isCachingEnabled() {
        return isCachingEnabled;
    }

    public void setCachingEnabled(boolean isCachingEnabled) {
        this.isCachingEnabled = isCachingEnabled;
        if (!isCachingEnabled()) {
            mGraphCache.clear();
        }
    }
}
| 302 |
0 | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/dataset/ImmutableGraphServiceFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.dataset;
import org.apache.clerezza.Graph;
import org.apache.clerezza.IRI;
import org.apache.clerezza.dataset.security.TcAccessController;
import org.osgi.framework.Bundle;
import org.osgi.framework.ServiceFactory;
import org.osgi.framework.ServiceRegistration;
/**
 * OSGi {@link ServiceFactory} that serves the {@link ImmutableGraph} with a
 * fixed name, wrapped in access-control checks.
 *
 * @see <a href="http://www.osgi.org/javadoc/r4v41/org/osgi/framework/ServiceFactory.html">
 *      Interface ServiceFactory</a>
 *
 * @author mir
 */
public class ImmutableGraphServiceFactory implements ServiceFactory {

    private final TcManager tcManager;
    private final IRI name;
    private final TcAccessController tcAccessController;

    ImmutableGraphServiceFactory(TcManager tcManager, IRI name,
            TcAccessController tcAccessController) {
        this.tcManager = tcManager;
        this.name = name;
        this.tcAccessController = tcAccessController;
    }

    @Override
    public Object getService(Bundle bundle, ServiceRegistration registration) {
        // wrap the graph so every access is checked against the controller,
        // then expose its immutable view
        final Graph secured = new SecuredGraph(
                tcManager.getImmutableGraph(name), name, tcAccessController);
        return secured.getImmutableGraph();
    }

    @Override
    public void ungetService(Bundle bundle, ServiceRegistration registration,
            Object service) {
        // nothing to release
    }
}
| 303 |
0 | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/dataset/EntityAlreadyExistsException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.dataset;
import org.apache.clerezza.IRI;
/**
 * Thrown on an attempt to create an entity with a name which already exists.
 *
 * @author hasan
 */
public class EntityAlreadyExistsException extends RuntimeException {

    // stable serialization id, recommended for Serializable subclasses
    private static final long serialVersionUID = 1L;

    // name of the already-existing entity; set once in the constructor
    private final IRI entityName;

    /**
     * Creates an exception indicating that an entity with the specified name
     * already exists.
     *
     * @param entityName the name of the entity which already exists
     */
    public EntityAlreadyExistsException(IRI entityName) {
        super("An entity with this name already exists: " + entityName);
        this.entityName = entityName;
    }

    /**
     * @return the name of the entity which already exists
     */
    public IRI getEntityName() {
        return entityName;
    }
}
| 304 |
0 | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/dataset/WeightedProviderComparator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.dataset;
import java.util.Comparator;
/**
 * Compares WeightedTcProviders, descending by weight and, for equal weights,
 * ascending by the string form of their class (a deterministic tie-break so
 * distinct provider classes never compare as equal in a SortedSet).
 */
public class WeightedProviderComparator implements Comparator<WeightedTcProvider> {

    /**
     * @param o1 first provider
     * @param o2 second provider
     * @return negative if o1 ranks before o2 (higher weight first), positive
     *         if after, zero only for providers of the same class and weight
     */
    @Override
    public int compare(WeightedTcProvider o1, WeightedTcProvider o2) {
        int o1Weight = o1.getWeight();
        int o2Weight = o2.getWeight();
        if (o1Weight != o2Weight) {
            // Integer.compare avoids the overflow risk of "o2Weight - o1Weight"
            // for extreme weight values
            return Integer.compare(o2Weight, o1Weight);
        }
        // equal weight: order deterministically by class string representation
        return o1.getClass().toString().compareTo(o2.getClass().toString());
    }
}
| 305 |
0 | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/dataset/SecuredGraph.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.dataset;
import org.apache.clerezza.*;
import org.apache.clerezza.implementation.in_memory.SimpleImmutableGraph;
import org.apache.clerezza.implementation.graph.WriteBlockedGraph;
import org.apache.clerezza.dataset.security.TcAccessController;
import java.security.AccessControlException;
import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.locks.ReadWriteLock;
/**
 * A secured triple collection wraps a triple collection, checking each access
 * against the rights on the ImmutableGraph whose name is passed to the
 * constructor.
 *
 * @author mir, hasan
 */
public class SecuredGraph implements Graph {

    private final Graph wrapped;
    private final IRI name;
    private final TcAccessController tcAccessController;

    /**
     * @param wrapped the graph all calls are delegated to
     * @param name the graph name whose permissions are checked
     * @param tcAccessController performs the actual permission checks
     */
    public SecuredGraph(Graph wrapped, IRI name,
            TcAccessController tcAccessController) {
        this.wrapped = wrapped;
        this.name = name;
        this.tcAccessController = tcAccessController;
    }

    @Override
    public Iterator<Triple> filter(final BlankNodeOrIRI subject, final IRI predicate, final RDFTerm object) {
        final Iterator<Triple> baseIter = wrapped.filter(subject, predicate, object);
        // the permission check happens on every iterator operation, not only
        // when the iterator is created, so revoked rights take effect mid-iteration
        return new Iterator<Triple>() {

            @Override
            public boolean hasNext() {
                checkRead();
                return baseIter.hasNext();
            }

            @Override
            public Triple next() {
                checkRead();
                return baseIter.next();
            }

            @Override
            public void remove() {
                checkWrite();
                baseIter.remove();
            }
        };
    }

    @Override
    public int size() {
        checkRead();
        return wrapped.size();
    }

    @Override
    public boolean isEmpty() {
        checkRead();
        return wrapped.isEmpty();
    }

    @Override
    public Object[] toArray() {
        checkRead();
        return wrapped.toArray();
    }

    @Override
    public <T> T[] toArray(T[] a) {
        checkRead();
        return wrapped.toArray(a);
    }

    @Override
    public boolean add(Triple e) {
        checkWrite();
        return wrapped.add(e);
    }

    @Override
    public boolean remove(Object o) {
        checkWrite();
        return wrapped.remove(o);
    }

    @Override
    public boolean addAll(Collection<? extends Triple> c) {
        checkWrite();
        return wrapped.addAll(c);
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        checkWrite();
        return wrapped.removeAll(c);
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        checkWrite();
        return wrapped.retainAll(c);
    }

    @Override
    public void clear() {
        checkWrite();
        wrapped.clear();
    }

    /** Throws AccessControlException if the caller lacks read permission. */
    void checkRead() {
        tcAccessController.checkReadPermission(name);
    }

    /** Throws AccessControlException if the caller lacks read-write permission. */
    void checkWrite() {
        tcAccessController.checkReadWritePermission(name);
    }

    @Override
    public boolean contains(Object o) {
        checkRead();
        // BUG FIX: the former "(Triple) o" cast made this throw
        // ClassCastException for non-Triple arguments; Collection.contains
        // takes Object, so delegate directly.
        return wrapped.contains(o);
    }

    @Override
    public Iterator<Triple> iterator() {
        // filter(null, null, null) yields all triples, access-checked
        return filter(null, null, null);
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        checkRead();
        return wrapped.containsAll(c);
    }

    @Override
    public ImmutableGraph getImmutableGraph() {
        return new SimpleImmutableGraph(this);
    }

    @Override
    public ReadWriteLock getLock() {
        return wrapped.getLock();
    }

    /**
     * Returns the wrapped Graph if the caller has all access rights.
     * If the caller has only the read access right, then a write-blocked
     * Graph is returned. If the caller has neither the read nor the write
     * access right then an AccessControlException is thrown.
     *
     * @return the wrapped Graph or a write-blocked Graph depending
     *         on the access rights of the caller
     */
    public Graph getUnsecuredGraph() {
        try {
            checkWrite();
            return wrapped;
        } catch (AccessControlException ex) {
            // no write right: fall back to read-only access if permitted
            checkRead();
            return new WriteBlockedGraph(wrapped);
        }
    }
}
| 306 |
0 | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/dataset/WeightedTcProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.dataset;
/**
 * This interface is implemented by providers to which {@link TcManager}
 * delegates. Among several registered providers the one with the greatest
 * weight is preferred.
 *
 * @author reto
 */
public interface WeightedTcProvider extends TcProvider {
/**
 * Gets the weight of this provider. {@link TcManager} will prioritize
 * <code>TcProvider</code>s with greater weight.
 *
 * @return a positive number indicating the weight of the provider
 */
int getWeight();
}
| 307 |
0 | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/dataset | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/dataset/security/PermissionParser.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.dataset.security;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.PushbackReader;
import java.io.StringReader;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.security.Permission;
/**
* Provides a utility method to instantiate a permission given its string
* representation as returned by <code>java security.Permission.toString</code>.
*
* @author reto
*/
public class PermissionParser {
final static Logger logger = LoggerFactory.getLogger(PermissionParser.class);
/**
* Parsers permissionDescription and instantiates the permission using
* the ClassLoader of this class.
*
* @param permissionDescription
* @return Permission
*/
public static Permission getPermission(String permissionDescription) {
return getPermission(permissionDescription, PermissionParser.class.getClassLoader());
}
/**
* Parsers permissionDescription and instantiates the permission using
* classLoader.
*
* @param permissionDescription
* @param classLoader
* @return Permission
*/
public static Permission getPermission(String permissionDescription, ClassLoader classLoader) {
PermissionInfo permissionInfo = parse(permissionDescription);
try {
Class clazz = classLoader.loadClass(permissionInfo.className);
Constructor<?> constructor = clazz.getConstructor(
String.class, String.class);
return (Permission) constructor.newInstance(
permissionInfo.name, permissionInfo.actions);
} catch (InstantiationException ie) {
logger.warn("{}", ie);
throw new RuntimeException(ie);
} catch (ClassNotFoundException cnfe) {
logger.warn("{}", cnfe);
throw new RuntimeException(cnfe);
} catch (NoSuchMethodException nsme) {
logger.warn("{}", nsme);
throw new RuntimeException(nsme);
} catch (InvocationTargetException ite) {
logger.warn("{}", ite);
throw new RuntimeException(ite);
} catch (IllegalAccessException iae) {
logger.warn("{}", iae);
throw new RuntimeException(iae);
}
}
private static PermissionInfo parse(String permissionDescription) {
StringReader reader = new StringReader(permissionDescription);
try {
return parse(reader);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
private static PermissionInfo parse(StringReader reader) throws IOException {
PermissionInfo result = new PermissionInfo();
for (int ch = reader.read(); ch != -1; ch = reader.read()) {
if (ch == ' ') {
continue;
}
if (ch =='(') {
parseFromClassName(reader, result);
break;
} else {
throw new IllegalArgumentException("Permission description does not start with '('");
}
}
for (int ch = reader.read(); ch != -1; ch = reader.read()) {
if (ch != ' ') {
throw new IllegalArgumentException("Unparsable characters after closing ')'");
}
}
return result;
}
private static void parseFromClassName(StringReader StringReader, PermissionInfo result) throws IOException {
PushbackReader reader = new PushbackReader(StringReader, 1);
result.className = readSection(reader);
result.name = readSection(reader);
result.actions = readSection(reader);
byte closingBracketsCount = 0;
for (int ch = reader.read(); ch != -1; ch = reader.read()) {
if (ch == ' ') {
continue;
}
if (ch == ')') {
closingBracketsCount++;
if (closingBracketsCount > 1) {
throw new IllegalArgumentException("more than 1 closing bracket");
}
continue;
}
else {
throw new IllegalArgumentException("illegal character at this position: "+ch);
}
}
}
private static String readSection(PushbackReader reader) throws IOException {
for (int ch = reader.read(); ch != -1; ch = reader.read()) {
if (ch == ' ') {
continue;
} else {
reader.unread(ch);
return readSectionWithNoHeadingSpace(reader);
}
}
return null;
}
private static String readSectionWithNoHeadingSpace(PushbackReader reader) throws IOException {
StringBuilder sectionWriter = new StringBuilder();
for (int ch = reader.read(); ch != -1; ch = reader.read()) {
if (ch == '"') {
if (sectionWriter.length() > 0) {
throw new IllegalArgumentException("Quote at wrong position, characters before quote: "+sectionWriter.toString());
}
sectionWriter = null;
return readTillQuote(reader);
}
if (ch == ' ') {
return sectionWriter.toString();
}
if (ch == ')') {
reader.unread(ch);
return sectionWriter.toString();
}
sectionWriter.append((char)ch);
}
throw new IllegalArgumentException("missing closing bracket (')')");
}
private static String readTillQuote(PushbackReader reader) throws IOException {
StringBuilder sectionWriter = new StringBuilder();
for (int ch = reader.read(); ch != -1; ch = reader.read()) {
if (ch == '"') {
return sectionWriter.toString();
}
sectionWriter.append((char)ch);
}
throw new IllegalArgumentException("missing closing quote ('=')");
}
    /**
     * Simple holder for the three parsed parts of a permission description:
     * the fully qualified permission class name plus the optional target
     * name and actions arguments (which may remain null).
     */
    private static class PermissionInfo {
        String className, name, actions;
    }
}
| 308 |
0 | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/dataset | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/dataset/security/TcAccessController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.dataset.security;
import org.apache.clerezza.*;
import org.apache.clerezza.implementation.TripleImpl;
import org.apache.clerezza.implementation.literal.LiteralFactory;
import org.apache.clerezza.dataset.NoSuchEntityException;
import org.apache.clerezza.dataset.TcManager;
import java.security.AccessControlException;
import java.security.AccessController;
import java.security.AllPermission;
import java.security.Permission;
import java.util.*;
import java.util.concurrent.locks.Lock;
/**
* Controls the permissions needed to access a triple collection provided by
* <code>TcManager</code>.
*
* Clients with a ConfigureTcAcessPermission can set the permissions required to
* access a Graph. These permissions are stored persistently in an
* Graph named urn:x-localinstance:/graph-access.graph
*
* Clients should get an instance from TcManager.getTcAccessController()
*
* @author reto
*/
public abstract class TcAccessController {

    private final TcManager tcManager;
    private final IRI permissionGraphName = new IRI("urn:x-localinstance:/graph-access.graph");
    //we can't rely on ontology plugin in rdf core
    private String ontologyNamespace = "http://clerezza.apache.org/2010/07/10/graphpermssions#";
    private final IRI readPermissionListProperty = new IRI(ontologyNamespace + "readPermissionList");
    private final IRI readWritePermissionListProperty = new IRI(ontologyNamespace + "readWritePermissionList");
    /**
     * The first item in the subject RDF list.
     */
    public static final IRI first = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#first");
    /**
     * The rest of the subject RDF list after the first item.
     */
    public static final IRI rest = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#rest");
    public static final IRI rdfNil = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#nil");
    //caches avoid re-parsing the permission descriptions on every access check
    private final Map<IRI, Collection<Permission>> readPermissionCache =
            Collections.synchronizedMap(new HashMap<IRI, Collection<Permission>>());
    private final Map<IRI, Collection<Permission>> readWritePermissionCache =
            Collections.synchronizedMap(new HashMap<IRI, Collection<Permission>>());

    public TcAccessController() {
        //calls the abstract getTcManager(); as documented there it is
        //invoked only this once
        this.tcManager = getTcManager();
    }

    /**
     * Checks whether the calling context is allowed to read the graph with
     * the given name, throwing a java.security.AccessControlException if not.
     * No check is performed when no SecurityManager is installed.
     *
     * @param graphUri the name of the graph
     */
    public void checkReadPermission(IRI graphUri) {
        if (graphUri.equals(permissionGraphName)) {
            //This is world readable, as this prevents us from doing things as
            //privileged during verification
            return;
        }
        SecurityManager security = System.getSecurityManager();
        if (security != null) {
            //with AllPermission granted the rest is obsolete
            try {
                AccessController.checkPermission(new AllPermission());
            } catch (AccessControlException e) {
                Collection<Permission> perms = getRequiredReadPermissions(graphUri);
                if (perms.size() > 0) {
                    for (Permission permission : perms) {
                        AccessController.checkPermission(permission);
                    }
                } else {
                    //no specific permissions configured: require the default
                    AccessController.checkPermission(new TcPermission(
                            graphUri.getUnicodeString(), TcPermission.READ));
                }
            }
        }
    }

    /**
     * Checks whether the calling context is allowed to read and write the
     * graph with the given name, throwing a
     * java.security.AccessControlException if not. No check is performed when
     * no SecurityManager is installed.
     *
     * @param graphUri the name of the graph
     */
    public void checkReadWritePermission(IRI graphUri) {
        SecurityManager security = System.getSecurityManager();
        if (security != null) {
            //with AllPermission granted the rest is obsolete
            try {
                AccessController.checkPermission(new AllPermission());
            } catch (AccessControlException e) {
                if (graphUri.equals(permissionGraphName)) {
                    //writing the permission graph itself always requires the
                    //default TcPermission
                    AccessController.checkPermission(new TcPermission(
                            graphUri.getUnicodeString(), TcPermission.READWRITE));
                } else {
                    Collection<Permission> perms = getRequiredReadWritePermissions(graphUri);
                    if (perms.size() > 0) {
                        for (Permission permission : perms) {
                            AccessController.checkPermission(permission);
                        }
                    } else {
                        AccessController.checkPermission(new TcPermission(
                                graphUri.getUnicodeString(), TcPermission.READWRITE));
                    }
                }
            }
        }
    }

    /**
     * Set the set of permissions required for read access to a triple-collection, if
     * the set is non-empty the default TCPermisson is no longer required.
     *
     * @param graphUri the name of the graph
     * @param permissionDescriptions the textual permission descriptions
     */
    public void setRequiredReadPermissionStrings(IRI graphUri,
            Collection<String> permissionDescriptions) {
        readPermissionCache.remove(graphUri);
        final Graph permissionMGraph = getOrCreatePermisionGraph();
        Lock l = permissionMGraph.getLock().writeLock();
        l.lock();
        try {
            removeExistingRequiredReadPermissions(graphUri, permissionMGraph);
            final BlankNodeOrIRI permissionList = createList(permissionDescriptions.iterator(), permissionMGraph);
            permissionMGraph.add(new TripleImpl(graphUri,
                    readPermissionListProperty, permissionList));
        } finally {
            l.unlock();
        }
    }

    /**
     * Set the set of permissions required for read access to a triple-collection, if
     * the set is non-empty the default TCPermisson is no longer required.
     *
     * @param graphUri the name of the graph
     * @param permissions the permissions, stored via their string representation
     */
    public void setRequiredReadPermissions(IRI graphUri,
            Collection<Permission> permissions) {
        Collection<String> permissionStrings = new ArrayList<String>();
        for (Permission permission : permissions) {
            permissionStrings.add(permission.toString());
        }
        setRequiredReadPermissionStrings(graphUri, permissionStrings);
    }

    /**
     * Set the set of permissions required for read-write access to a
     * triple-collection, if
     * the set is non-empty the default TCPermisson is no longer required.
     *
     * @param graphUri the name of the graph
     * @param permissionDescriptions the textual permission descriptions
     */
    public void setRequiredReadWritePermissionStrings(IRI graphUri,
            Collection<String> permissionDescriptions) {
        readWritePermissionCache.remove(graphUri);
        final Graph permissionMGraph = getOrCreatePermisionGraph();
        Lock l = permissionMGraph.getLock().writeLock();
        l.lock();
        try {
            removeExistingRequiredReadPermissions(graphUri, permissionMGraph);
            final BlankNodeOrIRI permissionList = createList(permissionDescriptions.iterator(), permissionMGraph);
            permissionMGraph.add(new TripleImpl(graphUri,
                    readWritePermissionListProperty, permissionList));
        } finally {
            l.unlock();
        }
    }

    /**
     * Set the set of permissions required for read-write access to a
     * triple-collection, if
     * the set is non-empty the default TCPermisson is no longer required.
     *
     * @param graphUri the name of the graph
     * @param permissions the permissions, stored via their string representation
     */
    public void setRequiredReadWritePermissions(IRI graphUri,
            Collection<Permission> permissions) {
        Collection<String> permissionStrings = new ArrayList<String>();
        for (Permission permission : permissions) {
            permissionStrings.add(permission.toString());
        }
        setRequiredReadWritePermissionStrings(graphUri, permissionStrings);
    }

    /**
     * Get the set of permissions required for read access to the
     * triple-collection, the set may be empty meaning that the default
     * TCPermission is required.
     *
     * @param graphUri the name of the graph
     * @return the collection of permissions
     */
    public Collection<Permission> getRequiredReadPermissions(IRI graphUri) {
        Collection<Permission> result = readPermissionCache.get(graphUri);
        if (result == null) {
            result = new ArrayList<Permission>();
            Collection<String> permissionStrings = getRequiredReadPermissionStrings(graphUri);
            for (String string : permissionStrings) {
                result.add(PermissionParser.getPermission(string, getClass().getClassLoader()));
            }
            readPermissionCache.put(graphUri, result);
        }
        return result;
    }

    /**
     * Get the set of permissions required for read-write access to the
     * triple-collection, the set may be empty meaning that the default
     * TCPermission is required.
     *
     * @param graphUri the name of the graph
     * @return the collection of permissions
     */
    public Collection<Permission> getRequiredReadWritePermissions(IRI graphUri) {
        Collection<Permission> result = readWritePermissionCache.get(graphUri);
        if (result == null) {
            result = new ArrayList<Permission>();
            Collection<String> permissionStrings = getRequiredReadWritePermissionStrings(graphUri);
            for (String string : permissionStrings) {
                result.add(PermissionParser.getPermission(string, getClass().getClassLoader()));
            }
            readWritePermissionCache.put(graphUri, result);
        }
        return result;
    }

    //recursively builds an RDF list from the iterator; rdf:nil terminates it
    private BlankNodeOrIRI createList(Iterator<String> iterator, Graph permissionMGraph) {
        if (!iterator.hasNext()) {
            return rdfNil;
        }
        final BlankNode result = new BlankNode();
        permissionMGraph.add(new TripleImpl(result, first,
                LiteralFactory.getInstance().createTypedLiteral(iterator.next())));
        permissionMGraph.add(new TripleImpl(result, rest,
                createList(iterator, permissionMGraph)));
        return result;
    }

    //called within write-lock
    private void removeExistingRequiredReadPermissions(IRI graphUri,
            Graph permissionMGraph) {
        try {
            Triple t = permissionMGraph.filter(graphUri, readPermissionListProperty, null).next();
            RDFTerm list = t.getObject();
            removeList((BlankNodeOrIRI) list, permissionMGraph);
            permissionMGraph.remove(t);
        } catch (NoSuchElementException e) {
            //There was no existing list to remove
        }
    }

    //recursively removes an RDF list from the graph
    private void removeList(BlankNodeOrIRI list, Graph permissionMGraph) {
        try {
            Triple t = permissionMGraph.filter(list, rest, null).next();
            RDFTerm restList = t.getObject();
            removeList((BlankNodeOrIRI) restList, permissionMGraph);
            permissionMGraph.remove(t);
            Iterator<Triple> iter = permissionMGraph.filter(list, first, null);
            iter.next();
            iter.remove();
        } catch (NoSuchElementException e) {
            //if it has no rest it is rdf:nil and has no first
        }
    }

    private Collection<String> getRequiredReadWritePermissionStrings(final IRI graphUri) {
        return getRequiredPermissionStrings(graphUri, readWritePermissionListProperty);
    }

    private Collection<String> getRequiredReadPermissionStrings(final IRI graphUri) {
        return getRequiredPermissionStrings(graphUri, readPermissionListProperty);
    }

    //reads the RDF list of permission descriptions attached to graphUri via
    //property; empty when the graph or the list is absent
    private Collection<String> getRequiredPermissionStrings(final IRI graphUri, IRI property) {
        try {
            final Graph permissionMGraph = tcManager.getMGraph(permissionGraphName);
            Lock l = permissionMGraph.getLock().readLock();
            l.lock();
            try {
                Triple t = permissionMGraph.filter(graphUri, property, null).next();
                BlankNodeOrIRI list = (BlankNodeOrIRI) t.getObject();
                LinkedList<String> result = new LinkedList<String>();
                readList(list, permissionMGraph, result);
                return result;
            } catch (NoSuchElementException e) {
                //no permission list for this graph
                return new ArrayList<String>(0);
            } finally {
                l.unlock();
            }
        } catch (NoSuchEntityException e) {
            //no permission graph at all
            return new ArrayList<String>(0);
        }
    }

    //recursively reads an RDF list; addFirst keeps the original order
    private void readList(BlankNodeOrIRI list, Graph permissionMGraph, LinkedList<String> target) {
        if (list.equals(rdfNil)) {
            return;
        }
        Triple restTriple = permissionMGraph.filter(list, rest, null).next();
        BlankNodeOrIRI restList = (BlankNodeOrIRI) restTriple.getObject();
        readList(restList, permissionMGraph, target);
        Triple firstTriple = permissionMGraph.filter(list, first, null).next();
        Literal firstValue = (Literal) firstTriple.getObject();
        String value = LiteralFactory.getInstance().createObject(String.class, firstValue);
        target.addFirst(value);
    }

    private Graph getOrCreatePermisionGraph() {
        try {
            return tcManager.getMGraph(permissionGraphName);
        } catch (NoSuchEntityException e) {
            return tcManager.createGraph(permissionGraphName);
        }
    }

    /**
     * Note that this will only be invoked once
     * @return TcManager
     */
    protected abstract TcManager getTcManager();
}
| 309 |
0 | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/dataset | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/dataset/security/TcPermission.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.dataset.security;
import java.security.Permission;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;
/**
* A permission to access <code>Graph</code>s matching a specified
* name pattern. A pattern is matched if and only if the pattern is equals
* to name of the <code>Graph</code> or the pattern ends with "/*" and
* the name of the <code>Graph</code> starts with the characters
* preceding the '*' in the pattern.
*
* @author reto, tsuy
*/
public class TcPermission extends Permission {

    public final static String READWRITE = "readwrite";
    public final static String READ = "read";

    /** The name pattern; see class description for its semantics. */
    private final String tcNamePattern;

    /**
     * true if readwrite was granted, false if only read
     */
    private boolean allowReadWrite = false;

    /** Splits a comma separated (optionally space padded) action list. */
    final static Pattern actionPattern = Pattern.compile(",( *)");

    /**
     * Constructs a TcPermission for a specified name pattern and a list of
     * actions.
     *
     * @param tcNamePattern see class description
     * @param actions a comma separated list of the strings "read" and "readwrite",
     *     the canonical form is just "read" or "readwrite" as "readwrite"
     *     implies "read".
     * @throws IllegalArgumentException if actions contains anything else
     */
    public TcPermission(String tcNamePattern, String actions) {
        super(tcNamePattern);
        this.tcNamePattern = tcNamePattern;
        //check and set actions (typed set instead of raw types)
        final Set<String> actionSet = new HashSet<String>(Arrays.asList(actionPattern.split(actions)));
        if (actionSet.remove(READWRITE)) {
            allowReadWrite = true;
        } else {
            if (!actionSet.contains(READ)) {
                throw new IllegalArgumentException("actions must be either \"read\" or \"readwrite\"");
            }
        }
        actionSet.remove(READ);
        if (actionSet.size() > 0) {
            throw new IllegalArgumentException("actions must only contain \"read\" and \"readwrite\"");
        }
    }

    /**
     * True if this permission's name pattern and actions cover those of the
     * given permission.
     */
    @Override
    public boolean implies(Permission permission) {
        if (permission instanceof TcPermission) {
            TcPermission other = (TcPermission) permission;
            if (!patternImplies(other.tcNamePattern)) {
                return false;
            }
            if (!actionsImplies(other.allowReadWrite)) {
                return false;
            }
            return true;
        }
        return false;
    }

    //read implies read; readwrite implies both read and readwrite
    private boolean actionsImplies(boolean readwriteOther) {
        if (!readwriteOther) {
            return true;
        } else {
            return allowReadWrite;
        }
    }

    //exact match, or prefix match when this pattern ends with "/*"
    private boolean patternImplies(String tcNamePatternOther) {
        if (tcNamePattern.equals(tcNamePatternOther)) {
            return true;
        }
        if (tcNamePattern.endsWith("/*")) {
            return tcNamePatternOther.startsWith(
                    tcNamePattern.substring(0, tcNamePattern.length()-1));
        }
        return false;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final TcPermission other = (TcPermission) obj;
        if (this.tcNamePattern != other.tcNamePattern
                && (this.tcNamePattern == null
                || !this.tcNamePattern.equals(other.tcNamePattern))) {
            return false;
        }
        if (this.allowReadWrite != other.allowReadWrite) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        int hash = 3;
        hash = 97 * hash + (this.tcNamePattern != null ?
                this.tcNamePattern.hashCode() : 0);
        if (allowReadWrite) {
            hash++;
        }
        return hash;
    }

    /**
     * @return the canonical action string: "readwrite" or "read"
     */
    @Override
    public String getActions() {
        return allowReadWrite ? READWRITE : READ;
    }
}
| 310 |
0 | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/simple | Create_ds/clerezza/dataset/src/main/java/org/apache/clerezza/simple/storage/SimpleTcProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.simple.storage;
import org.apache.clerezza.Graph;
import org.apache.clerezza.IRI;
import org.apache.clerezza.ImmutableGraph;
import org.apache.clerezza.implementation.in_memory.SimpleGraph;
import org.apache.clerezza.implementation.in_memory.SimpleImmutableGraph;
import org.apache.clerezza.dataset.*;
import org.osgi.service.component.annotations.Component;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
@Component(service = WeightedTcProvider.class, property = TcManager.GENERAL_PURPOSE_TC+"=true")
public class SimpleTcProvider implements WeightedTcProvider {

    /** All graphs held in memory, keyed by name; immutable ones are ImmutableGraph instances. */
    private Map<IRI, Graph> tripleMap = new HashMap<IRI, Graph>();

    /**
     * Rejects unusable graph names.
     *
     * @throws IllegalArgumentException if name (or its unicode string) is null or blank
     */
    private static void validateName(IRI name) {
        if ((name == null) || (name.getUnicodeString() == null)
                || (name.getUnicodeString().trim().length() == 0)) {
            throw new IllegalArgumentException("Name must not be null");
        }
    }

    @Override
    public ImmutableGraph createImmutableGraph(IRI name, Graph triples)
            throws EntityAlreadyExistsException {
        validateName(name);
        //direct existence check instead of exception-based control flow
        if (tripleMap.containsKey(name)) {
            throw new EntityAlreadyExistsException(name);
        }
        final ImmutableGraph result;
        if (triples == null) {
            //no content given: store an empty immutable graph
            result = new SimpleImmutableGraph(new SimpleGraph());
        } else if (triples instanceof ImmutableGraph) {
            result = (ImmutableGraph) triples;
        } else {
            //snapshot the mutable graph
            result = new SimpleImmutableGraph(triples);
        }
        tripleMap.put(name, result);
        return result;
    }

    @Override
    public Graph createGraph(IRI name) throws EntityAlreadyExistsException {
        validateName(name);
        //direct existence check instead of exception-based control flow
        if (tripleMap.containsKey(name)) {
            throw new EntityAlreadyExistsException(name);
        }
        Graph result = new SimpleGraph();
        tripleMap.put(name, result);
        return result;
    }

    @Override
    public void deleteGraph(IRI name)
            throws NoSuchEntityException, EntityUndeletableException {
        if (tripleMap.remove(name) == null) {
            throw new NoSuchEntityException(name);
        }
    }

    @Override
    public ImmutableGraph getImmutableGraph(IRI name) throws NoSuchEntityException {
        Graph tripleCollection = tripleMap.get(name);
        if (tripleCollection instanceof ImmutableGraph) {
            return (ImmutableGraph) tripleCollection;
        }
        //absent, or present but mutable: no ImmutableGraph with that name
        throw new NoSuchEntityException(name);
    }

    @Override
    public Graph getMGraph(IRI name) throws NoSuchEntityException {
        Graph tripleCollection = tripleMap.get(name);
        if ((tripleCollection == null) || (tripleCollection instanceof ImmutableGraph)) {
            //absent, or present but immutable: no mutable Graph with that name
            throw new NoSuchEntityException(name);
        }
        return tripleCollection;
    }

    @Override
    public Set<IRI> getNames(ImmutableGraph graph) {
        throw new UnsupportedOperationException(
                "Not supported yet. equals() has to be implemented first");
    }

    @Override
    public Graph getGraph(IRI name)
            throws NoSuchEntityException {
        Graph tripleCollection = tripleMap.get(name);
        if (tripleCollection == null) {
            throw new NoSuchEntityException(name);
        }
        return tripleCollection;
    }

    @Override
    public int getWeight() {
        //low default weight; specialized providers may advertise higher values
        return 1;
    }

    @Override
    public Set<IRI> listImmutableGraphs() {
        Set<IRI> result = new HashSet<IRI>();
        for (IRI uriRef : listGraphs()) {
            if (tripleMap.get(uriRef) instanceof ImmutableGraph) {
                result.add(uriRef);
            }
        }
        return result;
    }

    @Override
    public Set<IRI> listMGraphs() {
        Set<IRI> result = new HashSet<IRI>();
        for (IRI uriRef : listGraphs()) {
            if (!(tripleMap.get(uriRef) instanceof ImmutableGraph)) {
                result.add(uriRef);
            }
        }
        return result;
    }

    @Override
    public Set<IRI> listGraphs() {
        return tripleMap.keySet();
    }
}
| 311 |
0 | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation/LanguageTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation;
import org.apache.clerezza.Language;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
/**
* @author reto
*/
@RunWith(JUnitPlatform.class)
public class LanguageTest {

    /** Equal identifiers yield equal Language instances with equal hash codes. */
    @Test
    public void languageEqualityTest() {
        Language germanA = new Language("DE");
        Language germanB = new Language("DE");
        Assertions.assertEquals(germanA, germanB);
        Assertions.assertEquals(germanA.hashCode(), germanB.hashCode());
        Language english = new Language("EN");
        Assertions.assertFalse(germanA.equals(english));
    }

    /** toString() returns exactly the identifier the Language was created with. */
    @Test
    public void toStringTest() {
        final String identifier = "de";
        Language language = new Language(identifier);
        Assertions.assertEquals(language.toString(), identifier);
    }
}
| 312 |
0 | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation/UriRefTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation;
import org.apache.clerezza.IRI;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
/**
* @author reto
*/
@RunWith(JUnitPlatform.class)
public class UriRefTest {

    /** IRIs built from the same string are equal; the URL-encoded form is a different IRI. */
    @Test
    public void uriRefEqualityTest() throws UnsupportedEncodingException {
        String iriString = "http://example.org/üöä";
        IRI plainA = new IRI(iriString);
        IRI plainB = new IRI(iriString);
        Assertions.assertEquals(plainA, plainB);
        IRI encoded = new IRI(URLEncoder.encode(iriString, "utf-8"));
        Assertions.assertFalse(plainA.equals(encoded));
    }

    /** toString() wraps the unicode string in angle brackets. */
    @Test
    public void toStringTest() {
        String iriString = "http://example.org/üöä";
        IRI iri = new IRI(iriString);
        Assertions.assertEquals("<" + iriString + ">", iri.toString());
    }
}
| 313 |
0 | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation/graphmatching/HashMatchingTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graphmatching;
import org.apache.clerezza.BlankNodeOrIRI;
import org.apache.clerezza.BlankNode;
import org.apache.clerezza.Graph;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
import java.util.Map;
/**
* @author reto
*/
@RunWith(JUnitPlatform.class)
public class HashMatchingTest {

    /**
     * Two graphs, each the union of a 4-link and a 5-link chain sharing a
     * common start node, must be isomorphic: the resulting mapping covers
     * all ten blank nodes of the first graph.
     */
    @Test
    public void twoLine() throws GraphNotIsomorphicException {
        BlankNodeOrIRI firstStart = new BlankNode();
        Graph graphA = Utils4Testing.generateLine(4, firstStart);
        graphA.addAll(Utils4Testing.generateLine(5, firstStart));
        BlankNodeOrIRI secondStart = new BlankNode();
        Graph graphB = Utils4Testing.generateLine(5, secondStart);
        graphB.addAll(Utils4Testing.generateLine(4, secondStart));
        Assertions.assertEquals(9, graphA.size());
        final Map<BlankNode, BlankNode> mapping = new HashMatching(graphA, graphB).getMatchings();
        Assertions.assertNotNull(mapping);
        Assertions.assertEquals(10, mapping.size());
    }
}
| 314 |
0 | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation/graphmatching/PermutationIteratorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graphmatching;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @author reto
*/
@RunWith(JUnitPlatform.class)
public class PermutationIteratorTest {

    /** Drains the iterator, collecting every permutation it yields into a set. */
    private static Set<List<String>> collectPermutations(List<String> elements) {
        PermutationIterator<String> iterator = new PermutationIterator<String>(elements);
        Set<List<String>> permutations = new HashSet<List<String>>();
        while (iterator.hasNext()) {
            permutations.add(iterator.next());
        }
        return permutations;
    }

    /** An empty list has no permutations at all. */
    @Test
    public void simple() {
        List<String> elements = new ArrayList<String>();
        PermutationIterator<String> iterator = new PermutationIterator<String>(elements);
        Assertions.assertFalse(iterator.hasNext());
    }

    /** A singleton list yields at least one permutation. */
    @Test
    public void lessSimple() {
        List<String> elements = new ArrayList<String>();
        elements.add("Hasan");
        PermutationIterator<String> iterator = new PermutationIterator<String>(elements);
        Assertions.assertTrue(iterator.hasNext());
    }

    /** Two distinct elements give 2! = 2 permutations. */
    @Test
    public void regular() {
        List<String> elements = new ArrayList<String>();
        elements.add("Hasan");
        elements.add("Tsuy");
        Assertions.assertEquals(2, collectPermutations(elements).size());
    }

    /** Three distinct elements give 3! = 6 permutations. */
    @Test
    public void extended() {
        List<String> elements = new ArrayList<String>();
        elements.add("Hasan");
        elements.add("Tsuy");
        elements.add("Llena");
        Assertions.assertEquals(6, collectPermutations(elements).size());
    }
}
| 315 |
0 | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation/graphmatching/Utils4Testing.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graphmatching;
import org.apache.clerezza.BlankNode;
import org.apache.clerezza.BlankNodeOrIRI;
import org.apache.clerezza.Graph;
import org.apache.clerezza.IRI;
import org.apache.clerezza.implementation.TripleImpl;
import org.apache.clerezza.implementation.in_memory.SimpleGraph;
/**
* @author reto
*/
public class Utils4Testing {

    /** The single predicate used to link the nodes of a generated chain. */
    final static IRI u1 = new IRI("http://example.org/u1");

    /**
     * Builds a chain of {@code size} triples starting at {@code firstNode}:
     * each triple links the current node via {@link #u1} to a fresh blank node.
     *
     * @throws IllegalArgumentException if size is smaller than 1
     */
    static Graph generateLine(int size, final BlankNodeOrIRI firstNode) {
        if (size < 1) {
            throw new IllegalArgumentException();
        }
        Graph result = new SimpleGraph();
        BlankNodeOrIRI current = firstNode;
        for (int i = 0; i < size; i++) {
            final BlankNode next = new BlankNode();
            result.add(new TripleImpl(current, u1, next));
            current = next;
        }
        return result;
    }
}
| 316 |
0 | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation/graphmatching/GraphMatcherTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graphmatching;
import org.apache.clerezza.BlankNode;
import org.apache.clerezza.BlankNodeOrIRI;
import org.apache.clerezza.Graph;
import org.apache.clerezza.IRI;
import org.apache.clerezza.implementation.TripleImpl;
import org.apache.clerezza.implementation.in_memory.SimpleGraph;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
import java.util.Map;
/**
 * Tests {@link GraphMatcher#getValidMapping}, which returns a mapping between
 * the blank nodes of two graphs under which the graphs are isomorphic, or
 * {@code null} when no such mapping exists.
 *
 * @author reto
 */
@RunWith(JUnitPlatform.class)
public class GraphMatcherTest {
// IRI reused as subject, predicate and object throughout these tests.
final static IRI u1 = new IRI("http://example.org/u1");
// Two empty graphs are isomorphic; the resulting mapping is empty.
@Test
public void testEmpty() {
Graph tc1 = new SimpleGraph();
Graph tc2 = new SimpleGraph();
final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2);
Assertions.assertNotNull(mapping);
Assertions.assertEquals(0, mapping.size());
}
// A one-triple graph cannot be mapped onto an empty graph.
@Test
public void test2() {
Graph tc1 = new SimpleGraph();
tc1.add(new TripleImpl(u1, u1, u1));
Graph tc2 = new SimpleGraph();
final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2);
Assertions.assertNull(mapping);
}
// Identical ground graphs (no blank nodes) match with an empty mapping.
@Test
public void test3() {
Graph tc1 = new SimpleGraph();
tc1.add(new TripleImpl(u1, u1, u1));
Graph tc2 = new SimpleGraph();
tc2.add(new TripleImpl(u1, u1, u1));
final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2);
Assertions.assertNotNull(mapping);
Assertions.assertEquals(0, mapping.size());
}
// One blank node in object position on each side: they map onto each other.
@Test
public void test4() {
Graph tc1 = new SimpleGraph();
tc1.add(new TripleImpl(u1, u1, new BlankNode()));
Graph tc2 = new SimpleGraph();
tc2.add(new TripleImpl(u1, u1, new BlankNode()));
final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2);
Assertions.assertNotNull(mapping);
Assertions.assertEquals(1, mapping.size());
}
// Blank nodes in both subject and object position: two nodes get mapped.
@Test
public void test5() {
Graph tc1 = new SimpleGraph();
tc1.add(new TripleImpl(new BlankNode(), u1, new BlankNode()));
Graph tc2 = new SimpleGraph();
tc2.add(new TripleImpl(new BlankNode(), u1, new BlankNode()));
final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2);
Assertions.assertNotNull(mapping);
Assertions.assertEquals(2, mapping.size());
}
// Two triples sharing an object bnode vs. a single triple: sizes differ,
// so no valid mapping can exist.
@Test
public void test6() {
Graph tc1 = new SimpleGraph();
final BlankNode b11 = new BlankNode();
tc1.add(new TripleImpl(new BlankNode(), u1, b11));
tc1.add(new TripleImpl(new BlankNode(), u1, b11));
Graph tc2 = new SimpleGraph();
tc2.add(new TripleImpl(new BlankNode(), u1, new BlankNode()));
final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2);
Assertions.assertNull(mapping);
}
// Convenience overload: a circle starting at a fresh blank node.
private Graph generateCircle(int size) {
return generateCircle(size, new BlankNode());
}
// Builds a closed chain of `size` triples linked via u1 that starts and
// ends at firstNode; all intermediate nodes are fresh blank nodes.
private Graph generateCircle(int size, final BlankNodeOrIRI firstNode) {
if (size < 1) {
throw new IllegalArgumentException();
}
Graph result = new SimpleGraph();
BlankNodeOrIRI lastNode = firstNode;
for (int i = 0; i < (size - 1); i++) {
final BlankNode newNode = new BlankNode();
result.add(new TripleImpl(lastNode, u1, newNode));
lastNode = newNode;
}
result.add(new TripleImpl(lastNode, u1, firstNode));
return result;
}
// Circles of equal length built from blank nodes are isomorphic; every
// node of the circle appears in the mapping.
@Test
public void test7() {
Graph tc1 = generateCircle(2);
Graph tc2 = generateCircle(2);
final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2);
Assertions.assertNotNull(mapping);
Assertions.assertEquals(2, mapping.size());
}
// Same as test7 with a longer circle.
@Test
public void test8() {
Graph tc1 = generateCircle(5);
Graph tc2 = generateCircle(5);
final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2);
Assertions.assertNotNull(mapping);
Assertions.assertEquals(5, mapping.size());
}
// Circles of length 2 and 3 crossing at a common IRI (not a blank node).
@Test
public void test9() {
BlankNodeOrIRI crossing = new IRI("http://example.org/");
Graph tc1 = generateCircle(2, crossing);
tc1.addAll(generateCircle(3, crossing));
Graph tc2 = generateCircle(2, crossing);
tc2.addAll(generateCircle(3, crossing));
Assertions.assertEquals(5, tc1.size());
final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2);
Assertions.assertNotNull(mapping);
//a circle of 2 with 1 bnode and one of 2 bnodes
Assertions.assertEquals(3, mapping.size());
}
// Same shape as test9 but crossing at a blank node: 4 bnodes to map.
@Test
public void test10() {
BlankNodeOrIRI crossing1 = new BlankNode();
Graph tc1 = generateCircle(2, crossing1);
tc1.addAll(generateCircle(3, crossing1));
BlankNodeOrIRI crossing2 = new BlankNode();
Graph tc2 = generateCircle(2, crossing2);
tc2.addAll(generateCircle(3, crossing2));
Assertions.assertEquals(5, tc1.size());
final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2);
Assertions.assertNotNull(mapping);
//a circle of 2 and one of 3 with one common node
Assertions.assertEquals(4, mapping.size());
}
// Equal triple counts but different circle lengths (2+4 vs 3+3): the
// graphs are not isomorphic, so no mapping exists.
@Test
public void test11() {
BlankNodeOrIRI crossing1 = new BlankNode();
Graph tc1 = generateCircle(2, crossing1);
tc1.addAll(generateCircle(4, crossing1));
BlankNodeOrIRI crossing2 = new BlankNode();
Graph tc2 = generateCircle(3, crossing2);
tc2.addAll(generateCircle(3, crossing2));
Assertions.assertEquals(6, tc1.size());
final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2);
Assertions.assertNull(mapping);
}
// Lines of length 4 and 5 sharing their start node; the order in which the
// two lines are generated must not matter. 10 bnodes (1 shared start +
// 4 + 5 chain nodes) end up in the mapping.
@Test
public void test12() {
BlankNodeOrIRI start1 = new BlankNode();
Graph tc1 = Utils4Testing.generateLine(4, start1);
tc1.addAll(Utils4Testing.generateLine(5, start1));
BlankNodeOrIRI start2 = new BlankNode();
Graph tc2 = Utils4Testing.generateLine(5, start2);
tc2.addAll(Utils4Testing.generateLine(4, start2));
Assertions.assertEquals(9, tc1.size());
final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2);
Assertions.assertNotNull(mapping);
Assertions.assertEquals(10, mapping.size());
}
// 4+5 lines vs 3+3 lines: different triple counts, no mapping possible.
@Test
public void test13() {
BlankNodeOrIRI start1 = new BlankNode();
Graph tc1 = Utils4Testing.generateLine(4, start1);
tc1.addAll(Utils4Testing.generateLine(5, start1));
BlankNodeOrIRI start2 = new BlankNode();
Graph tc2 = Utils4Testing.generateLine(3, start2);
tc2.addAll(Utils4Testing.generateLine(3, start2));
Assertions.assertEquals(9, tc1.size());
final Map<BlankNode, BlankNode> mapping = GraphMatcher.getValidMapping(tc1, tc2);
Assertions.assertNull(mapping);
}
}
| 317 |
0 | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation/literal/PlainLiteralImplTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.literal;
import org.apache.clerezza.Language;
import org.apache.clerezza.Literal;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
/**
 * Tests equality and hashing of {@link PlainLiteralImpl}.
 *
 * @author reto
 */
@RunWith(JUnitPlatform.class)
public class PlainLiteralImplTest {

    @Test
    public void plainLiteralEquality() {
        final String lexicalForm = "some text";
        final Literal first = new PlainLiteralImpl(lexicalForm);
        final Literal second = new PlainLiteralImpl(lexicalForm);
        // Same lexical form (no language tag): equal literals, equal hashes.
        Assertions.assertEquals(first, second);
        Assertions.assertEquals(first.hashCode(), second.hashCode());
        // A different lexical form must not compare equal.
        final Literal different = new PlainLiteralImpl("something else");
        Assertions.assertFalse(first.equals(different));
    }

    @Test
    public void languageLiteralEquality() {
        final String lexicalForm = "some text";
        final Language canadianEnglish = new Language("en-ca");
        final Literal first = new PlainLiteralImpl(lexicalForm, canadianEnglish);
        final Literal second = new PlainLiteralImpl(lexicalForm, canadianEnglish);
        // Same lexical form and language: equal literals, equal hashes.
        Assertions.assertEquals(first, second);
        Assertions.assertEquals(first.hashCode(), second.hashCode());
        // Same lexical form but a different language must not compare equal.
        final Literal german = new PlainLiteralImpl(lexicalForm, new Language("de"));
        Assertions.assertFalse(first.equals(german));
        // A language-tagged literal never equals an untagged one,
        // in either direction of the comparison.
        final Literal untagged = new PlainLiteralImpl(lexicalForm, null);
        Assertions.assertFalse(german.equals(untagged));
        Assertions.assertFalse(untagged.equals(german));
    }

    /**
     * The hash code is the sum of the hash codes of the data type, the
     * lexical form and the language.
     */
    @Test
    public void checkHashCode() {
        final String lexicalForm = "some text";
        final Language english = new Language("en");
        final Literal literal = new PlainLiteralImpl(lexicalForm, english);
        Assertions.assertEquals(
                literal.getDataType().hashCode() + lexicalForm.hashCode() + english.hashCode(),
                literal.hashCode());
    }
}
| 318 |
0 | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation/literal/TypedLiteralImplTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.literal;
import org.apache.clerezza.Literal;
import org.apache.clerezza.IRI;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
/**
 * Tests equality and hashing of {@link TypedLiteralImpl}.
 *
 * @author reto
 */
@RunWith(JUnitPlatform.class)
public class TypedLiteralImplTest {

    @Test
    public void typedLiteralEquality() {
        final String lexicalForm = "some text";
        final IRI magicType = new IRI("http://example.org/datatypes/magic");
        final Literal first = new TypedLiteralImpl(lexicalForm, magicType);
        final Literal second = new TypedLiteralImpl(lexicalForm, magicType);
        // Same lexical form and data type: equal literals, equal hashes.
        Assertions.assertEquals(first, second);
        Assertions.assertEquals(first.hashCode(), second.hashCode());
        // A different lexical form must not compare equal.
        final Literal otherForm = new TypedLiteralImpl("something else", magicType);
        Assertions.assertFalse(first.equals(otherForm));
        // Neither must the same lexical form with a different data type.
        final IRI otherType = new IRI("http://example.org/datatypes/other");
        final Literal otherTyped = new TypedLiteralImpl(lexicalForm, otherType);
        Assertions.assertFalse(first.equals(otherTyped));
    }

    /**
     * The hash code equals the hash code of the lexical form plus the hash
     * code of the data type.
     */
    @Test
    public void checkHashCode() {
        final String lexicalForm = "some text";
        final IRI dataType = new IRI("http://example.org/datatypes/magic");
        final Literal literal = new TypedLiteralImpl(lexicalForm, dataType);
        Assertions.assertEquals(lexicalForm.hashCode() + dataType.hashCode(),
                literal.hashCode());
    }
}
| 319 |
0 | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation/literal/TripleImplTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.literal;
import org.apache.clerezza.BlankNodeOrIRI;
import org.apache.clerezza.IRI;
import org.apache.clerezza.RDFTerm;
import org.apache.clerezza.Triple;
import org.apache.clerezza.implementation.TripleImpl;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
/**
 * Tests equality and hashing of {@link TripleImpl}.
 *
 * @author reto
 */
@RunWith(JUnitPlatform.class)
public class TripleImplTest {

    @Test
    public void tripleEquality() {
        final BlankNodeOrIRI subject = new IRI("http://example.org/");
        final IRI predicate = new IRI("http://example.org/property");
        final RDFTerm object = new PlainLiteralImpl("property value");
        // Two triples built from identical components must be equal and
        // must agree on their hash codes.
        final Triple first = new TripleImpl(subject, predicate, object);
        final Triple second = new TripleImpl(subject, predicate, object);
        Assertions.assertEquals(first.hashCode(), second.hashCode());
        Assertions.assertEquals(first, second);
    }
}
| 320 |
0 | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation/literal/LiteralFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.literal;
import org.apache.clerezza.IRI;
import org.apache.clerezza.Literal;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
import java.util.Arrays;
import java.util.Date;
/**
 * Tests the round-trip conversion of Java objects to typed literals and back
 * via {@link LiteralFactory}.
 *
 * @author reto
 */
@RunWith(JUnitPlatform.class)
public class LiteralFactoryTest {

    /**
     * Requesting a typed literal for a value without a registered convertor
     * must raise a {@link NoConvertorException}.
     */
    @Test
    public void unavailableConvertor() {
        final Object unconvertible = new Object() {
        };
        Assertions.assertThrows(NoConvertorException.class, () ->
                LiteralFactory.getInstance().createTypedLiteral(unconvertible)
        );
    }

    /**
     * A byte[] round-trips through an xsd:base64Binary literal.
     */
    @Test
    public void byteArrayConversion() {
        final byte[] data = new byte[5];
        for (byte i = 0; i < data.length; i++) {
            data[i] = i;
        }
        final Literal literal = LiteralFactory.getInstance().createTypedLiteral(data);
        Assertions.assertEquals(new IRI("http://www.w3.org/2001/XMLSchema#base64Binary"),
                literal.getDataType());
        // The array's runtime class serves as the Class token for byte[];
        // see java-bug 4071439 regarding obtaining it without an instance.
        final byte[] restored = LiteralFactory.getInstance().createObject(data.getClass(), literal);
        Assertions.assertTrue(Arrays.equals(data, restored));
    }

    /**
     * A java.util.Date round-trips through an xsd:dateTime literal.
     */
    @Test
    public void dateConversion() {
        final Date original = new Date();
        final Literal literal = LiteralFactory.getInstance().createTypedLiteral(original);
        Assertions.assertEquals(new IRI("http://www.w3.org/2001/XMLSchema#dateTime"),
                literal.getDataType());
        final Date restored = LiteralFactory.getInstance().createObject(Date.class, literal);
        Assertions.assertEquals(original.getTime(), restored.getTime());
    }

    /**
     * A String round-trips through an xsd:string literal.
     */
    @Test
    public void stringConversion() {
        final String original = "Hello world";
        final Literal literal = LiteralFactory.getInstance().createTypedLiteral(original);
        Assertions.assertEquals(new IRI("http://www.w3.org/2001/XMLSchema#string"),
                literal.getDataType());
        Assertions.assertEquals(original,
                LiteralFactory.getInstance().createObject(String.class, literal));
    }

    /**
     * An int round-trips through an xsd:int literal.
     */
    @Test
    public void intConversion() {
        final int original = 3;
        final Literal literal = LiteralFactory.getInstance().createTypedLiteral(original);
        Assertions.assertEquals(new IRI("http://www.w3.org/2001/XMLSchema#int"),
                literal.getDataType());
        final Integer restored = LiteralFactory.getInstance().createObject(Integer.class, literal);
        Assertions.assertEquals(original, restored.intValue());
    }

    /**
     * A long round-trips through an xsd:long literal.
     */
    @Test
    public void longConversion() {
        final long original = 332314646;
        final Literal literal = LiteralFactory.getInstance().createTypedLiteral(original);
        Assertions.assertEquals(new IRI("http://www.w3.org/2001/XMLSchema#long"),
                literal.getDataType());
        final Long restored = LiteralFactory.getInstance().createObject(Long.class, literal);
        Assertions.assertEquals(original, restored.longValue());
    }
}
| 321 |
0 | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation/graph/GraphTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graph;
import org.apache.clerezza.*;
import org.apache.clerezza.implementation.TripleImpl;
import org.apache.clerezza.implementation.literal.PlainLiteralImpl;
import org.apache.clerezza.implementation.literal.TypedLiteralImpl;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
import java.util.*;
/**
* A generic abstract test class, implementations overwrite this class,
* providing an implementation of the getEmptyGraph method.
*
* @author reto, szalay, mir, hhn
*/
@RunWith(JUnitPlatform.class)
public abstract class GraphTest {
// Fixture IRIs shared by the tests below.
private final IRI uriRef1 =
new IRI("http://example.org/ontology#res1");
private final IRI uriRef2 =
new IRI("http://example.org/ontology#res2");
private final IRI uriRef3 =
new IRI("http://example.org/ontology#res3");
private final IRI uriRef4 =
new IRI("http://example.org/ontology#res4");
// Data type IRI of rdf:XMLLiteral, used by the typed-literal test.
private final IRI xmlLiteralType =
new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#XMLLiteral");
private Literal literal1 = new PlainLiteralImpl("literal1");
private Literal literal2 = new PlainLiteralImpl("literal2");
private BlankNode bnode1 = new BlankNode();
private BlankNode bnode2 = new BlankNode();
// Triples referenced by the (currently commented-out) graph-event tests.
private Triple trpl1 = new TripleImpl(uriRef2, uriRef2, literal1);
private Triple trpl2 = new TripleImpl(uriRef1, uriRef2, uriRef1);
private Triple trpl3 = new TripleImpl(bnode2, uriRef3, literal2);
private Triple trpl4 = new TripleImpl(uriRef3, uriRef4, literal2);
/**
 * Subclasses implement this method to provide implementation instances of
 * Graph. This method may be called an arbitrary amount of time,
 * independently whether previously returned Graph are still in use or not.
 *
 * @return an empty Graph of the implementation to be tested
 */
protected abstract Graph getEmptyGraph();
// Exercises add(), size() and filter() together: exact-match filtering,
// wildcard filtering by predicate, and round-tripping of blank nodes.
@Test
public void testAddCountAndGetTriples() {
Graph graph = getEmptyGraph();
Assertions.assertEquals(0, graph.size());
final TripleImpl triple1 = new TripleImpl(uriRef1, uriRef2, uriRef1);
graph.add(triple1);
Assertions.assertEquals(1, graph.size());
// Fully specified filter must return exactly the stored triple.
Iterator<Triple> tripleIter = graph.filter(uriRef1, uriRef2, uriRef1);
Assertions.assertTrue(tripleIter.hasNext());
Triple tripleGot = tripleIter.next();
Assertions.assertEquals(triple1, tripleGot);
Assertions.assertFalse(tripleIter.hasNext());
// Anonymous subclass: the graph must handle arbitrary BlankNode instances.
BlankNode bnode = new BlankNode() {
};
graph.add(new TripleImpl(bnode, uriRef1, uriRef3));
graph.add(new TripleImpl(bnode, uriRef1, uriRef4));
// Filtering by predicate only: both bnode triples share one subject but
// have two distinct objects.
tripleIter = graph.filter(null, uriRef1, null);
Set<BlankNodeOrIRI> subjectInMatchingTriples = new HashSet<BlankNodeOrIRI>();
Set<RDFTerm> objectsInMatchingTriples = new HashSet<RDFTerm>();
while (tripleIter.hasNext()) {
Triple triple = tripleIter.next();
subjectInMatchingTriples.add(triple.getSubject());
objectsInMatchingTriples.add(triple.getObject());
}
Assertions.assertEquals(1, subjectInMatchingTriples.size());
Assertions.assertEquals(2, objectsInMatchingTriples.size());
Set<RDFTerm> expectedObjects = new HashSet<RDFTerm>();
expectedObjects.add(uriRef3);
expectedObjects.add(uriRef4);
Assertions.assertEquals(expectedObjects, objectsInMatchingTriples);
// A triple with the same blank node as subject and object must come back
// with subject == object.
graph.add(new TripleImpl(bnode, uriRef4, bnode));
tripleIter = graph.filter(null, uriRef4, null);
Assertions.assertTrue(tripleIter.hasNext());
Triple retrievedTriple = tripleIter.next();
Assertions.assertFalse(tripleIter.hasNext());
Assertions.assertEquals(retrievedTriple.getSubject(), retrievedTriple.getObject());
// triple1 (uriRef1, uriRef2, uriRef1) also has subject equal to object.
tripleIter = graph.filter(uriRef1, uriRef2, null);
Assertions.assertTrue(tripleIter.hasNext());
retrievedTriple = tripleIter.next();
Assertions.assertFalse(tripleIter.hasNext());
Assertions.assertEquals(retrievedTriple.getSubject(), retrievedTriple.getObject());
}
/** clear() must remove every triple at once. */
@Test
public void testRemoveAllTriples() {
    final Graph g = getEmptyGraph();
    Assertions.assertEquals(0, g.size());
    g.add(new TripleImpl(uriRef1, uriRef2, uriRef3));
    g.add(new TripleImpl(uriRef2, uriRef3, uriRef4));
    Assertions.assertEquals(2, g.size());
    g.clear();
    Assertions.assertEquals(0, g.size());
}
/** Typed literals can be stored and retrieved unchanged. */
@Test
public void testUseTypedLiterals() {
    final Graph g = getEmptyGraph();
    Assertions.assertEquals(0, g.size());
    final Literal xmlValue = new TypedLiteralImpl("<elem>value</elem>", xmlLiteralType);
    g.add(new TripleImpl(uriRef1, uriRef2, xmlValue));
    final Iterator<Triple> matches = g.filter(uriRef1, uriRef2, null);
    Assertions.assertTrue(matches.hasNext());
    Assertions.assertEquals(xmlValue, matches.next().getObject());
}
/** Language-tagged literals keep both their value and their language tag. */
@Test
public void testUseLanguageLiterals() {
    final Graph g = getEmptyGraph();
    Assertions.assertEquals(0, g.size());
    final Language italian = new Language("it");
    final Literal taggedValue = new PlainLiteralImpl("<elem>value</elem>", italian);
    g.add(new TripleImpl(uriRef1, uriRef2, taggedValue));
    final Iterator<Triple> matches = g.filter(uriRef1, uriRef2, null);
    Assertions.assertTrue(matches.hasNext());
    final RDFTerm retrieved = matches.next().getObject();
    Assertions.assertEquals(taggedValue, retrieved);
    Assertions.assertEquals(italian, ((Literal) retrieved).getLanguage());
}
/** Triples can be removed through the graph's iterator. */
@Test
public void testRemoveViaIterator() {
    final Graph g = getEmptyGraph();
    Assertions.assertEquals(0, g.size());
    g.add(new TripleImpl(uriRef1, uriRef2, uriRef1));
    g.add(new TripleImpl(uriRef1, uriRef2, uriRef4));
    Assertions.assertEquals(2, g.size());
    // Iterator.remove() must be supported and delete each visited triple.
    final Iterator<Triple> it = g.iterator();
    while (it.hasNext()) {
        it.next();
        it.remove();
    }
    Assertions.assertEquals(0, g.size());
}
/** A freshly provided test graph is always empty. */
@Test
public void testGetSize() throws Exception {
    // The test graph must always be empty after test fixture setup
    Assertions.assertEquals(0, getEmptyGraph().size());
}
/** Adding a triple grows the graph by one and reports success. */
@Test
public void testAddSingleTriple() throws Exception {
    final Graph g = getEmptyGraph();
    final Triple personHasName = createTriple(
            "http://example.org/ontology/Person",
            "http://example.org/ontology/hasName",
            "http://example.org/people/alice");
    Assertions.assertEquals(0, g.size());
    Assertions.assertTrue(g.add(personHasName));
    Assertions.assertEquals(1, g.size());
}
/** Adding a duplicate triple is a no-op that returns false. */
@Test
public void testAddSameTripleTwice() throws Exception {
    final Graph g = getEmptyGraph();
    final Triple personHasName = createTriple(
            "http://example.org/ontology/Person",
            "http://example.org/ontology/hasName",
            "http://example.org/people/alice");
    Assertions.assertEquals(0, g.size());
    Assertions.assertTrue(g.add(personHasName));
    Assertions.assertFalse(g.add(personHasName)); // ImmutableGraph does not change
    Assertions.assertEquals(1, g.size());
}
/** A previously added triple can be removed again. */
@Test
public void testRemoveSingleTriple() throws Exception {
    final Graph g = getEmptyGraph();
    final Triple personHasName = createTriple(
            "http://example.org/ontology/Person",
            "http://example.org/ontology/hasName",
            "http://example.org/people/alice");
    Assertions.assertTrue(g.add(personHasName));
    Assertions.assertTrue(g.remove(personHasName));
    Assertions.assertEquals(0, g.size());
}
/** Removing an already-removed triple returns false and leaves the rest. */
@Test
public void testRemoveSameTripleTwice() throws Exception {
    final Graph g = getEmptyGraph();
    final Triple alice = createTriple(
            "http://example.org/ontology/Person",
            "http://example.org/ontology/hasName",
            "http://example.org/people/alice");
    final Triple bob = createTriple(
            "http://example.org/ontology/Person",
            "http://example.org/ontology/hasName",
            "http://example.org/people/bob");
    Assertions.assertTrue(g.add(alice));
    Assertions.assertTrue(g.add(bob));
    Assertions.assertTrue(g.remove(alice));
    // The second removal of the same triple must report no change.
    Assertions.assertFalse(g.remove(alice));
    Assertions.assertEquals(1, g.size());
}
/** A blank node stored in the graph is retrieved as that same node. */
@Test
public void testGetSameBlankNode() throws Exception {
    final Graph g = getEmptyGraph();
    final BlankNode person = new BlankNode();
    final IRI HAS_NAME = new IRI("http://example.org/ontology/hasName");
    final PlainLiteralImpl name = new PlainLiteralImpl("http://example.org/people/alice");
    final PlainLiteralImpl name2 = new PlainLiteralImpl("http://example.org/people/bob");
    Assertions.assertTrue(g.add(new TripleImpl(person, HAS_NAME, name)));
    Assertions.assertTrue(g.add(new TripleImpl(person, HAS_NAME, name2)));
    final Iterator<Triple> result = g.filter(null, HAS_NAME, name);
    Assertions.assertEquals(person, result.next().getSubject());
}
/** contains() finds a triple that was added. */
@Test
public void testContainsIfContained() throws Exception {
    final Graph g = getEmptyGraph();
    final Triple personHasName = createTriple(
            "http://example.org/ontology/Person",
            "http://example.org/ontology/hasName",
            "http://example.org/people/alice");
    Assertions.assertTrue(g.add(personHasName));
    Assertions.assertTrue(g.contains(personHasName));
}
/** contains() on an empty graph is always false. */
@Test
public void testContainsIfEmpty() throws Exception {
    final Triple personHasName = createTriple(
            "http://example.org/ontology/Person",
            "http://example.org/ontology/hasName",
            "http://example.org/people/alice");
    Assertions.assertFalse(getEmptyGraph().contains(personHasName));
}
/** contains() does not match a triple differing from the stored one. */
@Test
public void testContainsIfNotContained() throws Exception {
    final Graph g = getEmptyGraph();
    final Triple stored = createTriple(
            "http://example.org/ontology/Person",
            "http://example.org/ontology/hasName",
            "http://example.org/people/alice");
    final Triple absent = createTriple(
            "http://example.org/ontology/Person",
            "http://example.org/ontology/hasName",
            "http://example.org/people/bob");
    Assertions.assertTrue(g.add(stored));
    Assertions.assertFalse(g.contains(absent));
}
/** A wildcard filter over an empty graph yields nothing. */
@Test
public void testFilterEmptyGraph() throws Exception {
    final Iterator<Triple> i = getEmptyGraph().filter(null, null, null);
    Assertions.assertFalse(i.hasNext());
}
/** A wildcard filter over a one-triple graph yields exactly that triple. */
@Test
public void testFilterSingleEntry() throws Exception {
    final Graph g = getEmptyGraph();
    final Triple personHasName = createTriple(
            "http://example.org/ontology/Person",
            "http://example.org/ontology/hasName",
            "http://example.org/people/alice");
    Assertions.assertTrue(g.add(personHasName));
    final Collection<Triple> resultSet = toCollection(g.filter(null, null, null));
    Assertions.assertEquals(1, resultSet.size());
    Assertions.assertTrue(resultSet.contains(personHasName));
}
/** Filtering by object returns only the triples with that object. */
@Test
public void testFilterByObject() throws Exception {
    final Graph g = getEmptyGraph();
    final Triple alice = createTriple(
            "http://example.org/ontology/Person",
            "http://example.org/ontology/hasName",
            "http://example.org/people/alice");
    final Triple bob = createTriple(
            "http://example.org/ontology/Person",
            "http://example.org/ontology/hasName",
            "http://example.org/people/bob");
    Assertions.assertTrue(g.add(alice));
    Assertions.assertTrue(g.add(bob));
    // Find bob
    Collection<Triple> resultSet = toCollection(
            g.filter(null, null, new IRI("http://example.org/people/bob")));
    Assertions.assertEquals(1, resultSet.size());
    Assertions.assertTrue(resultSet.contains(bob));
    // Find alice
    resultSet = toCollection(
            g.filter(null, null, new IRI("http://example.org/people/alice")));
    Assertions.assertEquals(1, resultSet.size());
    Assertions.assertTrue(resultSet.contains(alice));
    // Find both via the wildcard filter
    resultSet = toCollection(g.filter(null, null, null));
    Assertions.assertEquals(2, resultSet.size());
    Assertions.assertTrue(resultSet.contains(alice));
    Assertions.assertTrue(resultSet.contains(bob));
}
/*
@Test
public void graphEventTestAddRemove() {
Graph mGraph = getEmptyGraph();
TestGraphListener listener = new TestGraphListener();
mGraph.addGraphListener(listener, new FilterTriple(uriRef1, uriRef2, null));
mGraph.addGraphListener(listener, new FilterTriple(bnode2, null, literal2));
mGraph.addGraphListener(listener, new FilterTriple(null, uriRef4, literal2));
mGraph.add(trpl1);
Assertions.assertNull(listener.getEvents());
mGraph.add(trpl2);
Assertions.assertEquals(1, listener.getEvents().size());
Assertions.assertEquals(trpl2, listener.getEvents().get(0).getTriple());
Assertions.assertTrue(listener.getEvents().get(0) instanceof AddEvent);
listener.resetEvents();
mGraph.remove(trpl2);
Assertions.assertEquals(1, listener.getEvents().size());
Assertions.assertEquals(trpl2, listener.getEvents().get(0).getTriple());
Assertions.assertTrue(listener.getEvents().get(0) instanceof RemoveEvent);
listener.resetEvents();
mGraph.add(trpl3);
Assertions.assertEquals(1, listener.getEvents().size());
Assertions.assertEquals(trpl3, listener.getEvents().get(0).getTriple());
Assertions.assertTrue(listener.getEvents().get(0) instanceof AddEvent);
listener.resetEvents();
mGraph.remove(trpl4);
Assertions.assertNull(listener.getEvents());
}
@Test
public void graphEventTestAddAllRemoveAll() {
Graph mGraph = getEmptyGraph();
TestGraphListener listener = new TestGraphListener();
mGraph.addGraphListener(listener, new FilterTriple(uriRef1, uriRef2, null));
mGraph.addGraphListener(listener, new FilterTriple(bnode2, null, literal2));
mGraph.addGraphListener(listener, new FilterTriple(null, uriRef4, literal2));
Graph triples = new SimpleGraph();
triples.add(trpl1);
triples.add(trpl2);
triples.add(trpl3);
triples.add(trpl4);
mGraph.addAll(triples);
List<GraphEvent> cumulatedEvents = listener.getCumulatedEvents();
Set<Triple> cumulatedTriples = getCumulatedTriples(cumulatedEvents);
Assertions.assertEquals(3, cumulatedEvents.size());
Assertions.assertTrue(cumulatedEvents.get(0) instanceof AddEvent);
Assertions.assertTrue(cumulatedTriples.contains(trpl2));
Assertions.assertTrue(cumulatedTriples.contains(trpl3));
Assertions.assertTrue(cumulatedTriples.contains(trpl4));
listener.resetCumulatedEvents();
mGraph.removeAll(triples);
cumulatedEvents = listener.getCumulatedEvents();
cumulatedTriples = getCumulatedTriples(cumulatedEvents);
Assertions.assertEquals(3, cumulatedEvents.size());
Assertions.assertTrue(cumulatedEvents.get(0) instanceof RemoveEvent);
Assertions.assertTrue(cumulatedTriples.contains(trpl2));
Assertions.assertTrue(cumulatedTriples.contains(trpl3));
Assertions.assertTrue(cumulatedTriples.contains(trpl4));
}
@Test
public void graphEventTestFilterRemove() {
Graph mGraph = getEmptyGraph();
TestGraphListener listener = new TestGraphListener();
mGraph.addGraphListener(listener, new FilterTriple(uriRef1, uriRef2, null));
mGraph.addGraphListener(listener, new FilterTriple(bnode2, null, literal2));
mGraph.addGraphListener(listener, new FilterTriple(null, uriRef4, literal2));
mGraph.add(trpl1);
mGraph.add(trpl2);
mGraph.add(trpl3);
mGraph.add(trpl4);
listener.resetCumulatedEvents();
Iterator<Triple> result = mGraph.filter(null, uriRef2, null);
while (result.hasNext()) {
result.next();
result.remove();
}
List<GraphEvent> cumulatedEvents = listener.getCumulatedEvents();
Assertions.assertEquals(1, cumulatedEvents.size());
Assertions.assertTrue(cumulatedEvents.get(0) instanceof RemoveEvent);
Assertions.assertEquals(trpl2, listener.getEvents().get(0).getTriple());
}
@Test
public void graphEventTestIteratorRemove() {
Graph mGraph = getEmptyGraph();
TestGraphListener listener = new TestGraphListener();
mGraph.addGraphListener(listener, new FilterTriple(uriRef1, uriRef2, null));
mGraph.addGraphListener(listener, new FilterTriple(bnode2, null, literal2));
mGraph.addGraphListener(listener, new FilterTriple(null, uriRef4, literal2));
mGraph.add(trpl1);
mGraph.add(trpl2);
mGraph.add(trpl3);
mGraph.add(trpl4);
listener.resetCumulatedEvents();
Iterator<Triple> result = mGraph.iterator();
while (result.hasNext()) {
result.next();
result.remove();
}
List<GraphEvent> cumulatedEvents = listener.getCumulatedEvents();
Set<Triple> cumulatedTriples = getCumulatedTriples(cumulatedEvents);
Assertions.assertEquals(3, cumulatedEvents.size());
Assertions.assertTrue(cumulatedEvents.get(0) instanceof RemoveEvent);
Assertions.assertTrue(cumulatedTriples.contains(trpl2));
Assertions.assertTrue(cumulatedTriples.contains(trpl3));
Assertions.assertTrue(cumulatedTriples.contains(trpl4));
}
@Test
public void graphEventTestClear() {
Graph mGraph = getEmptyGraph();
TestGraphListener listener = new TestGraphListener();
mGraph.addGraphListener(listener, new FilterTriple(uriRef1, uriRef2, null));
mGraph.addGraphListener(listener, new FilterTriple(bnode2, null, literal2));
mGraph.addGraphListener(listener, new FilterTriple(null, uriRef4, literal2));
mGraph.add(trpl1);
mGraph.add(trpl2);
mGraph.add(trpl3);
mGraph.add(trpl4);
listener.resetCumulatedEvents();
mGraph.clear();
List<GraphEvent> cumulatedEvents = listener.getCumulatedEvents();
Set<Triple> cumulatedTriples = getCumulatedTriples(cumulatedEvents);
Assertions.assertEquals(3, cumulatedEvents.size());
Assertions.assertTrue(cumulatedEvents.get(0) instanceof RemoveEvent);
Assertions.assertTrue(cumulatedTriples.contains(trpl2));
Assertions.assertTrue(cumulatedTriples.contains(trpl3));
Assertions.assertTrue(cumulatedTriples.contains(trpl4));
}
private Set<Triple> getCumulatedTriples(List<GraphEvent> cumulatedEvents) {
Set<Triple> triples = new HashSet<Triple>();
for(GraphEvent event: cumulatedEvents) {
triples.add(event.getTriple());
}
return triples;
}
@Test
public void graphEventTestWithDelay() throws Exception{
Graph mGraph = getEmptyGraph();
TestGraphListener listener = new TestGraphListener();
mGraph.addGraphListener(listener, new FilterTriple(uriRef1, uriRef2, null),
1000);
Triple triple0 = new TripleImpl(uriRef2, uriRef2, literal1);
Triple triple1 = new TripleImpl(uriRef1, uriRef2, uriRef1);
Triple triple2 = new TripleImpl(uriRef1, uriRef2, literal1);
Triple triple3 = new TripleImpl(uriRef1, uriRef2, bnode1);
mGraph.add(triple0);
mGraph.add(triple1);
mGraph.add(triple2);
mGraph.add(triple3);
Thread.sleep(1500);
Assertions.assertEquals(3, listener.getEvents().size());
Assertions.assertEquals(triple1, listener.getEvents().get(0).getTriple());
Assertions.assertTrue(listener.getEvents().get(0) instanceof AddEvent);
Assertions.assertEquals(triple2, listener.getEvents().get(1).getTriple());
Assertions.assertTrue(listener.getEvents().get(0) instanceof AddEvent);
Assertions.assertEquals(triple3, listener.getEvents().get(2).getTriple());
Assertions.assertTrue(listener.getEvents().get(0) instanceof AddEvent);
}
private static class TestGraphListener implements GraphListener {
private List<GraphEvent> events = null;
private List<GraphEvent> cumulatedEvents = new ArrayList<GraphEvent>();
@Override
public void graphChanged(List<GraphEvent> events) {
this.events = events;
Iterator<GraphEvent> eventIter = events.iterator();
while (eventIter.hasNext()) {
GraphEvent graphEvent = eventIter.next();
this.cumulatedEvents.add(graphEvent);
}
}
public List<GraphEvent> getEvents() {
return events;
}
public List<GraphEvent> getCumulatedEvents() {
return cumulatedEvents;
}
public void resetEvents() {
events = null;
}
public void resetCumulatedEvents() {
cumulatedEvents = new ArrayList<GraphEvent>();
}
}
*/
private Collection<Triple> toCollection(Iterator<Triple> iterator) {
Collection<Triple> result = new ArrayList<Triple>();
while (iterator.hasNext()) {
result.add(iterator.next());
}
return result;
}
/**
* Creates a new <code>Triple</code>.
*
* @param subject the subject.
* @param predicate the predicate.
* @param object the object.
* @throws IllegalArgumentException If an attribute is <code>null</code>.
*/
private Triple createTriple(String subject, String predicate,
String object) {
return new TripleImpl(new IRI(subject), new IRI(predicate),
new IRI(object));
}
}
| 322 |
0 | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/test/java/org/apache/clerezza/implementation/graph/SimpleGraphTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graph;
import org.apache.clerezza.IRI;
import org.apache.clerezza.Triple;
import org.apache.clerezza.implementation.TripleImpl;
import org.apache.clerezza.implementation.in_memory.SimpleGraph;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
/**
 * Unit tests for {@link SimpleGraph}: iterator-based removal, bulk removal,
 * filtered removal, and fail-fast concurrent-modification detection.
 *
 * @author mir
 */
@RunWith(JUnitPlatform.class)
public class SimpleGraphTest {
    private IRI uriRef1 = new IRI("http://example.org/foo");
    private IRI uriRef2 = new IRI("http://example.org/bar");
    private IRI uriRef3 = new IRI("http://example.org/test");
    private Triple triple1 = new TripleImpl(uriRef1, uriRef2, uriRef3);
    private Triple triple2 = new TripleImpl(uriRef2, uriRef2, uriRef1);
    private Triple triple3 = new TripleImpl(uriRef3, uriRef1, uriRef3);
    private Triple triple4 = new TripleImpl(uriRef1, uriRef3, uriRef2);
    private Triple triple5 = new TripleImpl(uriRef2, uriRef3, uriRef2);
    /** Removing every triple through the graph's own iterator empties the graph. */
    @Test
    public void iteratorRemove() {
        SimpleGraph stc = new SimpleGraph();
        stc.add(triple1);
        stc.add(triple2);
        stc.add(triple3);
        stc.add(triple4);
        stc.add(triple5);
        Iterator<Triple> iter = stc.iterator();
        while (iter.hasNext()) {
            iter.next();
            iter.remove();
        }
        Assertions.assertEquals(0, stc.size());
    }
    /** removeAll deletes exactly the triples contained in the argument graph. */
    @Test
    public void removeAll() {
        SimpleGraph stc = new SimpleGraph();
        stc.add(triple1);
        stc.add(triple2);
        stc.add(triple3);
        stc.add(triple4);
        stc.add(triple5);
        SimpleGraph stc2 = new SimpleGraph();
        stc2.add(triple1);
        stc2.add(triple3);
        stc2.add(triple5);
        stc.removeAll(stc2);
        Assertions.assertEquals(2, stc.size());
    }
    /** Removing through a filter iterator only affects the matching triples. */
    @Test
    public void filterIteratorRemove() {
        SimpleGraph stc = new SimpleGraph();
        stc.add(triple1);
        stc.add(triple2);
        stc.add(triple3);
        stc.add(triple4);
        stc.add(triple5);
        // triple1 and triple4 have uriRef1 as subject and are removed
        Iterator<Triple> iter = stc.filter(uriRef1, null, null);
        while (iter.hasNext()) {
            iter.next();
            iter.remove();
        }
        Assertions.assertEquals(3, stc.size());
    }
    /**
     * With concurrency checking enabled, removing directly from the graph
     * while a filter iterator is open must fail fast with a
     * {@link ConcurrentModificationException}.
     */
    @Test
    public void remove() {
        SimpleGraph stc = new SimpleGraph();
        stc.setCheckConcurrency(true);
        stc.add(triple1);
        stc.add(triple2);
        stc.add(triple3);
        stc.add(triple4);
        stc.add(triple5);
        Iterator<Triple> iter = stc.filter(uriRef1, null, null);
        Assertions.assertThrows(ConcurrentModificationException.class, () -> {
            while (iter.hasNext()) {
                Triple triple = iter.next();
                stc.remove(triple);
            }
        });
        // the first direct removal succeeded before the exception was raised
        Assertions.assertEquals(4, stc.size());
    }
}
| 323 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/TripleImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation;
import org.apache.clerezza.BlankNodeOrIRI;
import org.apache.clerezza.IRI;
import org.apache.clerezza.RDFTerm;
import org.apache.clerezza.Triple;
/**
 * An immutable implementation of {@link Triple}.
 *
 * <p>Equality is structural: two triples are equal iff their subjects,
 * predicates and objects are pairwise equal, independent of the concrete
 * {@link Triple} implementation.</p>
 *
 * @author reto
 */
public class TripleImpl implements Triple {
    private final BlankNodeOrIRI subject;
    private final IRI predicate;
    private final RDFTerm object;
    /**
     * Creates a new <code>TripleImpl</code>.
     *
     * @param subject the subject.
     * @param predicate the predicate.
     * @param object the object.
     * @throws IllegalArgumentException if an attribute is <code>null</code>.
     */
    public TripleImpl(BlankNodeOrIRI subject, IRI predicate, RDFTerm object) {
        if (subject == null) {
            throw new IllegalArgumentException("Invalid subject: null");
        } else if (predicate == null) {
            throw new IllegalArgumentException("Invalid predicate: null");
        } else if (object == null) {
            throw new IllegalArgumentException("Invalid object: null");
        }
        this.subject = subject;
        this.predicate = predicate;
        this.object = object;
    }
    @Override
    public boolean equals(Object obj) {
        // instanceof also covers obj == null
        if (!(obj instanceof Triple)) {
            return false;
        }
        final Triple other = (Triple) obj;
        return this.subject.equals(other.getSubject())
                && this.predicate.equals(other.getPredicate())
                && this.object.equals(other.getObject());
    }
    @Override
    public int hashCode() {
        // shifts keep the hash asymmetric in subject/object, so (s p o) and
        // (o p s) usually hash differently; must stay stable across Triple
        // implementations that follow the same convention
        return (subject.hashCode() >> 1) ^ predicate.hashCode() ^ (object.hashCode() << 1);
    }
    @Override
    public BlankNodeOrIRI getSubject() {
        return subject;
    }
    @Override
    public IRI getPredicate() {
        return predicate;
    }
    @Override
    public RDFTerm getObject() {
        return object;
    }
    @Override
    public String toString() {
        return subject + " " + predicate + " " + object + ".";
    }
}
| 324 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/package-info.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation; | 325 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching/HashMatching.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graphmatching;
import org.apache.clerezza.*;
import org.apache.clerezza.implementation.TripleImpl;
import org.apache.clerezza.implementation.graphmatching.collections.IntHashMap;
import org.apache.clerezza.implementation.graphmatching.collections.IntIterator;
import java.util.*;
/**
 * Computes a matching between the blank nodes of two graphs by iteratively
 * hashing every blank node over its incoming and outgoing properties. A blank
 * node whose hash is unique within its graph is matched immediately; nodes
 * sharing a hash are collected into matching groups for a later
 * trial-and-error phase.
 *
 * @author reto
 */
public class HashMatching {
    // established one-to-one matchings from bnodes of the first graph to the second
    private Map<BlankNode, BlankNode> matchings = new HashMap<BlankNode, BlankNode>();
    // hash-equivalent bnode groups that could not (yet) be matched individually
    private Map<Set<BlankNode>, Set<BlankNode>> matchingGroups;
    /**
     * tc1 and tc2 will be modified: the triples containing no unmatched bnode
     * will be removed
     *
     * @param tc1 first graph, mutated during matching
     * @param tc2 second graph, mutated during matching
     * @throws GraphNotIsomorphicException if hashing shows the graphs cannot be isomorphic
     */
    HashMatching(Graph tc1, Graph tc2) throws GraphNotIsomorphicException {
        int foundMatchings = 0;
        int foundMatchingGroups = 0;
        Map<BlankNode, Integer> bNodeHashMap = new HashMap<BlankNode, Integer>();
        // keep refining the hashes until neither matchings nor groups make progress
        while (true) {
            bNodeHashMap = matchByHashes(tc1, tc2, bNodeHashMap);
            if (bNodeHashMap == null) {
                throw new GraphNotIsomorphicException();
            }
            if (matchings.size() == foundMatchings) {
                // no new direct matchings; stop unless the groups became finer-grained
                if (!(matchingGroups.size() > foundMatchingGroups)) {
                    break;
                }
            }
            foundMatchings = matchings.size();
            foundMatchingGroups = matchingGroups.size();
        }
    }
    /**
     * @return a map of groups: every bnode of a key set matches some bnode of
     * the corresponding value set
     */
    public Map<Set<BlankNode>, Set<BlankNode>> getMatchingGroups() {
        return matchingGroups;
    }
    /**
     * @return the one-to-one bnode matchings established so far
     */
    public Map<BlankNode, BlankNode> getMatchings() {
        return matchings;
    }
    // groups the bnodes of one graph by the hash computed from their properties
    private static IntHashMap<Set<BlankNode>> getHashNodes(Map<BlankNode,
            Set<Property>> bNodePropMap, Map<BlankNode, Integer> bNodeHashMap) {
        IntHashMap<Set<BlankNode>> result = new IntHashMap<Set<BlankNode>>();
        for (Map.Entry<BlankNode, Set<Property>> entry : bNodePropMap.entrySet()) {
            int hash = computeHash(entry.getValue(), bNodeHashMap);
            Set<BlankNode> bNodeSet = result.get(hash);
            if (bNodeSet == null) {
                bNodeSet = new HashSet<BlankNode>();
                result.put(hash, bNodeSet);
            }
            bNodeSet.add(entry.getKey());
        }
        return result;
    }
    /*
     * returns a Map from bnodes to hash that can be used for future
     * refinements, this could be separate for each ImmutableGraph.
     *
     * triples no longer containing an unmatched bnodes ae removed.
     *
     * Note that the matched node are not guaranteed to be equals, but only to
     * be the correct if the graphs are isomorphic.
     */
    private Map<BlankNode, Integer> matchByHashes(Graph g1, Graph g2,
            Map<BlankNode, Integer> bNodeHashMap) {
        Map<BlankNode, Set<Property>> bNodePropMap1 = getBNodePropMap(g1);
        Map<BlankNode, Set<Property>> bNodePropMap2 = getBNodePropMap(g2);
        IntHashMap<Set<BlankNode>> hashNodeMap1 = getHashNodes(bNodePropMap1, bNodeHashMap);
        IntHashMap<Set<BlankNode>> hashNodeMap2 = getHashNodes(bNodePropMap2, bNodeHashMap);
        // isomorphic graphs must produce identical sets of hash classes
        if (!hashNodeMap1.keySet().equals(hashNodeMap2.keySet())) {
            return null;
        }
        matchingGroups = new HashMap<Set<BlankNode>, Set<BlankNode>>();
        IntIterator hashIter = hashNodeMap1.keySet().intIterator();
        while (hashIter.hasNext()) {
            int hash = hashIter.next();
            Set<BlankNode> nodes1 = hashNodeMap1.get(hash);
            Set<BlankNode> nodes2 = hashNodeMap2.get(hash);
            if (nodes1.size() != nodes2.size()) {
                return null;
            }
            if (nodes1.size() != 1) {
                // ambiguous hash class: defer to trial-and-error matching
                matchingGroups.put(nodes1, nodes2);
                continue;
            }
            // unique hash on both sides: these two nodes must match each other
            final BlankNode bNode1 = nodes1.iterator().next();
            final BlankNode bNode2 = nodes2.iterator().next();
            matchings.put(bNode1, bNode2);
            //in the graphs replace node occurences with grounded node,
            BlankNodeOrIRI mappedNode = new MappedNode(bNode1, bNode2);
            replaceNode(g1, bNode1, mappedNode);
            replaceNode(g2, bNode2, mappedNode);
            //remove grounded triples
            if (!Utils.removeGrounded(g1, g2)) {
                return null;
            }
        }
        Map<BlankNode, Integer> result = new HashMap<BlankNode, Integer>();
        addInverted(result, hashNodeMap1);
        addInverted(result, hashNodeMap2);
        return result;
    }
    // sum of the individual property hashes; order-independent by construction
    private static int computeHash(Set<Property> propertySet, Map<BlankNode, Integer> bNodeHashMap) {
        int result = 0;
        for (Property property : propertySet) {
            result += property.hashCode(bNodeHashMap);
        }
        return result;
    }
    // maps every bnode of the graph to its forward and backward properties
    private static Map<BlankNode, Set<Property>> getBNodePropMap(Graph g) {
        Set<BlankNode> bNodes = Utils.getBNodes(g);
        Map<BlankNode, Set<Property>> result = new HashMap<BlankNode, Set<Property>>();
        for (BlankNode bNode : bNodes) {
            result.put(bNode, getProperties(bNode, g));
        }
        return result;
    }
    // collects the properties in which bNode occurs as subject (forward)
    // or as object (backward)
    private static Set<Property> getProperties(BlankNode bNode, Graph g) {
        Set<Property> result = new HashSet<Property>();
        Iterator<Triple> ti = g.filter(bNode, null, null);
        while (ti.hasNext()) {
            Triple triple = ti.next();
            result.add(new ForwardProperty(triple.getPredicate(), triple.getObject()));
        }
        ti = g.filter(null, null, bNode);
        while (ti.hasNext()) {
            Triple triple = ti.next();
            result.add(new BackwardProperty(triple.getSubject(), triple.getPredicate()));
        }
        return result;
    }
    // hash of a node: grounded nodes use their own hashCode, bnodes use the
    // hash from the previous refinement round (0 if there is none yet)
    private static int nodeHash(RDFTerm resource, Map<BlankNode, Integer> bNodeHashMap) {
        if (resource instanceof BlankNode) {
            Integer mapValue = bNodeHashMap.get((BlankNode) resource);
            if (mapValue == null) {
                return 0;
            } else {
                return mapValue;
            }
        } else {
            return resource.hashCode();
        }
    }
    // substitutes every occurrence of bNode in the graph by replacementNode
    private static void replaceNode(Graph graph, BlankNode bNode, BlankNodeOrIRI replacementNode) {
        Set<Triple> triplesToRemove = new HashSet<Triple>();
        for (Triple triple : graph) {
            Triple replacementTriple = getReplacement(triple, bNode, replacementNode);
            if (replacementTriple != null) {
                triplesToRemove.add(triple);
                graph.add(replacementTriple);
            }
        }
        graph.removeAll(triplesToRemove);
    }
    // returns triple with bNode replaced, or null if triple does not contain bNode
    private static Triple getReplacement(Triple triple, BlankNode bNode, BlankNodeOrIRI replacementNode) {
        if (triple.getSubject().equals(bNode)) {
            if (triple.getObject().equals(bNode)) {
                return new TripleImpl(replacementNode, triple.getPredicate(), replacementNode);
            } else {
                return new TripleImpl(replacementNode, triple.getPredicate(), triple.getObject());
            }
        } else {
            if (triple.getObject().equals(bNode)) {
                return new TripleImpl(triple.getSubject(), triple.getPredicate(), replacementNode);
            } else {
                return null;
            }
        }
    }
    // inverts hash -> nodes into node -> hash for the next refinement round
    private static void addInverted(Map<BlankNode, Integer> result, IntHashMap<Set<BlankNode>> hashNodeMap) {
        for (int hash : hashNodeMap.keySet()) {
            Set<BlankNode> bNodes = hashNodeMap.get(hash);
            for (BlankNode bNode : bNodes) {
                result.put(bNode, hash);
            }
        }
    }
    // property seen from the object side: (subject, predicate) pointing at the node
    private static class BackwardProperty implements Property {
        private BlankNodeOrIRI subject;
        private IRI predicate;
        public BackwardProperty(BlankNodeOrIRI subject, IRI predicate) {
            this.subject = subject;
            this.predicate = predicate;
        }
        @Override
        public int hashCode(Map<BlankNode, Integer> bNodeHashMap) {
            // 0xFF distinguishes backward from forward properties with equal parts
            return 0xFF ^ predicate.hashCode() ^ nodeHash(subject, bNodeHashMap);
        }
    }
    // property seen from the subject side: (predicate, object) leaving the node
    private static class ForwardProperty implements Property {
        private IRI predicate;
        private RDFTerm object;
        public ForwardProperty(IRI predicate, RDFTerm object) {
            this.predicate = predicate;
            this.object = object;
        }
        @Override
        public int hashCode(Map<BlankNode, Integer> bNodeHashMap) {
            return predicate.hashCode() ^ nodeHash(object, bNodeHashMap);
        }
    }
    // grounded placeholder standing for a pair of matched bnodes in both graphs
    private static class MappedNode implements BlankNodeOrIRI {
        private BlankNode bNode1, bNode2;
        public MappedNode(BlankNode bNode1, BlankNode bNode2) {
            this.bNode1 = bNode1;
            this.bNode2 = bNode2;
        }
    }
    // an incident edge of a bnode whose hash may depend on the current bnode hashes
    private static interface Property {
        public int hashCode(Map<BlankNode, Integer> bNodeHashMap);
    }
}
| 326 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching/GraphNotIsomorphicException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graphmatching;
/**
 * Thrown internally to signal that two graphs have been proven not to be
 * isomorphic during hash-based blank-node matching.
 *
 * @author reto
 */
class GraphNotIsomorphicException extends Exception {
}
| 327 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching/Utils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graphmatching;
import org.apache.clerezza.BlankNode;
import org.apache.clerezza.Triple;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
/**
 * Helper routines shared by the graph-matching implementation.
 */
public class Utils {
    /**
     * Collects every blank node occurring as subject or object in the given
     * triples.
     *
     * @param s the triples to scan.
     * @return the set of blank nodes found.
     */
    static Set<BlankNode> getBNodes(Collection<Triple> s) {
        final Set<BlankNode> found = new HashSet<BlankNode>();
        for (Triple t : s) {
            if (t.getSubject() instanceof BlankNode) {
                found.add((BlankNode) t.getSubject());
            }
            if (t.getObject() instanceof BlankNode) {
                found.add((BlankNode) t.getObject());
            }
        }
        return found;
    }
    /**
     * removes the common grounded triples from s1 and s2. returns false if
     * a grounded triple is not in both sets, true otherwise
     */
    static boolean removeGrounded(Collection<Triple> s1, Collection<Triple> s2) {
        final Iterator<Triple> it = s1.iterator();
        while (it.hasNext()) {
            final Triple candidate = it.next();
            if (isGrounded(candidate)) {
                // a grounded triple of s1 must have an exact counterpart in s2
                if (!s2.remove(candidate)) {
                    return false;
                }
                it.remove();
            }
        }
        //for efficiency we might skip this (redefine method)
        for (Triple remaining : s2) {
            if (isGrounded(remaining)) {
                return false;
            }
        }
        return true;
    }
    /**
     * @return true iff neither subject nor object of the triple is a blank node.
     */
    private static boolean isGrounded(Triple triple) {
        return !(triple.getSubject() instanceof BlankNode)
                && !(triple.getObject() instanceof BlankNode);
    }
}
| 328 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching/GroupMappingIterator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graphmatching;
import java.util.*;
/**
 * Iterates over all mappings from each element of every Set<T> to each
 * element of their corresponding Set<U>.
 *
 * <p>The iterator enumerates the cartesian product of the per-group
 * mappings: the first group acts as the outer loop, all remaining groups are
 * handled by a recursively created iterator (the inner loop).</p>
 *
 * @author reto
 */
class GroupMappingIterator<T, U> implements Iterator<Map<T, U>> {
    // iterator over the mappings of the first group (outer loop)
    private Iterator<Map<T, U>> firstPartIter;
    // first-group mapping currently combined with every rest mapping
    private Map<T, U> currentFirstPart;
    // all groups except the first; kept to restart the rest iterator
    final private Map<Set<T>, Set<U>> restMap;
    // iterator over the combined mappings of the remaining groups (inner loop)
    private Iterator<Map<T, U>> currentRestPartIter;
    /**
     * Creates an iterator over all combined mappings of the given groups:
     * an empty iterator for no groups, a plain MappingIterator for a single
     * group, and a GroupMappingIterator otherwise.
     */
    static <T, U> Iterator<Map<T, U>> create(Map<Set<T>, Set<U>> matchingGroups) {
        if (matchingGroups.size() > 1) {
            return new GroupMappingIterator<T, U>(matchingGroups);
        } else {
            if (matchingGroups.size() == 0) {
                return new ArrayList<Map<T, U>>(0).iterator();
            }
            Map.Entry<Set<T>, Set<U>> entry = matchingGroups.entrySet().iterator().next();
            return new MappingIterator<T, U>(entry.getKey(),
                    entry.getValue());
        }
    }
    private GroupMappingIterator(Map<Set<T>, Set<U>> matchingGroups) {
        if (matchingGroups.size() == 0) {
            throw new IllegalArgumentException("matchingGroups must not be empty");
        }
        restMap = new HashMap<Set<T>, Set<U>>();
        boolean first = true;
        // split off the first group; everything else goes into restMap
        for (Map.Entry<Set<T>, Set<U>> entry : matchingGroups.entrySet()) {
            if (first) {
                firstPartIter = new MappingIterator<T, U>(entry.getKey(),
                        entry.getValue());
                first = false;
            } else {
                restMap.put(entry.getKey(), entry.getValue());
            }
        }
        currentRestPartIter = create(restMap);
        currentFirstPart = firstPartIter.next();
    }
    @Override
    public boolean hasNext() {
        return firstPartIter.hasNext() || currentRestPartIter.hasNext();
    }
    @Override
    public Map<T, U> next() {
        Map<T, U> restPart;
        if (currentRestPartIter.hasNext()) {
            restPart = currentRestPartIter.next();
        } else {
            // inner loop exhausted: advance the outer loop and restart the inner
            if (firstPartIter.hasNext()) {
                currentFirstPart = firstPartIter.next();
                currentRestPartIter = create(restMap);
                restPart = currentRestPartIter.next();
            } else {
                throw new NoSuchElementException();
            }
        }
        // combined mapping: rest mappings plus the current first-group mapping
        Map<T, U> result = new HashMap<T, U>(restPart);
        result.putAll(currentFirstPart);
        return result;
    }
    @Override
    public void remove() {
        throw new UnsupportedOperationException("Not supported.");
    }
}
| 329 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching/PermutationIterator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graphmatching;
import java.util.*;
/**
 * Iterates over every permutation of the elements of a list.
 *
 * <p>Permutations are produced lazily: the permutation returned by the next
 * call to {@link #next()} is always precomputed, so {@link #hasNext()} simply
 * checks whether one is pending.</p>
 *
 * @author reto
 */
class PermutationIterator<T> implements Iterator<List<T>> {
    private Iterator<List<T>> subIterator;
    private List<T> elements;
    private List<T> pending;
    int posInList = 0; //index of the element appended last to returned lists;
    //this element is the one excluded from subIterator
    PermutationIterator(List<T> list) {
        this.elements = Collections.unmodifiableList(list);
        if (list.size() > 1) {
            rebuildSubIterator();
        }
        advance();
    }
    @Override
    public boolean hasNext() {
        return pending != null;
    }
    @Override
    public List<T> next() {
        if (pending == null) {
            throw new NoSuchElementException();
        }
        final List<T> current = pending;
        advance();
        return current;
    }
    @Override
    public void remove() {
        throw new UnsupportedOperationException("Not supported");
    }
    /** Builds a permutation iterator over all elements except the one at posInList. */
    private void rebuildSubIterator() {
        final List<T> remainder = new ArrayList<T>(elements);
        remainder.remove(posInList);
        subIterator = new PermutationIterator<T>(remainder);
    }
    /** Precomputes the permutation handed out by the following next() call. */
    private void advance() {
        pending = computeNext();
    }
    /** Returns the next permutation, or null once all have been produced. */
    private List<T> computeNext() {
        final int size = elements.size();
        if (size == 0) {
            return null;
        }
        if (size == 1) {
            // a singleton list has exactly one permutation
            return posInList++ == 0 ? new ArrayList<T>(elements) : null;
        }
        if (!subIterator.hasNext()) {
            if (posInList >= size - 1) {
                return null;
            }
            // move to the next excluded element and restart the sub-iterator
            posInList++;
            rebuildSubIterator();
        }
        final List<T> permutation = subIterator.next();
        permutation.add(elements.get(posInList));
        return permutation;
    }
}
| 330 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching/GraphMatcher.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graphmatching;
import org.apache.clerezza.*;
import org.apache.clerezza.implementation.TripleImpl;
import org.apache.clerezza.implementation.in_memory.SimpleGraph;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* @author reto
*/
public class GraphMatcher {
private final static Logger log = Logger.getLogger(GraphMatcher.class.getName());
/**
* get a mapping from g1 to g2 or null if the graphs are not isomorphic. The
* returned map maps each <code>BNode</code>s from g1 to one
* of g2. If the graphs are ground graphs the method return an empty map if
* the ImmutableGraph are equals and null otherwise.
*
* NOTE: This method does not returned mapping from blank nodes to grounded
* nodes, a bnode in g1 is not a variable that may match any node, but must
* match a bnode in g2.
*
* <p>On the algorithm:</p>
* <ul>
* <li>In a first step it checked if every grounded triple in g1 matches one in g2</li>
* <li>[optional] blank node blind matching</li>
* <li>in a map mbng1 bnode of g1 is mapped to a set of of its
* properties and inverse properties, this is the predicate and the object
* or subject respectively, analoguosly in mbgn2 every bnode of g2</li>
* <li>based on the incoming and outgoing properties a hash is calculated for
* each bnode, in the first step when calculating the hash a constant value
* is taken for the bnodes that might be subject or object in the (inverse properties)</li>
* </ul>
* - hash-classes:
*
* @param og1
* @param og2
* @return a Set of NodePairs
*/
public static Map<BlankNode, BlankNode> getValidMapping(Graph og1, Graph og2) {
Graph g1 = new SimpleGraph(og1);
Graph g2 = new SimpleGraph(og2);
if (!Utils.removeGrounded(g1, g2)) {
return null;
}
final HashMatching hashMatching;
try {
hashMatching = new HashMatching(g1, g2);
} catch (GraphNotIsomorphicException ex) {
return null;
}
Map<BlankNode, BlankNode> matchings = hashMatching.getMatchings();
if (g1.size() > 0) {
//start trial an error matching
//TODO (CLEREZZA-81) at least in the situation where one matching
//group is big (approx > 5) we should switch back to hash-based matching
//after a first guessed matching, rather than try all permutations
Map<BlankNode, BlankNode> remainingMappings = trialAndErrorMatching(g1, g2, hashMatching.getMatchingGroups());
if (remainingMappings == null) {
return null;
} else {
matchings.putAll(remainingMappings);
}
}
return matchings;
}
private static Map<BlankNode, BlankNode> trialAndErrorMatching(Graph g1, Graph g2,
Map<Set<BlankNode>, Set<BlankNode>> matchingGroups) {
if (log.isLoggable(Level.FINE)) {
Set<BlankNode> bn1 = Utils.getBNodes(g1);
log.log(Level.FINE, "doing trial and error matching for {0}" + " bnodes, " + "in graphs of size: {1}.", new Object[]{bn1.size(), g1.size()});
}
Iterator<Map<BlankNode, BlankNode>> mappingIter
= GroupMappingIterator.create(matchingGroups);
while (mappingIter.hasNext()) {
Map<BlankNode, BlankNode> map = mappingIter.next();
if (checkMapping(g1, g2, map)) {
return map;
}
}
return null;
}
private static boolean checkMapping(Graph g1, Graph g2, Map<BlankNode, BlankNode> map) {
for (Triple triple : g1) {
if (!g2.contains(map(triple, map))) {
return false;
}
}
return true;
}
private static Triple map(Triple triple, Map<BlankNode, BlankNode> map) {
final BlankNodeOrIRI oSubject = triple.getSubject();
BlankNodeOrIRI subject = oSubject instanceof BlankNode ?
map.get((BlankNode) oSubject) : oSubject;
RDFTerm oObject = triple.getObject();
RDFTerm object = oObject instanceof BlankNode ?
map.get((BlankNode) oObject) : oObject;
return new TripleImpl(subject, triple.getPredicate(), object);
}
}
| 331 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching/MappingIterator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graphmatching;
import java.util.*;
/**
* An iterator over all possible mapping beetween the elemnets of two sets of
* the same size, each mapping maps each element from set1 to a disctinct one of
* set2.
*
* @author reto
*/
class MappingIterator<T, U> implements Iterator<Map<T, U>> {
private List<T> list1;
private Iterator<List<U>> permutationList2Iterator;
public MappingIterator(Set<T> set1, Set<U> set2) {
if (set1.size() != set2.size()) {
throw new IllegalArgumentException();
}
this.list1 = new ArrayList<T>(set1);
permutationList2Iterator = new PermutationIterator<U>(
new ArrayList<U>(set2));
}
@Override
public boolean hasNext() {
return permutationList2Iterator.hasNext();
}
@Override
public Map<T, U> next() {
List<U> list2 = permutationList2Iterator.next();
Map<T, U> result = new HashMap<T, U>(list1.size());
for (int i = 0; i < list1.size(); i++) {
result.put(list1.get(i), list2.get(i));
}
return result;
}
@Override
public void remove() {
throw new UnsupportedOperationException("Not supported.");
}
}
| 332 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching/collections/IntHashSet.java | /*
* Copyright 2002-2004 The Apache Software Foundation.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graphmatching.collections;
import java.util.HashSet;
import java.util.Iterator;
/**
* This is currently just a placeholder implementation based on {@literal HashSet<Integer>}
* an efficient implementation is to store the primitives directly.
*
* @author reto
*/
public class IntHashSet extends HashSet<Integer> implements IntSet {
@Override
public IntIterator intIterator() {
final Iterator<Integer> base = iterator();
return new IntIterator() {
@Override
public int nextInt() {
return base.next();
}
@Override
public boolean hasNext() {
return base.hasNext();
}
@Override
public Integer next() {
return base.next();
}
@Override
public void remove() {
base.remove();
}
};
}
@Override
public void add(int i) {
super.add((Integer) i);
}
}
| 333 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching/collections/IntSet.java | /*
* Copyright 2002-2004 The Apache Software Foundation.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graphmatching.collections;
import java.util.Set;
/**
* An IntSet allows directly adding primitive ints to a set,
* {@literal Set<Integer>} is extended, but accessing the respective methods is less efficient.
*
* @author reto
*/
public interface IntSet extends Set<Integer> {
/**
* @return an iterator over the primitive int
*/
public IntIterator intIterator();
public void add(int i);
}
| 334 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching/collections/IntIterator.java | /*
* Copyright 2002-2004 The Apache Software Foundation.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graphmatching.collections;
import java.util.Iterator;
/**
* An iterator allowing to iterate over ints, {@literal Iterator<Integer>} is extended for
* compatibility, however accessing nextInt allows faster implementations.
*
* @author reto
*/
public interface IntIterator extends Iterator<Integer> {
public int nextInt();
}
| 335 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graphmatching/collections/IntHashMap.java | /*
* Copyright 2002-2004 The Apache Software Foundation.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
/*
* Note: originally released under the GNU LGPL v2.1,
* but rereleased by the original author under the ASF license (above).
*/
package org.apache.clerezza.implementation.graphmatching.collections;
/**
* <p>A hash map that uses primitive ints for the key rather than objects.</p>
* <p>Note that this class is for internal optimization purposes only, and may
* not be supported in future releases of Jakarta Commons Lang. Utilities of
* this sort may be included in future releases of Jakarta Commons Collections.</p>
*
* @author Justin Couch
* @author Alex Chaffee (alex@apache.org)
* @author Stephen Colebourne
* @version $Revision: 1.2 $
* @see java.util.HashMap
* @since 2.0
*/
public class IntHashMap<T> {
private IntSet keySet = new IntHashSet();
/**
* The hash table data.
*/
private transient Entry<T> table[];
/**
* The total number of entries in the hash table.
*/
private transient int count;
/**
* The table is rehashed when its size exceeds this threshold. (The
* value of this field is (int)(capacity * loadFactor).)
*
* @serial
*/
private int threshold;
/**
* The load factor for the hashtable.
*
* @serial
*/
private float loadFactor;
/**
* <p>Innerclass that acts as a datastructure to create a new entry in the
* table.</p>
*/
private static class Entry<T> {
int hash;
int key;
T value;
Entry<T> next;
/**
* <p>Create a new entry with the given values.</p>
*
* @param hash The code used to hash the object with
* @param key The key used to enter this in the table
* @param value The value for this key
* @param next A reference to the next entry in the table
*/
protected Entry(int hash, int key, T value, Entry<T> next) {
this.hash = hash;
this.key = key;
this.value = value;
this.next = next;
}
}
/**
* <p>Constructs a new, empty hashtable with a default capacity and load
* factor, which is <code>20</code> and <code>0.75</code> respectively.</p>
*/
public IntHashMap() {
this(20, 0.75f);
}
/**
* <p>Constructs a new, empty hashtable with the specified initial capacity
* and default load factor, which is <code>0.75</code>.</p>
*
* @param initialCapacity the initial capacity of the hashtable.
* @throws IllegalArgumentException if the initial capacity is less
* than zero.
*/
public IntHashMap(int initialCapacity) {
this(initialCapacity, 0.75f);
}
/**
* <p>Constructs a new, empty hashtable with the specified initial
* capacity and the specified load factor.</p>
*
* @param initialCapacity the initial capacity of the hashtable.
* @param loadFactor the load factor of the hashtable.
* @throws IllegalArgumentException if the initial capacity is less
* than zero, or if the load factor is nonpositive.
*/
public IntHashMap(int initialCapacity, float loadFactor) {
super();
if (initialCapacity < 0) {
throw new IllegalArgumentException("Illegal Capacity: " + initialCapacity);
}
if (loadFactor <= 0) {
throw new IllegalArgumentException("Illegal Load: " + loadFactor);
}
if (initialCapacity == 0) {
initialCapacity = 1;
}
this.loadFactor = loadFactor;
table = new Entry[initialCapacity];
threshold = (int) (initialCapacity * loadFactor);
}
/**
* <p>Returns the number of keys in this hashtable.</p>
*
* @return the number of keys in this hashtable.
*/
public int size() {
return count;
}
/**
* <p>Tests if this hashtable maps no keys to values.</p>
*
* @return <code>true</code> if this hashtable maps no keys to values;
* <code>false</code> otherwise.
*/
public boolean isEmpty() {
return count == 0;
}
/**
* <p>Tests if some key maps into the specified value in this hashtable.
* This operation is more expensive than the <code>containsKey</code>
* method.</p>
* <p>
* <p>Note that this method is identical in functionality to containsValue,
* (which is part of the Map interface in the collections framework).</p>
*
* @param value a value to search for.
* @return <code>true</code> if and only if some key maps to the
* <code>value</code> argument in this hashtable as
* determined by the <code>equals</code> method;
* <code>false</code> otherwise.
* @throws NullPointerException if the value is <code>null</code>.
* @see #containsKey(int)
* @see #containsValue(Object)
* @see java.util.Map
*/
public boolean contains(Object value) {
if (value == null) {
throw new NullPointerException();
}
Entry tab[] = table;
for (int i = tab.length; i-- > 0; ) {
for (Entry e = tab[i]; e != null; e = e.next) {
if (e.value.equals(value)) {
return true;
}
}
}
return false;
}
/**
* <p>Returns <code>true</code> if this HashMap maps one or more keys
* to this value.</p>
*
* <p>Note that this method is identical in functionality to contains
* (which predates the Map interface).</p>
*
* @param value value whose presence in this HashMap is to be tested.
* @see java.util.Map
* @since JDK1.2
*/
public boolean containsValue(Object value) {
return contains(value);
}
/**
* <p>Tests if the specified object is a key in this hashtable.</p>
*
* @param key possible key.
* @return <code>true</code> if and only if the specified object is a
* key in this hashtable, as determined by the <code>equals</code>
* method; <code>false</code> otherwise.
* @see #contains(Object)
*/
public boolean containsKey(int key) {
Entry tab[] = table;
int hash = key;
int index = (hash & 0x7FFFFFFF) % tab.length;
for (Entry e = tab[index]; e != null; e = e.next) {
if (e.hash == hash) {
return true;
}
}
return false;
}
/**
* <p>Returns the value to which the specified key is mapped in this map.</p>
*
* @param key a key in the hashtable.
* @return the value to which the key is mapped in this hashtable;
* <code>null</code> if the key is not mapped to any value in
* this hashtable.
* @see #put(int, Object)
*/
public T get(int key) {
Entry<T> tab[] = table;
int hash = key;
int index = (hash & 0x7FFFFFFF) % tab.length;
for (Entry<T> e = tab[index]; e != null; e = e.next) {
if (e.hash == hash) {
return e.value;
}
}
return null;
}
/**
* <p>Increases the capacity of and internally reorganizes this
* hashtable, in order to accommodate and access its entries more
* efficiently.</p>
* <p>
* <p>This method is called automatically when the number of keys
* in the hashtable exceeds this hashtable's capacity and load
* factor.</p>
*/
protected void rehash() {
int oldCapacity = table.length;
Entry<T> oldMap[] = table;
int newCapacity = oldCapacity * 2 + 1;
Entry<T> newMap[] = new Entry[newCapacity];
threshold = (int) (newCapacity * loadFactor);
table = newMap;
for (int i = oldCapacity; i-- > 0; ) {
for (Entry<T> old = oldMap[i]; old != null; ) {
Entry<T> e = old;
old = old.next;
int index = (e.hash & 0x7FFFFFFF) % newCapacity;
e.next = newMap[index];
newMap[index] = e;
}
}
}
/**
* <p>Maps the specified <code>key</code> to the specified
* <code>value</code> in this hashtable. The key cannot be
* <code>null</code>. </p>
* <p>
* <p>The value can be retrieved by calling the <code>get</code> method
* with a key that is equal to the original key.</p>
*
* @param key the hashtable key.
* @param value the value.
* @return the previous value of the specified key in this hashtable,
* or <code>null</code> if it did not have one.
* @throws NullPointerException if the key is <code>null</code>.
* @see #get(int)
*/
public Object put(int key, T value) {
keySet.add(key);
// Makes sure the key is not already in the hashtable.
Entry<T> tab[] = table;
int hash = key;
int index = (hash & 0x7FFFFFFF) % tab.length;
for (Entry<T> e = tab[index]; e != null; e = e.next) {
if (e.hash == hash) {
T old = e.value;
e.value = value;
return old;
}
}
if (count >= threshold) {
// Rehash the table if the threshold is exceeded
rehash();
tab = table;
index = (hash & 0x7FFFFFFF) % tab.length;
}
// Creates the new entry.
Entry<T> e = new Entry<T>(hash, key, value, tab[index]);
tab[index] = e;
count++;
return null;
}
/**
* <p>Removes the key (and its corresponding value) from this
* hashtable.</p>
*
* <p>This method does nothing if the key is not present in the
* hashtable.</p>
*
* @param key the key that needs to be removed.
* @return the value to which the key had been mapped in this hashtable,
* or <code>null</code> if the key did not have a mapping.
*/
/*public Object remove(int key) {
Entry tab[] = table;
int hash = key;
int index = (hash & 0x7FFFFFFF) % tab.length;
for (Entry e = tab[index], prev = null; e != null; prev = e, e = e.next) {
if (e.hash == hash) {
if (prev != null) {
prev.next = e.next;
} else {
tab[index] = e.next;
}
count--;
Object oldValue = e.value;
e.value = null;
return oldValue;
}
}
return null;
}*/
/**
* <p>Clears this hashtable so that it contains no keys.</p>
*/
public synchronized void clear() {
keySet.clear();
Entry tab[] = table;
for (int index = tab.length; --index >= 0; ) {
tab[index] = null;
}
count = 0;
}
public IntSet keySet() {
return keySet;
}
}
| 336 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/literal/TypedLiteralImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.literal;
import org.apache.clerezza.IRI;
import org.apache.clerezza.Language;
import java.io.Serializable;
/**
* @author reto
*/
public class TypedLiteralImpl extends AbstractLiteral implements Serializable {
private String lexicalForm;
private IRI dataType;
private int hashCode;
/**
* @param lexicalForm
* @param dataType
*/
public TypedLiteralImpl(String lexicalForm, IRI dataType) {
this.lexicalForm = lexicalForm;
this.dataType = dataType;
this.hashCode = super.hashCode();
}
public IRI getDataType() {
return dataType;
}
/* (non-Javadoc)
* @see org.apache.clerezza.rdf.core.LiteralNode#getLexicalForm()
*/
@Override
public String getLexicalForm() {
return lexicalForm;
}
@Override
public int hashCode() {
return hashCode;
}
@Override
public String toString() {
StringBuffer result = new StringBuffer();
result.append('\"');
result.append(getLexicalForm());
result.append('\"');
result.append("^^");
result.append(getDataType());
return result.toString();
}
@Override
public Language getLanguage() {
return null;
}
}
| 337 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/literal/LiteralImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.literal;
import org.apache.clerezza.IRI;
import org.apache.clerezza.Language;
import java.io.Serializable;
/**
* @author reto
*/
public class LiteralImpl extends AbstractLiteral implements Serializable {
private String lexicalForm;
private IRI dataType;
private int hashCode;
private Language language;
/**
* @param lexicalForm
* @param dataType
* @param language the language of this literal
*/
public LiteralImpl(String lexicalForm, IRI dataType, Language language) {
this.lexicalForm = lexicalForm;
this.dataType = dataType;
this.language = language;
this.hashCode = super.hashCode();
}
public IRI getDataType() {
return dataType;
}
/* (non-Javadoc)
* @see org.apache.clerezza.rdf.core.LiteralNode#getLexicalForm()
*/
@Override
public String getLexicalForm() {
return lexicalForm;
}
@Override
public int hashCode() {
return hashCode;
}
@Override
public String toString() {
StringBuffer result = new StringBuffer();
result.append('\"');
result.append(getLexicalForm());
result.append('\"');
result.append("^^");
result.append(getDataType());
return result.toString();
}
@Override
public Language getLanguage() {
return language;
}
}
| 338 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/literal/PlainLiteralImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.literal;
import org.apache.clerezza.IRI;
import org.apache.clerezza.Language;
import org.apache.clerezza.Literal;
import java.io.Serializable;
/**
* @author reto
*/
public class PlainLiteralImpl extends AbstractLiteral implements Literal, Serializable {
private final String lexicalForm;
private final Language language;
public PlainLiteralImpl(String value) {
this(value, null);
}
public PlainLiteralImpl(String value, Language language) {
if (value == null) {
throw new IllegalArgumentException("The literal string cannot be null");
}
this.lexicalForm = value;
this.language = language;
if (language == null) {
dataType = XSD_STRING;
} else {
dataType = RDF_LANG_STRING;
}
}
@Override
public String getLexicalForm() {
return lexicalForm;
}
@Override
public Language getLanguage() {
return language;
}
@Override
public String toString() {
final StringBuilder result = new StringBuilder();
result.append('\"').append(lexicalForm).append('\"');
if (language != null) {
result.append("@").append(language.toString());
}
return result.toString();
}
@Override
public IRI getDataType() {
return dataType;
}
private final IRI dataType;
private static final IRI XSD_STRING = new IRI("http://www.w3.org/2001/XMLSchema#string");
private static final IRI RDF_LANG_STRING = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#langString");
}
| 339 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/literal/NoConvertorException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.literal;
import java.lang.reflect.Type;
/**
* This exception is thrown when no convertor is available to do a required
* java-object to literal or literal to java-object conversion.
*
* @author reto
* @since 0.3
*/
public class NoConvertorException extends RuntimeException {
/**
* Create an instance of <code>NoConvertorException</code>
* indicating that no convertor is available for the type.
*
* @param type the type for which no convertor is available
*/
public NoConvertorException(Type type) {
super("No convertor available for type " + type);
}
}
| 340 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/literal/LiteralFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.literal;
import org.apache.clerezza.Literal;
/**
* This class provides methods to convert java objects to typed literals and
* vice versa. While the default implementation will provide literal objects
* storing the data's lexical form in memory, other implementations may
* create literal optimized for processing within the store.
* <p>
* Note: this class uses the notion of "Convertor" (in the Exception naming),
* but does not currently provide a mechanism to register such
* <code>Convertor</code>s. An implementation is said to provide
* <code>Convertor</code>s for the types it supports.
*
* @author reto
* @since 0.3
*/
public abstract class LiteralFactory {
private static LiteralFactory instance = new SimpleLiteralFactory();
/**
* Get a <code>LiteralFactory</code>. If this has not been set using
* setInstance it returns an instance of
* {@link org.apache.clerezza.implementation.literal.SimpleLiteralFactory}.
*
* @return a concrete <code>LiteralFactory</code>
*/
public static LiteralFactory getInstance() {
return instance;
}
/**
* Set the instance returned by <code>getInstance</code>.
*
* @param instance the new default <code>LiteralFactory</code>
*/
public static void setInstance(LiteralFactory instance) {
LiteralFactory.instance = instance;
}
/**
* Create a typed literal for the specified object
*
* @param value the value of the literal to be created
* @return a TypedLiteral representing the value
* @throws NoConvertorException thrown if <code>value</code> is of an invalid type
*/
public abstract Literal createTypedLiteral(Object value)
throws NoConvertorException;
/**
* Converts a literal to an instance of the specified class
*
* @param <T>
* @param type the <code>Class</code> of the returned object
* @param literal the literal to be converted
* @return a java object representing the value of the literal
* @throws NoConvertorException thrown if <code>type</code> is unsupported
* @throws InvalidLiteralTypeException if the literal type doesn't match the requested java type
*/
public abstract <T> T createObject(Class<T> type, Literal literal)
throws NoConvertorException, InvalidLiteralTypeException;
}
| 341 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/literal/AbstractLiteral.java | /*
* Copyright 2015 The Apache Software Foundation.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.literal;
import org.apache.clerezza.Literal;
/**
 * Skeletal {@link Literal} implementation supplying {@code equals} and
 * {@code hashCode} semantics based on lexical form, datatype and language tag.
 *
 * @author developer
 */
public abstract class AbstractLiteral implements Literal {

    @Override
    public int hashCode() {
        // Combine the language tag (when present), lexical form and datatype.
        int hash = getLanguage() != null ? getLanguage().hashCode() : 0;
        hash += getLexicalForm().hashCode();
        hash += getDataType().hashCode();
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof Literal)) {
            return false;
        }
        Literal other = (Literal) obj;
        // Language tags must either both be absent or be equal.
        boolean sameLanguage = getLanguage() == null
                ? other.getLanguage() == null
                : getLanguage().equals(other.getLanguage());
        return sameLanguage
                && getDataType().equals(other.getDataType())
                && getLexicalForm().equals(other.getLexicalForm());
    }
}
| 342 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/literal/SimpleLiteralFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.literal;
import org.apache.clerezza.IRI;
import org.apache.clerezza.Literal;
import org.apache.clerezza.implementation.util.Base64;
import org.apache.clerezza.implementation.util.W3CDateFormat;
import java.math.BigInteger;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.*;
/**
* An implementation of literal factory currently supporting only
* byte[]/base64Binary and Java.util.Date/date
*
* @author reto
*/
public class SimpleLiteralFactory extends LiteralFactory {
private static final String XSD = "http://www.w3.org/2001/XMLSchema#";
final private static IRI xsdInteger = xsd("integer");
final private static IRI xsdInt = xsd("int");
final private static IRI xsdShort = xsd("short");
final private static IRI xsdByte = xsd("byte");
final private static IRI xsdLong = xsd("long");
final private static Set<IRI> decimalTypes = new HashSet<IRI>();
final private static Map<Class<?>, TypeConverter<?>> typeConverterMap = new HashMap<Class<?>, TypeConverter<?>>();
final static Class<? extends byte[]> byteArrayType;
static {
Collections.addAll(decimalTypes, xsdInteger, xsdInt, xsdByte, xsdShort, xsdLong);
byte[] byteArray = new byte[0];
byteArrayType = byteArray.getClass();
typeConverterMap.put(byteArrayType, new ByteArrayConverter());
typeConverterMap.put(Date.class, new DateConverter());
typeConverterMap.put(Boolean.class, new BooleanConverter());
typeConverterMap.put(String.class, new StringConverter());
typeConverterMap.put(Integer.class, new IntegerConverter());
typeConverterMap.put(BigInteger.class, new BigIntegerConverter());
typeConverterMap.put(Long.class, new LongConverter());
typeConverterMap.put(Double.class, new DoubleConverter());
typeConverterMap.put(Float.class, new FloatConverter());
typeConverterMap.put(IRI.class, new UriRefConverter());
}
final private static IRI xsdDouble = xsd("double");
final private static IRI xsdFloat = xsd("float");
final private static IRI xsdAnyURI = xsd("anyURI");
final private static IRI xsd(String name) {
return new IRI(XSD + name);
}
private static interface TypeConverter<T> {
Literal createLiteral(T value);
T createObject(Literal literal);
}
private static class ByteArrayConverter implements TypeConverter<byte[]> {
private static final IRI base64Uri = xsd("base64Binary");
@Override
public Literal createLiteral(byte[] value) {
return new TypedLiteralImpl(Base64.encode((byte[]) value), base64Uri);
}
@Override
public byte[] createObject(Literal literal) {
if (!literal.getDataType().equals(base64Uri)) {
throw new InvalidLiteralTypeException(byteArrayType, literal.getDataType());
}
return (byte[]) Base64.decode(literal.getLexicalForm());
}
}
private static class DateConverter implements TypeConverter<Date> {
private static final IRI dateTimeUri = xsd("dateTime");
private static final DateFormat DATE_FORMAT = new W3CDateFormat();
@Override
public Literal createLiteral(Date value) {
return new TypedLiteralImpl(DATE_FORMAT.format(value), dateTimeUri);
}
@Override
public Date createObject(Literal literal) {
if (!literal.getDataType().equals(dateTimeUri)) {
throw new InvalidLiteralTypeException(Date.class, literal.getDataType());
}
try {
return DATE_FORMAT.parse(literal.getLexicalForm());
} catch (ParseException ex) {
throw new RuntimeException("Exception parsing literal as date", ex);
}
}
}
private static class BooleanConverter implements TypeConverter<Boolean> {
private static final IRI booleanUri = xsd("boolean");
public static final TypedLiteralImpl TRUE = new TypedLiteralImpl("true", booleanUri);
public static final TypedLiteralImpl FALSE = new TypedLiteralImpl("false", booleanUri);
@Override
public Literal createLiteral(Boolean value) {
if (value) return TRUE;
else return FALSE;
}
@Override
public Boolean createObject(Literal literal) {
if (literal == TRUE) return true;
else if (literal == FALSE) return false;
else if (!literal.getDataType().equals(booleanUri)) {
throw new InvalidLiteralTypeException(Boolean.class, literal.getDataType());
}
return Boolean.valueOf(literal.getLexicalForm());
}
}
private static class StringConverter implements TypeConverter<String> {
private static final IRI stringUri = xsd("string");
@Override
public Literal createLiteral(String value) {
return new TypedLiteralImpl(value, stringUri);
}
@Override
public String createObject(Literal literal) {
if (!literal.getDataType().equals(stringUri)) {
throw new InvalidLiteralTypeException(String.class, literal.getDataType());
}
return literal.getLexicalForm();
}
}
private static class IntegerConverter implements TypeConverter<Integer> {
@Override
public Literal createLiteral(Integer value) {
return new TypedLiteralImpl(value.toString(), xsdInt);
}
@Override
public Integer createObject(Literal literal) {
if (!decimalTypes.contains(literal.getDataType())) {
throw new InvalidLiteralTypeException(Integer.class, literal.getDataType());
}
return new Integer(literal.getLexicalForm());
}
}
private static class LongConverter implements TypeConverter<Long> {
@Override
public Literal createLiteral(Long value) {
return new TypedLiteralImpl(value.toString(), xsdLong);
}
@Override
public Long createObject(Literal literal) {
if (!decimalTypes.contains(literal.getDataType())) {
throw new InvalidLiteralTypeException(Long.class, literal.getDataType());
}
return new Long(literal.getLexicalForm());
}
}
private static class FloatConverter implements TypeConverter<Float> {
@Override
public Literal createLiteral(Float value) {
return new TypedLiteralImpl(value.toString(), xsdFloat);
}
@Override
public Float createObject(Literal literal) {
if (!literal.getDataType().equals(xsdFloat)) {
throw new InvalidLiteralTypeException(Float.class, literal.getDataType());
}
return Float.valueOf(literal.getLexicalForm());
}
}
private static class DoubleConverter implements TypeConverter<Double> {
@Override
public Literal createLiteral(Double value) {
return new TypedLiteralImpl(value.toString(), xsdDouble);
}
@Override
public Double createObject(Literal literal) {
if (!literal.getDataType().equals(xsdDouble)) {
throw new InvalidLiteralTypeException(Double.class, literal.getDataType());
}
return new Double(literal.getLexicalForm());
}
}
private static class BigIntegerConverter implements TypeConverter<BigInteger> {
@Override
public Literal createLiteral(BigInteger value) {
return new TypedLiteralImpl(value.toString(), xsdInteger);
}
@Override
public BigInteger createObject(Literal literal) {
if (!literal.getDataType().equals(xsdInteger)) {
throw new InvalidLiteralTypeException(Double.class, literal.getDataType());
}
return new BigInteger(literal.getLexicalForm());
}
}
private static class UriRefConverter implements TypeConverter<IRI> {
@Override
public Literal createLiteral(IRI value) {
return new TypedLiteralImpl(value.getUnicodeString(), xsdAnyURI);
}
@Override
public IRI createObject(Literal literal) {
if (!literal.getDataType().equals(xsdAnyURI)) {
throw new InvalidLiteralTypeException(IRI.class, literal.getDataType());
}
return new IRI(literal.getLexicalForm());
}
}
@SuppressWarnings("unchecked")
@Override
public Literal createTypedLiteral(Object value) throws NoConvertorException {
TypeConverter converter = getConverterFor(value.getClass());
return converter.createLiteral(value);
}
@Override
public <T> T createObject(Class<T> type, Literal literal)
throws NoConvertorException, InvalidLiteralTypeException {
final TypeConverter<T> converter = getConverterFor(type);
return converter.createObject(literal);
}
@SuppressWarnings("unchecked")
private <T> TypeConverter<T> getConverterFor(Class<T> type) throws NoConvertorException {
TypeConverter<T> convertor = (TypeConverter<T>) typeConverterMap.get(type);
if (convertor != null) {
return convertor;
}
for (Map.Entry<Class<?>, TypeConverter<?>> converterEntry : typeConverterMap.entrySet()) {
if (type.isAssignableFrom(converterEntry.getKey())) {
return (TypeConverter<T>) converterEntry.getValue();
}
}
throw new NoConvertorException(type);
}
}
| 343 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/literal/InvalidLiteralTypeException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.literal;
import org.apache.clerezza.IRI;
/**
 * Thrown when a literal is of the wrong type for conversion to a Java type.
 *
 * @author reto
 */
public class InvalidLiteralTypeException extends RuntimeException {

    // Explicit serialVersionUID: RuntimeException is Serializable and the
    // compiler-generated UID is fragile across recompilations.
    private static final long serialVersionUID = 1L;

    /**
     * Constructs the exception to be thrown when a literal cannot be
     * converted to an instance of the specified class.
     *
     * @param javaType    the {@code Class} to convert to
     * @param literalType the literal datatype which can't be converted
     */
    public InvalidLiteralTypeException(Class<?> javaType, IRI literalType) {
        super("Cannot create a " + javaType + " from a literal of type " + literalType);
    }
}
| 344 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/util/Base64.java | /*
* Taken from the book:
* Jonathan Knudsen, "Java Cryptography", O'Reilly Media, Inc., 1998
*/
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.util;
/**
 * Minimal Base64 codec (RFC 4648 alphabet, '=' padding).
 * <p>
 * Encoding processes the input in 3-byte groups, each producing four Base64
 * characters; decoding reverses the process, honouring trailing padding.
 */
public class Base64 {

    /**
     * Encodes raw bytes as a Base64 string.
     *
     * @param raw the bytes to encode
     * @return the Base64 representation (empty string for empty input)
     */
    public static String encode(byte[] raw) {
        // StringBuilder: no synchronization needed for this local accumulator.
        StringBuilder encoded = new StringBuilder();
        for (int i = 0; i < raw.length; i += 3) {
            encoded.append(encodeBlock(raw, i));
        }
        return encoded.toString();
    }

    /**
     * Encodes up to three bytes starting at {@code offset} into four Base64
     * characters, padding with '=' when fewer than three bytes remain.
     */
    protected static char[] encodeBlock(byte[] raw, int offset) {
        int block = 0;
        // Number of input bytes remaining after the one at offset.
        int slack = raw.length - offset - 1;
        int end = (slack >= 2) ? 2 : slack;
        for (int i = 0; i <= end; i++) {
            byte b = raw[offset + i];
            // Treat the byte as unsigned before packing it into the block.
            int neuter = (b < 0) ? b + 256 : b;
            block += neuter << (8 * (2 - i));
        }
        char[] base64 = new char[4];
        for (int i = 0; i < 4; i++) {
            int sixbit = (block >>> (6 * (3 - i))) & 0x3f;
            base64[i] = getChar(sixbit);
        }
        if (slack < 1) {
            base64[2] = '=';
        }
        if (slack < 2) {
            base64[3] = '=';
        }
        return base64;
    }

    /**
     * Maps a 6-bit value to its Base64 character; returns '?' if out of range.
     */
    protected static char getChar(int sixBit) {
        if (sixBit >= 0 && sixBit <= 25) {
            return (char) ('A' + sixBit);
        }
        if (sixBit >= 26 && sixBit <= 51) {
            return (char) ('a' + (sixBit - 26));
        }
        if (sixBit >= 52 && sixBit <= 61) {
            return (char) ('0' + (sixBit - 52));
        }
        if (sixBit == 62) {
            return '+';
        }
        if (sixBit == 63) {
            return '/';
        }
        return '?';
    }

    /**
     * Decodes a Base64 string back into raw bytes.
     *
     * @param base64 the Base64 string; its length is expected to be a multiple of 4
     * @return the decoded bytes (empty array for empty input)
     */
    public static byte[] decode(String base64) {
        // Guard: the padding scan below would index charAt(-1) on empty input.
        if (base64.isEmpty()) {
            return new byte[0];
        }
        int pad = 0;
        for (int i = base64.length() - 1; base64.charAt(i) == '='; i--) {
            pad++;
        }
        int length = base64.length() * 6 / 8 - pad;
        byte[] raw = new byte[length];
        int rawIndex = 0;
        for (int i = 0; i < base64.length(); i += 4) {
            int block = (getValue(base64.charAt(i)) << 18) + (getValue(base64.charAt(i + 1)) << 12) + (getValue(base64.charAt(i + 2)) << 6) + (getValue(base64.charAt(i + 3)));
            for (int j = 0; j < 3 && rawIndex + j < raw.length; j++) {
                raw[rawIndex + j] = (byte) ((block >> (8 * (2 - j))) & 0xff);
            }
            rawIndex += 3;
        }
        return raw;
    }

    /**
     * Maps a Base64 character to its 6-bit value; '=' maps to 0 and unknown
     * characters to -1.
     */
    protected static int getValue(char c) {
        if (c >= 'A' && c <= 'Z') {
            return c - 'A';
        }
        if (c >= 'a' && c <= 'z') {
            return c - 'a' + 26;
        }
        if (c >= '0' && c <= '9') {
            return c - '0' + 52;
        }
        if (c == '+') {
            return 62;
        }
        if (c == '/') {
            return 63;
        }
        if (c == '=') {
            return 0;
        }
        return -1;
    }
}
| 345 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/util/W3CDateFormat.java | //taken from GVS MillisDateFormat.java, modified to support different precision
/*
(c) Copyright 2005, 2006, Hewlett-Packard Development Company, LP
[See end of file]
$Id: W3CDateFormat.java,v 1.6 2007/05/07 18:45:22 rebach Exp $
*/
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.util;
import java.text.DateFormat;
import java.text.FieldPosition;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.*;
/**
 * Implements http://www.w3.org/TR/NOTE-datetime with the limitation that when
 * parsing, a time zone designator other than 'Z' must contain a colon.
 * Formatting always renders in UTC, emitting milliseconds only when non-zero.
 *
 * @author reto
 */
public class W3CDateFormat extends DateFormat {

    /**
     * A shared instance of this class.
     */
    public static final W3CDateFormat instance = new W3CDateFormat();

    // Shared delegates used for formatting only. SimpleDateFormat is not
    // thread-safe, so all access is synchronized on the formatter instance.
    private static final SimpleDateFormat dateFormatWithMillis = new SimpleDateFormat(
            "yyyy-MM-dd'T'HH:mm:ss.SSSZ");
    private static final SimpleDateFormat dateFormatNoMillis = new SimpleDateFormat(
            "yyyy-MM-dd'T'HH:mm:ssZ");

    private static final long serialVersionUID = 3258407344076372025L;

    private static final TimeZone utcTZ = new SimpleTimeZone(0, "UTC");

    static {
        dateFormatWithMillis.setTimeZone(utcTZ);
        dateFormatNoMillis.setTimeZone(utcTZ);
    }

    @Override
    public void setTimeZone(TimeZone zone) {
        // Note: output is produced by the shared UTC delegates above, so the
        // zone set here does not influence formatting.
        super.setTimeZone(zone);
    }

    /**
     * Formats the date in W3C style, e.g. {@code 1970-01-01T00:00:00Z}.
     * Milliseconds are included only when the date has a sub-second component;
     * a UTC offset of "+0000" is rendered as 'Z', otherwise a colon is
     * inserted into the offset (e.g. "+01:00").
     *
     * @see java.text.DateFormat#format(java.util.Date, java.lang.StringBuffer,
     * java.text.FieldPosition)
     */
    @Override
    public StringBuffer format(Date date, StringBuffer toAppendTo,
                               FieldPosition fieldPosition) {
        final DateFormat dateFormat = (date.getTime() % 1000) == 0 ?
                dateFormatNoMillis : dateFormatWithMillis;
        final String string;
        // Guard the shared, stateful SimpleDateFormat delegate.
        synchronized (dateFormat) {
            string = dateFormat.format(date);
        }
        if (string.endsWith("0000")) {
            // Replace the "+0000" UTC offset with 'Z'.
            toAppendTo.append(string, 0, string.length() - 5).append('Z');
        } else {
            // Insert the colon required by the W3C profile into the offset.
            final int colonPos = string.length() - 2;
            toAppendTo.append(string, 0, colonPos).append(':')
                    .append(string, colonPos, string.length());
        }
        // Contract fix: append to and return the supplied buffer instead of
        // discarding it (see DateFormat#format(Date,StringBuffer,FieldPosition)).
        return toAppendTo;
    }

    /**
     * Parses a W3C datetime starting at {@code parsePos}. The string must
     * contain date, time and seconds; a fractional-seconds part is optional
     * and the zone designator is 'Z' or a signed "hh:mm" offset.
     *
     * @see java.text.DateFormat#parse(java.lang.String,
     * java.text.ParsePosition)
     */
    public Date parse(String dateString, ParsePosition parsePos) {
        int position = parsePos.getIndex();
        // Manual digit-by-digit parse: yyyy-MM-ddTHH:mm:ss
        int y1 = dateString.charAt(position++) - '0';
        int y2 = dateString.charAt(position++) - '0';
        int y3 = dateString.charAt(position++) - '0';
        int y4 = dateString.charAt(position++) - '0';
        int year = 1000 * y1 + 100 * y2 + 10 * y3 + y4;
        position++; // skip '-'
        int m1 = dateString.charAt(position++) - '0';
        int m2 = dateString.charAt(position++) - '0';
        int month = 10 * m1 + m2;
        position++; // skip '-'
        int d1 = dateString.charAt(position++) - '0';
        int d2 = dateString.charAt(position++) - '0';
        int day = 10 * d1 + d2;
        position++; // skip 'T'
        int h1 = dateString.charAt(position++) - '0';
        int h2 = dateString.charAt(position++) - '0';
        int hour = 10 * h1 + h2;
        position++; // skip ':'
        int min1 = dateString.charAt(position++) - '0';
        int min2 = dateString.charAt(position++) - '0';
        int minutes = 10 * min1 + min2;
        position++; // skip ':'
        int s1 = dateString.charAt(position++) - '0';
        int s2 = dateString.charAt(position++) - '0';
        int secs = 10 * s1 + s2;
        // Interpret the field values in UTC; the zone offset is applied below.
        Calendar resultCalendar = new GregorianCalendar(year, month - 1, day,
                hour, minutes, secs);
        resultCalendar.setTimeZone(utcTZ);
        char afterSecChar = dateString.charAt(position++);
        int msecs = 0;
        char tzd1;
        if (afterSecChar == '.') {
            int startPos = position;
            // Read the decimal fraction until the zone designator starts
            // ('Z', '+' or '-'), scaling each digit to milliseconds.
            char nextChar = dateString.charAt(position++);
            while ((nextChar != 'Z') && (nextChar != '-') && (nextChar != '+')) {
                msecs += (nextChar - '0') * Math.pow(10, 3 + startPos - position);
                nextChar = dateString.charAt(position++);
            }
            tzd1 = nextChar;
        } else {
            tzd1 = afterSecChar;
        }
        long timeInMillis = resultCalendar.getTimeInMillis() + msecs;
        if (tzd1 != 'Z') {
            // Signed "hh:mm" offset: convert to millis and shift toward UTC.
            int htz1 = dateString.charAt(position++) - '0';
            int htz2 = dateString.charAt(position++) - '0';
            int hourtz = 10 * htz1 + htz2;
            position++; // skip ':'
            int mintz1 = dateString.charAt(position++) - '0';
            int mintz2 = dateString.charAt(position++) - '0';
            int minutestz = 10 * mintz1 + mintz2;
            int offSetInMillis = (hourtz * 60 + minutestz) * 60000;
            if (tzd1 == '+') {
                timeInMillis -= offSetInMillis;
            } else {
                timeInMillis += offSetInMillis;
            }
        }
        parsePos.setIndex(position);
        return new Date(timeInMillis);
    }
}
/*
* (c) Copyright 2005, 2006 Hewlett-Packard Development Company, LP All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
| 346 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/in_memory/SimpleGraph.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.in_memory;
import org.apache.clerezza.*;
import org.apache.clerezza.implementation.graph.AbstractGraph;
import java.lang.ref.SoftReference;
import java.util.*;
/**
 * A minimalistic in-memory Graph implementation, without any indexes or other
 * optimizations. Optionally tracks live iterators so that concurrent
 * modifications can be detected (see {@link #setCheckConcurrency(boolean)}).
 *
 * @author reto
 */
public class SimpleGraph extends AbstractGraph {

    // The backing triple set; synchronized when created by the no-arg constructor.
    final Set<Triple> triples;

    // When true, iterators invalidated by concurrent changes throw
    // ConcurrentModificationException. Off by default.
    private boolean checkConcurrency = false;

    /**
     * Iterator over a snapshot of matching triples; removal is propagated to
     * the backing set and invalidates all other live iterators.
     */
    class SimpleIterator implements Iterator<Triple> {

        private Iterator<Triple> listIter;
        private boolean isValid = true;

        public SimpleIterator(Iterator<Triple> listIter) {
            this.listIter = listIter;
        }

        // Most recently returned triple, needed by remove().
        private Triple currentNext;

        @Override
        public boolean hasNext() {
            checkValidity();
            return listIter.hasNext();
        }

        @Override
        public Triple next() {
            checkValidity();
            currentNext = listIter.next();
            return currentNext;
        }

        @Override
        public void remove() {
            checkValidity();
            // Remove from the snapshot AND the backing set.
            listIter.remove();
            triples.remove(currentNext);
            // Invalidate every live iterator except the one doing the removal.
            invalidateIterators(this);
        }

        private void checkValidity() throws ConcurrentModificationException {
            if (checkConcurrency && !isValid) {
                throw new ConcurrentModificationException();
            }
        }

        private void invalidate() {
            isValid = false;
        }
    }

    // Soft references so tracked iterators can still be garbage collected.
    private final Set<SoftReference<SimpleIterator>> iterators =
            Collections.synchronizedSet(new HashSet<SoftReference<SimpleIterator>>());

    /**
     * Creates an empty SimpleGraph.
     */
    public SimpleGraph() {
        triples = Collections.synchronizedSet(new HashSet<Triple>());
    }

    /**
     * Creates a SimpleGraph using the passed iterator; the iterator
     * is consumed before the constructor returns.
     *
     * @param iterator source of the initial triples
     */
    public SimpleGraph(Iterator<Triple> iterator) {
        triples = new HashSet<Triple>();
        while (iterator.hasNext()) {
            Triple triple = iterator.next();
            triples.add(triple);
        }
    }

    /**
     * Creates a SimpleGraph for the specified set of triples;
     * subsequent modifications of baseSet DO affect the created instance.
     *
     * @param baseSet the set used as backing store
     */
    public SimpleGraph(Set<Triple> baseSet) {
        this.triples = baseSet;
    }

    /**
     * Creates a SimpleGraph for the specified collection of triples;
     * subsequent modifications of baseCollection do NOT affect the created
     * instance.
     *
     * @param baseCollection the triples to copy
     */
    public SimpleGraph(Collection<Triple> baseCollection) {
        this.triples = new HashSet<Triple>(baseCollection);
    }

    @Override
    public int performSize() {
        return triples.size();
    }

    @Override
    public Iterator<Triple> performFilter(final BlankNodeOrIRI subject, final IRI predicate, final RDFTerm object) {
        // Copy matches into a snapshot list while holding the set's monitor,
        // so iteration is safe against concurrent mutation of 'triples'.
        final List<Triple> tripleList = new ArrayList<Triple>();
        synchronized (triples) {
            Iterator<Triple> baseIter = triples.iterator();
            while (baseIter.hasNext()) {
                Triple triple = baseIter.next();
                // A null filter component acts as a wildcard.
                if ((subject != null)
                        && (!triple.getSubject().equals(subject))) {
                    continue;
                }
                if ((predicate != null)
                        && (!triple.getPredicate().equals(predicate))) {
                    continue;
                }
                if ((object != null)
                        && (!triple.getObject().equals(object))) {
                    continue;
                }
                tripleList.add(triple);
            }
            final Iterator<Triple> listIter = tripleList.iterator();
            SimpleIterator resultIter = new SimpleIterator(listIter);
            if (checkConcurrency) {
                iterators.add(new SoftReference<SimpleIterator>(resultIter));
            }
            return resultIter;
        }
    }

    @Override
    public boolean performAdd(Triple e) {
        boolean modified = triples.add(e);
        if (modified) {
            invalidateIterators(null);
        }
        return modified;
    }

    /**
     * Invalidates all tracked iterators except {@code caller}; cleared
     * (garbage-collected) references are dropped from the tracking set.
     *
     * @param caller the iterator performing the modification, or null
     */
    private void invalidateIterators(SimpleIterator caller) {
        if (!checkConcurrency) {
            return;
        }
        // Typed instead of raw SoftReference to avoid unchecked warnings.
        Set<SoftReference<SimpleIterator>> oldReferences = new HashSet<SoftReference<SimpleIterator>>();
        synchronized (iterators) {
            for (SoftReference<SimpleIterator> softReference : iterators) {
                SimpleIterator simpleIterator = softReference.get();
                if (simpleIterator == null) {
                    oldReferences.add(softReference);
                    continue;
                }
                if (simpleIterator != caller) {
                    simpleIterator.invalidate();
                }
            }
        }
        iterators.removeAll(oldReferences);
    }

    /**
     * Specifies whether or not to throw <code>ConcurrentModificationException</code>s,
     * if this simple triple collection is modified concurrently. Concurrency
     * check is set to false by default.
     *
     * @param bool Specifies whether or not to check concurrent modifications.
     */
    public void setCheckConcurrency(boolean bool) {
        checkConcurrency = bool;
    }

    @Override
    public ImmutableGraph getImmutableGraph() {
        return new SimpleImmutableGraph(this);
    }
}
| 347 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/in_memory/SimpleImmutableGraph.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.in_memory;
import org.apache.clerezza.*;
import org.apache.clerezza.implementation.graph.AbstractImmutableGraph;
import org.apache.clerezza.implementation.in_memory.SimpleGraph;
import java.util.Iterator;
/**
 * An immutable graph view backed by a (snapshot of a) {@link Graph}.
 *
 * @author reto
 */
public class SimpleImmutableGraph extends AbstractImmutableGraph {

    // The backing graph; never exposed for mutation.
    private final Graph graph;

    /**
     * Creates an ImmutableGraph with the triples in baseGraph.
     *
     * @param baseGraph the collection of triples this ImmutableGraph shall consist of
     */
    public SimpleImmutableGraph(Graph baseGraph) {
        this.graph = new SimpleGraph(baseGraph.iterator());
    }

    /**
     * Creates an ImmutableGraph with the triples in baseGraph.
     * <p>
     * This constructor allows specifying whether baseGraph might change in
     * future. If graphWillNeverChange is set to true the collection is assumed
     * never to change and is not copied, which is more efficient.
     *
     * @param baseGraph            the collection of triples this ImmutableGraph shall consist of
     * @param graphWillNeverChange true if the caller promises baseGraph will never change
     */
    public SimpleImmutableGraph(Graph baseGraph, boolean graphWillNeverChange) {
        if (!graphWillNeverChange) {
            // Defensive snapshot: later changes to baseGraph must not show here.
            this.graph = new SimpleGraph(baseGraph.iterator());
        } else {
            this.graph = baseGraph;
        }
    }

    /**
     * Creates an ImmutableGraph from the triples produced by the iterator;
     * the iterator is consumed before the constructor returns.
     *
     * @param tripleIter source of the triples
     */
    public SimpleImmutableGraph(Iterator<Triple> tripleIter) {
        this.graph = new SimpleGraph(tripleIter);
    }

    @Override
    public int performSize() {
        return graph.size();
    }

    @Override
    public Iterator<Triple> performFilter(BlankNodeOrIRI subject, IRI predicate, RDFTerm object) {
        return graph.filter(subject, predicate, object);
    }
}
| 348 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graph/DelayedNotificator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graph;
import org.apache.clerezza.event.GraphEvent;
import org.apache.clerezza.event.GraphListener;
import java.lang.ref.WeakReference;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Batches {@link GraphEvent}s per listener and delivers them after a
 * configurable delay on a shared daemon timer thread. Listeners that were not
 * registered with a delay receive events synchronously.
 *
 * @author reto
 */
class DelayedNotificator {

    private static final Logger log = Logger.getLogger(DelayedNotificator.class.getName());
    // Shared daemon timer; a single thread delivers all delayed events.
    private static Timer timer = new Timer("Event delivery timer", true);

    /**
     * Per-listener state: the delivery delay and the batch of events collected
     * since the last delivery. Holds the listener only weakly so registration
     * does not prevent its garbage collection.
     */
    static class ListenerHolder {

        long delay;
        // Pending events; null means no delivery task is currently scheduled.
        List<GraphEvent> events = null;
        WeakReference<GraphListener> listenerRef;

        public ListenerHolder(GraphListener listener, long delay) {
            this.listenerRef = new WeakReference<GraphListener>(listener);
            this.delay = delay;
        }

        /**
         * Adds an event to the pending batch; if no batch exists yet, starts
         * one and schedules its delivery after {@code delay} milliseconds.
         */
        private void registerEvent(GraphEvent event) {
            synchronized (this) {
                if (events == null) {
                    events = new ArrayList<GraphEvent>();
                    events.add(event);
                    timer.schedule(new TimerTask() {

                        @Override
                        public void run() {
                            List<GraphEvent> eventsLocal;
                            // Swap out the batch under the holder's lock so
                            // new events start a fresh batch.
                            synchronized (ListenerHolder.this) {
                                eventsLocal = events;
                                events = null;
                            }
                            GraphListener listener = listenerRef.get();
                            if (listener == null) {
                                log.fine("Ignoring garbage collected listener");
                            } else {
                                try {
                                    listener.graphChanged(eventsLocal);
                                } catch (Exception e) {
                                    // Never let a listener failure kill the timer thread.
                                    log.log(Level.WARNING, "Exception delivering ImmutableGraph event", e);
                                }
                            }
                        }
                    }, delay);
                } else {
                    events.add(event);
                }
            }
        }
    }

    // Weak keys: a listener's holder disappears when the listener is collected.
    private final Map<GraphListener, ListenerHolder> map = Collections.synchronizedMap(
            new WeakHashMap<GraphListener, ListenerHolder>());

    /**
     * Registers a listener for delayed delivery.
     *
     * @param listener the listener to notify
     * @param delay    the batching delay in milliseconds
     */
    void addDelayedListener(GraphListener listener, long delay) {
        map.put(listener, new ListenerHolder(listener, delay));
    }

    /**
     * Removes a listener. This doesn't prevent the listener from receiving
     * events already scheduled for delivery.
     *
     * @param listener the listener to remove
     */
    void removeDelayedListener(GraphListener listener) {
        map.remove(listener);
    }

    /**
     * Delivers an event to a listener: synchronously if the listener has not
     * been registered as a delayed listener, otherwise via its batch.
     *
     * @param listener the target listener
     * @param event    the event to deliver
     */
    void sendEventToListener(GraphListener listener, GraphEvent event) {
        ListenerHolder holder = map.get(listener);
        if (holder == null) {
            listener.graphChanged(Collections.singletonList(event));
        } else {
            holder.registerEvent(event);
        }
    }
}
| 349 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graph/LockingIterator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graph;
import org.apache.clerezza.Triple;
import java.util.Iterator;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
/**
 * Decorates an {@code Iterator<Triple>} with lock guards: {@code hasNext} and
 * {@code next} run under the read lock, {@code remove} under the write lock
 * of the supplied {@link ReadWriteLock}.
 *
 * @author reto
 */
class LockingIterator implements Iterator<Triple> {

    private final Iterator<Triple> delegate;
    private final Lock readLock;
    private final Lock writeLock;

    public LockingIterator(Iterator<Triple> iterator, ReadWriteLock lock) {
        this.delegate = iterator;
        this.readLock = lock.readLock();
        this.writeLock = lock.writeLock();
    }

    @Override
    public boolean hasNext() {
        readLock.lock();
        try {
            return delegate.hasNext();
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public Triple next() {
        readLock.lock();
        try {
            return delegate.next();
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public void remove() {
        writeLock.lock();
        try {
            delegate.remove();
        } finally {
            writeLock.unlock();
        }
    }
}
| 350 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graph/ReadOnlyException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graph;
import java.security.AccessControlException;
/**
 * Thrown on an attempt to add or remove triples on a read-only mutable
 * ImmutableGraph.
 *
 * @author tsuy
 */
public class ReadOnlyException extends AccessControlException {

    // AccessControlException is Serializable; pin the serial form explicitly.
    private static final long serialVersionUID = 1L;

    /** The rejected action, e.g. "add" or "remove". */
    private final String action;

    /**
     * Creates an exception indicating that the mutable ImmutableGraph is
     * read-only.
     *
     * @param action the name of the rejected action (used in the message)
     */
    public ReadOnlyException(String action) {
        super("read only mutable ImmutableGraph, not allowed to " + action);
        this.action = action;
    }

    /**
     * @return the name of the action that was rejected
     */
    public String getAction() {
        return action;
    }
}
| 351 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graph/AbstractImmutableGraph.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graph;
import org.apache.clerezza.RDFTerm;
import org.apache.clerezza.BlankNode;
import org.apache.clerezza.ImmutableGraph;
import org.apache.clerezza.Triple;
import org.apache.clerezza.implementation.graphmatching.GraphMatcher;
import java.util.Collection;
import java.util.Iterator;
/**
 * <code>AbstractImmutableGraph</code> is an abstract implementation of
 * <code>ImmutableGraph</code> providing <code>equals</code> and
 * <code>hashCode</code> (blank-node-blind, so isomorphic graphs hash equal)
 * and rejecting all mutating collection operations.
 *
 * @author reto
 */
public abstract class AbstractImmutableGraph extends AbstractGraph
        implements ImmutableGraph {

    @Override
    public final synchronized int hashCode() {
        int sum = 0;
        Iterator<Triple> it = iterator();
        while (it.hasNext()) {
            sum += getBlankNodeBlindHash(it.next());
        }
        return sum;
    }

    /**
     * Hash of one triple ignoring blank nodes, so that the graph hash is
     * stable under blank-node renaming.
     *
     * @param triple the triple to hash
     * @return hash without BNode hashes
     */
    private int getBlankNodeBlindHash(Triple triple) {
        int hash = triple.getPredicate().hashCode();
        RDFTerm subject = triple.getSubject();
        RDFTerm object = triple.getObject();
        if (!(subject instanceof BlankNode)) {
            hash ^= subject.hashCode() >> 1;
        }
        if (!(object instanceof BlankNode)) {
            hash ^= object.hashCode() << 1;
        }
        return hash;
    }

    @Override
    public boolean add(Triple e) {
        throw new UnsupportedOperationException("Graphs are not mutable, use Graph");
    }

    @Override
    public boolean addAll(Collection<? extends Triple> c) {
        throw new UnsupportedOperationException("Graphs are not mutable, use Graph");
    }

    @Override
    public boolean remove(Object o) {
        throw new UnsupportedOperationException("Graphs are not mutable, use Graph");
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        throw new UnsupportedOperationException("Graphs are not mutable, use Graph");
    }

    @Override
    public void clear() {
        throw new UnsupportedOperationException("Graphs are not mutable, use Graph");
    }

    @Override
    public ImmutableGraph getImmutableGraph() {
        // Already immutable; no copy needed.
        return this;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof ImmutableGraph)) {
            return false;
        }
        ImmutableGraph other = (ImmutableGraph) obj;
        // Cheap pre-check: isomorphic graphs must have equal hashes.
        if (other.hashCode() != hashCode()) {
            return false;
        }
        // Full check: a valid blank-node mapping must exist.
        return GraphMatcher.getValidMapping(this, other) != null;
    }
}
| 352 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graph/PrivilegedGraphWrapper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graph;
import org.apache.clerezza.*;
import org.apache.clerezza.implementation.in_memory.SimpleImmutableGraph;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.locks.ReadWriteLock;
/**
 * Calls the methods of the wrapped <code>Graph</code> as privileged
 * code, because they may need permissions like writing to disk or accessing
 * network. Iterators handed out by {@link #filter} and {@link #iterator} are
 * wrapped as well, so that {@code hasNext}/{@code next}/{@code remove} also
 * run privileged (the original implementation defined
 * {@code PriviledgedTripleIterator} but never used it, so iteration ran
 * unprivileged).
 *
 * @author mir
 */
public class PrivilegedGraphWrapper implements Graph {

    /** The graph whose methods are executed under doPrivileged. */
    private final Graph graph;

    public PrivilegedGraphWrapper(Graph graph) {
        this.graph = graph;
    }

    @Override
    public Iterator<Triple> filter(final BlankNodeOrIRI subject, final IRI predicate,
            final RDFTerm object) {
        // Wrap the result so that each iterator call is privileged too.
        return new PriviledgedTripleIterator(
                AccessController.doPrivileged(new PrivilegedAction<Iterator<Triple>>() {

                    @Override
                    public Iterator<Triple> run() {
                        return graph.filter(subject, predicate, object);
                    }
                }));
    }

    @Override
    public int size() {
        return AccessController.doPrivileged(new PrivilegedAction<Integer>() {

            @Override
            public Integer run() {
                return graph.size();
            }
        });
    }

    @Override
    public boolean isEmpty() {
        return AccessController.doPrivileged(new PrivilegedAction<Boolean>() {

            @Override
            public Boolean run() {
                return graph.isEmpty();
            }
        });
    }

    @Override
    public boolean contains(final Object o) {
        return AccessController.doPrivileged(new PrivilegedAction<Boolean>() {

            @Override
            public Boolean run() {
                return graph.contains(o);
            }
        });
    }

    @Override
    public Iterator<Triple> iterator() {
        // Wrap the result so that each iterator call is privileged too.
        return new PriviledgedTripleIterator(
                AccessController.doPrivileged(new PrivilegedAction<Iterator<Triple>>() {

                    @Override
                    public Iterator<Triple> run() {
                        return graph.iterator();
                    }
                }));
    }

    @Override
    public Object[] toArray() {
        return AccessController.doPrivileged(new PrivilegedAction<Object[]>() {

            @Override
            public Object[] run() {
                return graph.toArray();
            }
        });
    }

    @Override
    public <T> T[] toArray(final T[] a) {
        return AccessController.doPrivileged(new PrivilegedAction<T[]>() {

            @Override
            public T[] run() {
                return graph.toArray(a);
            }
        });
    }

    @Override
    public boolean add(final Triple triple) {
        return AccessController.doPrivileged(new PrivilegedAction<Boolean>() {

            @Override
            public Boolean run() {
                return graph.add(triple);
            }
        });
    }

    @Override
    public boolean remove(final Object o) {
        return AccessController.doPrivileged(new PrivilegedAction<Boolean>() {

            @Override
            public Boolean run() {
                return graph.remove(o);
            }
        });
    }

    @Override
    public boolean containsAll(final Collection<?> c) {
        return AccessController.doPrivileged(new PrivilegedAction<Boolean>() {

            @Override
            public Boolean run() {
                return graph.containsAll(c);
            }
        });
    }

    @Override
    public boolean addAll(final Collection<? extends Triple> c) {
        return AccessController.doPrivileged(new PrivilegedAction<Boolean>() {

            @Override
            public Boolean run() {
                return graph.addAll(c);
            }
        });
    }

    @Override
    public boolean removeAll(final Collection<?> c) {
        return AccessController.doPrivileged(new PrivilegedAction<Boolean>() {

            @Override
            public Boolean run() {
                return graph.removeAll(c);
            }
        });
    }

    @Override
    public boolean retainAll(final Collection<?> c) {
        return AccessController.doPrivileged(new PrivilegedAction<Boolean>() {

            @Override
            public Boolean run() {
                return graph.retainAll(c);
            }
        });
    }

    @Override
    public void clear() {
        AccessController.doPrivileged(new PrivilegedAction<Object>() {

            @Override
            public Object run() {
                graph.clear();
                return null;
            }
        });
    }

    @Override
    public ReadWriteLock getLock() {
        // Lock access itself needs no privileges.
        return graph.getLock();
    }

    /**
     * Iterator decorator executing every call as privileged code.
     */
    private static class PriviledgedTripleIterator implements Iterator<Triple> {

        private final Iterator<Triple> wrappedIterator;

        public PriviledgedTripleIterator(Iterator<Triple> wrappedIterator) {
            this.wrappedIterator = wrappedIterator;
        }

        @Override
        public boolean hasNext() {
            return AccessController.doPrivileged(new PrivilegedAction<Boolean>() {

                @Override
                public Boolean run() {
                    return wrappedIterator.hasNext();
                }
            });
        }

        @Override
        public Triple next() {
            return AccessController.doPrivileged(new PrivilegedAction<Triple>() {

                @Override
                public Triple run() {
                    return wrappedIterator.next();
                }
            });
        }

        @Override
        public void remove() {
            AccessController.doPrivileged(new PrivilegedAction<Object>() {

                @Override
                public Object run() {
                    wrappedIterator.remove();
                    return null;
                }
            });
        }
    }

    @Override
    public ImmutableGraph getImmutableGraph() {
        return new SimpleImmutableGraph(this);
    }
}
| 353 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graph/WatchableGraphWrapper.java | /*
* Copyright 2015 The Apache Software Foundation.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graph;
import org.apache.clerezza.*;
import org.apache.clerezza.event.*;
import java.lang.ref.WeakReference;
import java.util.*;
import java.util.concurrent.locks.ReadWriteLock;
/**
 * Decorates a {@link Graph} with listener support: registered
 * {@link GraphListener}s whose {@link FilterTriple} matches are notified of
 * additions and removals performed through this wrapper. Modifications
 * applied directly to the wrapped graph are not observed.
 *
 * @author developer
 */
public class WatchableGraphWrapper implements WatchableGraph {

    /** The underlying graph all operations delegate to. */
    final Graph wrapped;

    public WatchableGraphWrapper(Graph wrapped) {
        this.wrapped = wrapped;
    }

    // All listener registrations; entries with garbage-collected listeners
    // are purged lazily during dispatch/removal.
    private final Set<ListenerConfiguration> listenerConfigs = Collections.synchronizedSet(
            new HashSet<ListenerConfiguration>());

    private DelayedNotificator delayedNotificator = new DelayedNotificator();

    @Override
    public Iterator<Triple> iterator() {
        return filter(null, null, null);
    }

    @Override
    public boolean contains(Object o) {
        if (!(o instanceof Triple)) {
            return false;
        }
        Triple t = (Triple) o;
        return filter(t.getSubject(), t.getPredicate(), t.getObject()).hasNext();
    }

    /**
     * Filters the wrapped graph; the returned iterator dispatches a
     * {@link RemoveEvent} when {@code remove} is invoked.
     */
    @Override
    public Iterator<Triple> filter(BlankNodeOrIRI subject, IRI predicate,
            RDFTerm object) {
        final Iterator<Triple> baseIter = wrapped.filter(subject, predicate, object);
        return new Iterator<Triple>() {

            Triple currentTriple = null;

            @Override
            public boolean hasNext() {
                return baseIter.hasNext();
            }

            @Override
            public Triple next() {
                currentTriple = baseIter.next();
                return currentTriple;
            }

            @Override
            public void remove() {
                baseIter.remove();
                dispatchEvent(new RemoveEvent(WatchableGraphWrapper.this, currentTriple));
            }
        };
    }

    @Override
    public boolean add(Triple triple) {
        boolean success = performAdd(triple);
        if (success) {
            dispatchEvent(new AddEvent(this, triple));
        }
        return success;
    }

    /**
     * A subclass of <code>AbstractGraph</code> should override
     * this method instead of <code>add</code> for Graph event support to be
     * added.
     *
     * @param e The triple to be added to the triple collection
     * @return true if the collection changed
     */
    protected boolean performAdd(Triple e) {
        return wrapped.add(e);
    }

    @Override
    public boolean remove(Object o) {
        // Honour the Collection.remove contract: a non-Triple can never be
        // contained, so return false rather than throwing ClassCastException
        // (the original cast unconditionally).
        if (!(o instanceof Triple)) {
            return false;
        }
        Triple triple = (Triple) o;
        boolean success = performRemove(triple);
        if (success) {
            dispatchEvent(new RemoveEvent(this, triple));
        }
        return success;
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        // Delegate element-wise through remove(Object) so each removal fires
        // its event.
        boolean modified = false;
        for (Iterator<? extends Object> it = c.iterator(); it.hasNext(); ) {
            Object object = it.next();
            if (remove(object)) {
                modified = true;
            }
        }
        return modified;
    }

    /**
     * A subclass of <code>AbstractGraph</code> should override
     * this method instead of <code>remove</code> for ImmutableGraph event
     * support to be added.
     *
     * @param triple The triple to be removed from the triple collection
     * @return true if the triple was found and removed
     */
    protected boolean performRemove(Triple triple) {
        Iterator<Triple> e = filter(null, null, null);
        while (e.hasNext()) {
            if (triple.equals(e.next())) {
                e.remove();
                return true;
            }
        }
        return false;
    }

    /**
     * Dispatches a <code>GraphEvent</code> to all registered listeners for which
     * the specified <code>Triple</code> matches the <code>FilterTriple</code>s
     * of the listeners.
     *
     * @param event The GraphEvent to dispatch
     */
    protected void dispatchEvent(GraphEvent event) {
        synchronized (listenerConfigs) {
            Iterator<ListenerConfiguration> iter = listenerConfigs.iterator();
            while (iter.hasNext()) {
                ListenerConfiguration config = iter.next();
                GraphListener registeredListener = config.getListener();
                if (registeredListener == null) {
                    // Listener was garbage collected; drop the stale entry.
                    iter.remove();
                    continue;
                }
                if (config.getFilter().match(event.getTriple())) {
                    delayedNotificator.sendEventToListener(registeredListener, event);
                }
            }
        }
    }

    @Override
    public void addGraphListener(GraphListener listener, FilterTriple filter) {
        // Delay 0 means synchronous delivery.
        addGraphListener(listener, filter, 0);
    }

    @Override
    public void addGraphListener(GraphListener listener, FilterTriple filter,
            long delay) {
        listenerConfigs.add(new ListenerConfiguration(listener, filter));
        if (delay > 0) {
            delayedNotificator.addDelayedListener(listener, delay);
        }
    }

    @Override
    public void removeGraphListener(GraphListener listener) {
        synchronized (listenerConfigs) {
            Iterator<ListenerConfiguration> iter = listenerConfigs.iterator();
            while (iter.hasNext()) {
                ListenerConfiguration listenerConfig = iter.next();
                GraphListener registeredListener = listenerConfig.getListener();
                // Also purges entries whose listener was garbage collected.
                if ((registeredListener == null) || (registeredListener.equals(listener))) {
                    iter.remove();
                }
            }
        }
        delayedNotificator.removeDelayedListener(listener);
    }

    @Override
    public ImmutableGraph getImmutableGraph() {
        // NOTE(review): snapshotting through this wrapper is not implemented;
        // callers must use the wrapped graph directly.
        throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public ReadWriteLock getLock() {
        return wrapped.getLock();
    }

    @Override
    public int size() {
        return wrapped.size();
    }

    @Override
    public boolean isEmpty() {
        return wrapped.isEmpty();
    }

    @Override
    public Object[] toArray() {
        return wrapped.toArray();
    }

    @Override
    public <T> T[] toArray(T[] a) {
        return wrapped.toArray(a);
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        return wrapped.containsAll(c);
    }

    @Override
    public boolean addAll(Collection<? extends Triple> c) {
        // NOTE(review): bulk additions delegate directly and therefore do not
        // fire AddEvents -- confirm whether this asymmetry with removeAll is
        // intended.
        return wrapped.addAll(c);
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        return wrapped.retainAll(c);
    }

    @Override
    public void clear() {
        wrapped.clear();
    }

    /**
     * Pairs a weakly-referenced listener with its triple filter.
     */
    private static class ListenerConfiguration {

        private WeakReference<GraphListener> listenerRef;
        private FilterTriple filter;

        private ListenerConfiguration(GraphListener listener, FilterTriple filter) {
            this.listenerRef = new WeakReference<GraphListener>(listener);
            this.filter = filter;
        }

        /**
         * @return the listener, or null if it has been garbage collected
         */
        GraphListener getListener() {
            GraphListener listener = listenerRef.get();
            return listener;
        }

        /**
         * @return the filter
         */
        FilterTriple getFilter() {
            return filter;
        }
    }
}
| 354 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graph/WriteBlockedGraph.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graph;
import org.apache.clerezza.*;
import java.util.Collection;
import java.util.Iterator;
/**
 * This is a wrapper object for <code>Graph</code>. If <code>SecurityManager</code>
 * is not <code>null</code>, <code>TcManager</code> checks the <code>TcPermission</code>.
 * If read-only permissions are set this wrapper is used instead of <code>Graph</code>
 * and throws a {@link ReadOnlyException} when any mutating method (including
 * <code>Iterator.remove</code>) is called.
 *
 * @author tsuy
 */
public class WriteBlockedGraph extends AbstractGraph
        implements Graph {

    /** The wrapped graph; only its read operations are exposed. */
    private final Graph triples;

    public WriteBlockedGraph(Graph triples) {
        this.triples = triples;
    }

    @Override
    protected int performSize() {
        return triples.size();
    }

    /**
     * Delegates filtering to the wrapped graph, but returns an iterator whose
     * {@code remove} is blocked.
     */
    @Override
    protected Iterator<Triple> performFilter(BlankNodeOrIRI subject, IRI predicate, RDFTerm object) {
        final Iterator<Triple> baseIter = triples.filter(subject, predicate, object);
        return new Iterator<Triple>() {

            @Override
            public boolean hasNext() {
                return baseIter.hasNext();
            }

            @Override
            public Triple next() {
                return baseIter.next();
            }

            @Override
            public void remove() {
                throw new ReadOnlyException("remove");
            }
        };
    }

    @Override
    public boolean add(Triple e) {
        throw new ReadOnlyException("add");
    }

    @Override
    public boolean addAll(Collection<? extends Triple> c) {
        throw new ReadOnlyException("add all");
    }

    @Override
    public void clear() {
        throw new ReadOnlyException("clear");
    }

    @Override
    public boolean remove(Object o) {
        throw new ReadOnlyException("remove");
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        throw new ReadOnlyException("remove all");
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        throw new ReadOnlyException("retain all");
    }

    // Generified: the original declared a raw Iterator return type.
    @Override
    public Iterator<Triple> iterator() {
        return filter(null, null, null);
    }

    @Override
    public ImmutableGraph getImmutableGraph() {
        return this.triples.getImmutableGraph();
    }
}
| 355 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graph/PrivilegedImmuatbleGraphWrapper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graph;
import org.apache.clerezza.ImmutableGraph;
/**
 * Calls the methods of the wrapped <code>ImmutableGraph</code> as privileged
 * code, because they may need permissions like writing to disk or accessing
 * network.
 *
 * @author mir
 */
public class PrivilegedImmuatbleGraphWrapper extends PrivilegedGraphWrapper
        implements ImmutableGraph {

    /**
     * @param immutableGraph the graph whose methods should run as privileged code
     */
    public PrivilegedImmuatbleGraphWrapper(ImmutableGraph immutableGraph) {
        super(immutableGraph);
    }
}
| 356 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/graph/AbstractGraph.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.graph;
import org.apache.clerezza.*;
import org.apache.clerezza.implementation.debug.ReentrantReadWriteLockTracker;
import org.apache.clerezza.implementation.in_memory.SimpleImmutableGraph;
import java.util.AbstractCollection;
import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
 * An abstract implementation of <code>Graph</code> implementing
 * <code>iterator</code> and <code>contains</code> calling <code>filter</code>.
 * <p>
 * Every public collection/graph operation acquires the appropriate side of a
 * {@link ReadWriteLock} and delegates to a corresponding {@code perform*}
 * method; subclasses override the {@code perform*} methods instead of the
 * public ones. Iterators handed to callers are wrapped in
 * {@link LockingIterator} so each {@code hasNext}/{@code next}/{@code remove}
 * call is individually guarded by the same lock.
 *
 * @author reto
 */
public abstract class AbstractGraph extends AbstractCollection<Triple>
        implements Graph {

    // System property; when "true" a tracking lock that records acquisition
    // stack traces is used instead of a plain ReentrantReadWriteLock.
    private static final String DEBUG_MODE = "rdfLocksDebugging";
    private final ReadWriteLock lock;

    private final Lock readLock;
    private final Lock writeLock;

    /**
     * Creates the graph with its own lock; a debugging lock tracker is used
     * when the system property {@code rdfLocksDebugging} is set to "true".
     */
    public AbstractGraph() {
        {
            String debugMode = System.getProperty(DEBUG_MODE);
            if (debugMode != null && debugMode.toLowerCase().equals("true")) {
                lock = new ReentrantReadWriteLockTracker();
            } else {
                lock = new ReentrantReadWriteLock();
            }
        }
        readLock = lock.readLock();
        writeLock = lock.writeLock();
    }

    /**
     * Creates the graph using an externally supplied lock, e.g. to share one
     * lock between several views of the same data.
     *
     * @param lock the lock guarding all access to this graph
     */
    public AbstractGraph(final ReadWriteLock lock) {
        this.lock = lock;
        readLock = lock.readLock();
        writeLock = lock.writeLock();
    }

    @Override
    public ReadWriteLock getLock() {
        return lock;
    }

    @Override
    public ImmutableGraph getImmutableGraph() {
        readLock.lock();
        try {
            return performGetImmutableGraph();
        } finally {
            readLock.unlock();
        }
    }

    // Default snapshot implementation; subclasses may override for a cheaper
    // representation.
    public ImmutableGraph performGetImmutableGraph() {
        return new SimpleImmutableGraph(this);
    }

    // ---- Read operations: acquire the read lock, then delegate. ----

    @Override
    public Iterator<Triple> filter(BlankNodeOrIRI subject, IRI predicate, RDFTerm object) {
        readLock.lock();
        try {
            // The LockingIterator re-acquires the lock per iterator call.
            return new LockingIterator(performFilter(subject, predicate, object), lock);
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public int size() {
        readLock.lock();
        try {
            return performSize();
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public boolean isEmpty() {
        readLock.lock();
        try {
            return performIsEmpty();
        } finally {
            readLock.unlock();
        }
    }

    @Override
    @SuppressWarnings("element-type-mismatch")
    public boolean contains(Object o) {
        readLock.lock();
        try {
            return performContains(o);
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public Iterator<Triple> iterator() {
        readLock.lock();
        try {
            return new LockingIterator(performIterator(), lock);
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public Object[] toArray() {
        readLock.lock();
        try {
            return performToArray();
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public <T> T[] toArray(T[] a) {
        readLock.lock();
        try {
            return performToArray(a);
        } finally {
            readLock.unlock();
        }
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        readLock.lock();
        try {
            return performContainsAll(c);
        } finally {
            readLock.unlock();
        }
    }

    // ---- Write operations: acquire the write lock, then delegate. ----

    @Override
    public boolean add(Triple e) {
        writeLock.lock();
        try {
            return performAdd(e);
        } finally {
            writeLock.unlock();
        }
    }

    @Override
    public boolean remove(Object o) {
        writeLock.lock();
        try {
            return performRemove(o);
        } finally {
            writeLock.unlock();
        }
    }

    @Override
    public boolean addAll(Collection<? extends Triple> c) {
        writeLock.lock();
        try {
            return performAddAll(c);
        } finally {
            writeLock.unlock();
        }
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        writeLock.lock();
        try {
            return performRemoveAll(c);
        } finally {
            writeLock.unlock();
        }
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        writeLock.lock();
        try {
            return performRetainAll(c);
        } finally {
            writeLock.unlock();
        }
    }

    @Override
    public void clear() {
        writeLock.lock();
        try {
            performClear();
        } finally {
            writeLock.unlock();
        }
    }

    @Override
    public boolean equals(Object obj) {
        // Intentionally identity-based: mutable Graph instances are only
        // equal to themselves. Content-based (isomorphism) equality is
        // defined for ImmutableGraph in AbstractImmutableGraph.
        return this == obj;
    }

    // ---- perform* hooks -- invoked with the appropriate lock already held.
    // Subclasses must implement performFilter/performSize and may override
    // the rest; the defaults fall back to AbstractCollection's behavior.

    protected abstract Iterator<Triple> performFilter(BlankNodeOrIRI subject, IRI predicate, RDFTerm object);

    protected abstract int performSize();

    protected boolean performIsEmpty() {
        return super.isEmpty();
    }

    protected Object[] performToArray() {
        return super.toArray();
    }

    protected boolean performRemove(Object o) {
        return super.remove(o);
    }

    protected boolean performAddAll(Collection<? extends Triple> c) {
        return super.addAll(c);
    }

    protected boolean performRemoveAll(Collection<?> c) {
        return super.removeAll(c);
    }

    protected boolean performRetainAll(Collection<?> c) {
        return super.retainAll(c);
    }

    protected void performClear() {
        super.clear();
    }

    protected boolean performContains(Object o) {
        return super.contains(o);
    }

    protected Iterator<Triple> performIterator() {
        return performFilter(null, null, null);
    }

    protected boolean performContainsAll(Collection<?> c) {
        return super.containsAll(c);
    }

    protected <T> T[] performToArray(T[] a) {
        return super.toArray(a);
    }

    protected boolean performAdd(Triple e) {
        return super.add(e);
    }
}
| 357 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/debug/ReadLockDebug.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.debug;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
/**
 * A debugging {@link ReadLock} decorator: every acquisition is registered
 * with the owning {@link ReentrantReadWriteLockTracker} together with the
 * acquiring thread's stack trace, so leaked/forgotten locks can be traced.
 * <p>
 * Fix over the original: {@code lockInterruptibly} and successful
 * {@code tryLock} acquisitions are now tracked too (previously only
 * {@code lock()} registered with the tracker, leaving it out of sync).
 *
 * @author mir
 */
public class ReadLockDebug extends ReadLock {

    ReentrantReadWriteLockTracker lock;
    // Stack trace of the most recent tracked acquisition; null when unlocked.
    StackTraceElement[] stackTrace;
    // The real lock all operations delegate to.
    ReadLock readLock;

    public ReadLockDebug(ReentrantReadWriteLockTracker lock) {
        super(lock);
        this.lock = lock;
        this.readLock = lock.realReadLock();
    }

    @Override
    public void lock() {
        readLock.lock();
        trackAcquisition();
    }

    @Override
    public void lockInterruptibly() throws InterruptedException {
        readLock.lockInterruptibly();
        trackAcquisition();
    }

    @Override
    public Condition newCondition() {
        return readLock.newCondition();
    }

    @Override
    public String toString() {
        return readLock.toString();
    }

    @Override
    public boolean tryLock() {
        boolean acquired = readLock.tryLock();
        if (acquired) {
            trackAcquisition();
        }
        return acquired;
    }

    @Override
    public boolean tryLock(long timeout, TimeUnit unit) throws InterruptedException {
        boolean acquired = readLock.tryLock(timeout, unit);
        if (acquired) {
            trackAcquisition();
        }
        return acquired;
    }

    @Override
    public void unlock() {
        readLock.unlock();
        lock.removeReadLock(this);
        stackTrace = null;
    }

    /**
     * @return stack trace captured at the most recent tracked acquisition,
     *         or {@code null} when not currently locked
     */
    public StackTraceElement[] getStackTrace() {
        return stackTrace;
    }

    // Registers this holder with the tracker and records where the lock was
    // taken; called only after the real lock has been acquired.
    private void trackAcquisition() {
        lock.addLockedReadLock(this);
        stackTrace = Thread.currentThread().getStackTrace();
    }
}
| 358 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/debug/WriteLockDebug.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.debug;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
/**
 * A {@link WriteLock} wrapper that records the stack trace of the acquisition
 * site while the lock is held, for debugging lock usage.
 *
 * @author mir
 */
public class WriteLockDebug extends WriteLock {

    // Tracker that created this wrapper.
    private final ReentrantReadWriteLockTracker lock;
    // The real write lock all operations are delegated to.
    private final WriteLock writeLock;
    // Acquisition-site stack trace; null while the lock is not held.
    private StackTraceElement[] stackTrace;

    public WriteLockDebug(ReentrantReadWriteLockTracker lock) {
        super(lock);
        this.lock = lock;
        this.writeLock = lock.realWriteLock();
    }

    @Override
    public int getHoldCount() {
        return writeLock.getHoldCount();
    }

    @Override
    public boolean isHeldByCurrentThread() {
        return writeLock.isHeldByCurrentThread();
    }

    @Override
    public void lock() {
        writeLock.lock();
        stackTrace = Thread.currentThread().getStackTrace();
    }

    @Override
    public void lockInterruptibly() throws InterruptedException {
        writeLock.lockInterruptibly();
        // Record the acquisition site, as lock() does; previously interruptible
        // acquisitions left stackTrace unset, defeating the purpose of this class.
        stackTrace = Thread.currentThread().getStackTrace();
    }

    @Override
    public Condition newCondition() {
        return writeLock.newCondition();
    }

    @Override
    public boolean tryLock() {
        boolean acquired = writeLock.tryLock();
        if (acquired) {
            // Record successful non-blocking acquisitions as well.
            stackTrace = Thread.currentThread().getStackTrace();
        }
        return acquired;
    }

    @Override
    public boolean tryLock(long timeout, TimeUnit unit) throws InterruptedException {
        boolean acquired = writeLock.tryLock(timeout, unit);
        if (acquired) {
            stackTrace = Thread.currentThread().getStackTrace();
        }
        return acquired;
    }

    @Override
    public void unlock() {
        writeLock.unlock();
        stackTrace = null;
    }

    /**
     * @return the stack trace recorded when the lock was acquired, or
     *         {@code null} if the lock is not currently held
     */
    public StackTraceElement[] getStackTrace() {
        return stackTrace;
    }
}
| 359 |
0 | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation | Create_ds/clerezza/api-implementation/src/main/java/org/apache/clerezza/implementation/debug/ReentrantReadWriteLockTracker.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.implementation.debug;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
 * A {@link ReentrantReadWriteLock} that keeps track of the read locks that are
 * currently held, so the acquisition sites of outstanding locks can be
 * inspected for debugging.
 *
 * @author mir
 */
public class ReentrantReadWriteLockTracker extends ReentrantReadWriteLock {

    // Read locks currently held; maintained by ReadLockDebug on (un)lock.
    private final Set<ReadLockDebug> lockedReadLocks =
            Collections.synchronizedSet(new HashSet<ReadLockDebug>());

    // Single shared write-lock wrapper, mirroring the superclass behavior of
    // returning one write lock instance.
    private final WriteLockDebug writeLock = new WriteLockDebug(this);

    // NOTE(review): the former overrides of getOwner(), getQueuedThreads(),
    // getReadHoldCount(), toString() etc. only delegated to the corresponding
    // super implementation and were removed as pure noise; the inherited
    // methods behave identically.

    /**
     * Returns a new tracking wrapper around the real read lock. A fresh
     * instance is returned per call so each acquisition can record its own
     * stack trace.
     */
    @Override
    public ReadLock readLock() {
        return new ReadLockDebug(this);
    }

    /** @return the untracked read lock of the underlying lock */
    ReadLock realReadLock() {
        return super.readLock();
    }

    /** @return the untracked write lock of the underlying lock */
    WriteLock realWriteLock() {
        return super.writeLock();
    }

    @Override
    public WriteLockDebug writeLock() {
        return writeLock;
    }

    void addLockedReadLock(ReadLockDebug lock) {
        lockedReadLocks.add(lock);
    }

    void removeReadLock(ReadLockDebug lock) {
        lockedReadLocks.remove(lock);
    }

    /**
     * @return a live, synchronized view of the read locks currently held
     */
    public Set<ReadLockDebug> getLockedReadLocks() {
        return lockedReadLocks;
    }
}
| 360 |
0 | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/sparql | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/sparql/providers/ResultSetXmlMessageBodyWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.jaxrs.sparql.providers;
import org.apache.clerezza.IRI;
import org.apache.clerezza.Language;
import org.apache.clerezza.Literal;
import org.apache.clerezza.RDFTerm;
import org.apache.clerezza.sparql.ResultSet;
import org.apache.clerezza.sparql.SolutionMapping;
import org.apache.clerezza.sparql.query.Variable;
import org.osgi.service.component.annotations.Component;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import javax.ws.rs.ext.Providers;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Source;
import javax.xml.transform.dom.DOMSource;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.List;
import java.util.Set;
/**
 * MessageBodyWriter for <code>ResultSet</code>.
 * Resulting output conforms to:
 * http://www.w3.org/TR/2008/REC-rdf-sparql-XMLres-20080115/
 *
 * @author mir, reto
 */
@Component(service = Object.class, property = {"javax.ws.rs=true"})
@Produces({"application/xml", "text/xml", "application/sparql-results+xml"})
@Provider
public class ResultSetXmlMessageBodyWriter implements MessageBodyWriter<ResultSet> {

    private Providers providers;

    final Logger logger = LoggerFactory.getLogger(ResultSetXmlMessageBodyWriter.class);

    @Override
    public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations,
            MediaType mediaType) {
        return ResultSet.class.isAssignableFrom(type);
    }

    @Override
    public long getSize(ResultSet t, Class<?> type, Type genericType,
            Annotation[] annotations, MediaType mediaType) {
        // Length not known in advance; -1 lets the container decide.
        return -1;
    }

    @Override
    public void writeTo(ResultSet resultSet, Class<?> type, Type genericType,
            Annotation[] annotations, MediaType mediaType, MultivaluedMap<String,
            Object> httpHeaders, OutputStream entityStream) throws IOException, WebApplicationException {
        // Build a DOM representation and delegate serialization to the
        // container's Source writer for the requested media type.
        Source source = toXmlSource(resultSet);
        MessageBodyWriter<Source> sourceMessageBodyWriter =
                providers.getMessageBodyWriter(Source.class, null, null, mediaType);
        sourceMessageBodyWriter.writeTo(source, Source.class, null, null, mediaType,
                httpHeaders, entityStream);
    }

    @Context
    public void setProviders(Providers providers) {
        this.providers = providers;
    }

    /**
     * Helper: transforms a {@link ResultSet} or a {@link Boolean} to a
     * {@link DOMSource}
     *
     * @param queryResult
     */
    private Source toXmlSource(ResultSet queryResult) {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        try {
            Document doc = dbf.newDocumentBuilder().newDocument();
            // adding root element
            Element root = doc.createElement("sparql");
            root.setAttribute("xmlns", "http://www.w3.org/2005/sparql-results#");
            doc.appendChild(root);
            Element head = doc.createElement("head");
            createVariables(queryResult.getResultVars(), head, doc);
            root.appendChild(head);

            Element results = doc.createElement("results");
            while (queryResult.hasNext()) {
                createResultElement(queryResult.next(), results, doc);
            }
            root.appendChild(results);

            DOMSource source = new DOMSource(doc);
            return source;
        } catch (ParserConfigurationException e) {
            throw createWebApplicationException(e);
        }
    }

    /**
     * Creates a WebApplicationException and prints a logger entry
     */
    private WebApplicationException createWebApplicationException(Exception e) {
        // Escape '<' so the exception message cannot inject markup, then turn
        // newlines into <br/> for readable HTML output. (The previous code
        // replaced "<" with itself, which was a no-op.)
        return new WebApplicationException(Response.status(Status.BAD_REQUEST)
                .entity(e.getMessage().replace("<", "&lt;").replace("\n",
                "<br/>")).build());
    }

    /**
     * Helper: creates value element from {@link RDFTerm} depending on its
     * class
     */
    private Element createValueElement(RDFTerm resource, Document doc) {
        Element value = null;
        if (resource instanceof IRI) {
            value = doc.createElement("uri");
            value.appendChild(doc.createTextNode(((IRI) resource)
                    .getUnicodeString()));
        } else if (resource instanceof Literal) {
            value = doc.createElement("literal");
            value.appendChild(doc.createTextNode(((Literal) resource)
                    .getLexicalForm()));
            value.setAttribute("datatype", (((Literal) resource)
                    .getDataType().getUnicodeString()));
            Language lang = ((Literal) resource).getLanguage();
            if (lang != null) {
                value.setAttribute("xml:lang", (lang.toString()));
            }
        } else {
            // Remaining case: blank node.
            value = doc.createElement("bnode");
            value.appendChild(doc.createTextNode("/"));
        }
        return value;
    }

    /**
     * Helper: creates results element from ResultSet
     */
    private void createResultElement(SolutionMapping solutionMap, Element results, Document doc) {
        Set<Variable> keys = solutionMap.keySet();
        Element result = doc.createElement("result");
        results.appendChild(result);
        for (Variable key : keys) {
            Element bindingElement = doc.createElement("binding");
            bindingElement.setAttribute("name", key.getName());
            bindingElement.appendChild(createValueElement((RDFTerm) solutionMap.get(key), doc));
            result.appendChild(bindingElement);
        }
    }

    /**
     * Helper: adds one "variable" element per result variable to the head.
     */
    private void createVariables(List<String> variables, Element head, Document doc) {
        for (String variable : variables) {
            Element varElement = doc.createElement("variable");
            varElement.setAttribute("name", variable);
            head.appendChild(varElement);
        }
    }
}
| 361 |
0 | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/sparql | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/sparql/providers/ResultSetJsonMessageBodyWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.jaxrs.sparql.providers;
import org.apache.clerezza.*;
import org.apache.clerezza.sparql.ResultSet;
import org.apache.clerezza.sparql.SolutionMapping;
import org.apache.clerezza.sparql.query.Variable;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.osgi.service.component.annotations.Component;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.List;
import java.util.Set;
/**
 * MessageBodyWriter for <code>ResultSet</code>.
 * Resulting output conforms to:
 * http://www.w3.org/TR/2007/NOTE-rdf-sparql-json-res-20070618/
 *
 * @author misl
 */
@Component(service = Object.class, property = {"javax.ws.rs=true"})
@Produces({"application/json", "application/sparql-results+json"})
@Provider
@SuppressWarnings("unchecked")
public class ResultSetJsonMessageBodyWriter implements MessageBodyWriter<ResultSet> {

    final Logger logger = LoggerFactory.getLogger(ResultSetJsonMessageBodyWriter.class);

    @Override
    public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations,
            MediaType mediaType) {
        return ResultSet.class.isAssignableFrom(type);
    }

    @Override
    public long getSize(ResultSet t, Class<?> type, Type genericType,
            Annotation[] annotations, MediaType mediaType) {
        // Length not known in advance; -1 lets the container decide.
        return -1;
    }

    @Override
    public void writeTo(ResultSet resultSet, Class<?> type, Type genericType,
            Annotation[] annotations, MediaType mediaType, MultivaluedMap<String,
            Object> httpHeaders, OutputStream entityStream) throws IOException, WebApplicationException {
        JSONObject json = toJsonSource(resultSet);
        entityStream.write(json.toJSONString().getBytes("UTF-8"));
    }

    /**
     * Helper: transforms a {@link ResultSet} or a {@link Boolean} to a
     * json object.
     *
     * @param queryResult
     */
    private JSONObject toJsonSource(ResultSet queryResult) {
        JSONObject root = new JSONObject();
        JSONObject head = new JSONObject();
        root.put("head", head);
        createVariables(queryResult.getResultVars(), head);

        JSONObject results = new JSONObject();
        root.put("results", results);
        // "bindings" is only added once there is at least one solution.
        JSONArray bindings = null;
        while (queryResult.hasNext()) {
            if (bindings == null) {
                bindings = new JSONArray();
                results.put("bindings", bindings);
            }
            bindings.add(createResult(queryResult.next()));
        }
        return root;
    }

    /**
     * Helper: creates value element from {@link RDFTerm} depending on its
     * class.
     */
    private JSONObject createResultElement(RDFTerm resource) {
        JSONObject element = new JSONObject();
        if (resource instanceof IRI) {
            element.put("type", "uri");
            element.put("value", IRI.class.cast(resource).getUnicodeString());
        } else if (resource instanceof Literal) {
            // The original code had two "instanceof Literal" branches, so the
            // "typed-literal" case was unreachable. Per the SPARQL JSON results
            // note, language-tagged literals are "literal" with "xml:lang",
            // other literals are "typed-literal" with "datatype".
            Literal literal = Literal.class.cast(resource);
            Language lang = literal.getLanguage();
            if (lang != null) {
                element.put("type", "literal");
                element.put("value", literal.getLexicalForm());
                element.put("xml:lang", lang.toString());
            } else {
                element.put("type", "typed-literal");
                element.put("datatype", literal.getDataType().getUnicodeString());
                element.put("value", literal.getLexicalForm());
            }
        } else if (resource instanceof BlankNode) {
            element.put("type", "bnode");
            element.put("value", "/");
        } else {
            element = null;
        }
        return element;
    }

    /**
     * Helper: creates results element from ResultSet
     */
    private JSONObject createResult(SolutionMapping solutionMap) {
        JSONObject result = new JSONObject();
        Set<Variable> keys = solutionMap.keySet();
        for (Variable key : keys) {
            result.put(key.getName(), createResultElement((RDFTerm) solutionMap.get(key)));
        }
        return result;
    }

    /**
     * Helper: adds the "vars" array to the head (only when variables exist).
     */
    private void createVariables(List<String> variables, JSONObject head) {
        JSONArray vars = null;
        for (String variable : variables) {
            if (vars == null) {
                vars = new JSONArray();
                head.put("vars", vars);
            }
            vars.add(variable);
        }
    }
}
| 362 |
0 | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/sparql | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/sparql/providers/ResultSetTsvMessageBodyWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.jaxrs.sparql.providers;
import org.apache.clerezza.BlankNode;
import org.apache.clerezza.IRI;
import org.apache.clerezza.Literal;
import org.apache.clerezza.RDFTerm;
import org.apache.clerezza.sparql.ResultSet;
import org.apache.clerezza.sparql.SolutionMapping;
import org.osgi.service.component.annotations.Component;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.List;
/**
 * MessageBodyWriter for <code>ResultSet</code>. Resulting output is tsv and
 * conforms to:
 * http://www.w3.org/TR/2013/REC-sparql11-results-csv-tsv-20130321/#tsv
 *
 * Also see: http://www.iana.org/assignments/media-types/text/tab-separated-values
 *
 * @author misl
 */
@Component(service = Object.class, property = {"javax.ws.rs=true"})
@Produces({"text/tab-separated-values"})
@Provider
public class ResultSetTsvMessageBodyWriter implements MessageBodyWriter<ResultSet> {

    private static final Logger logger = LoggerFactory
            .getLogger(ResultSetTsvMessageBodyWriter.class);

    private String textEncoding = "UTF-8";

    private byte[] separator;

    public ResultSetTsvMessageBodyWriter() {
        try {
            buildSeparatorConformEncoding(textEncoding);
        } catch (UnsupportedEncodingException e) {
            logger.error("Developer error", e);
        }
    }

    // --------------------------------------------------------------------------
    // Implementing MessageBodyWriter
    // --------------------------------------------------------------------------

    @Override
    public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations,
            MediaType mediaType) {
        return ResultSet.class.isAssignableFrom(type);
    }

    @Override
    public long getSize(ResultSet t, Class<?> type, Type genericType, Annotation[] annotations,
            MediaType mediaType) {
        // Length not known in advance; -1 lets the container decide.
        return -1;
    }

    @Override
    public void writeTo(ResultSet resultSet, Class<?> type, Type genericType,
            Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders,
            OutputStream entityStream) throws IOException, WebApplicationException {
        // According to spec header is mandatory.
        writeTsvHeader(entityStream, resultSet.getResultVars());
        while (resultSet.hasNext()) {
            writeTsvLine(entityStream, resultSet.getResultVars(), resultSet.next());
        }
    }

    // --------------------------------------------------------------------------
    // Public interface
    // --------------------------------------------------------------------------

    /**
     * Sets the text encoding for the resource. This setting must only used
     * if the resource response represents text.
     *
     * @param textEncoding character encoding of text body
     * @throws UnsupportedEncodingException when the given encoding is not supported.
     */
    public void setTextEncoding(String textEncoding) throws UnsupportedEncodingException {
        buildSeparatorConformEncoding(textEncoding);
        this.textEncoding = textEncoding;
    }

    /**
     * @return text encoding for resource
     */
    protected String getTextEncoding() {
        return textEncoding;
    }

    // --------------------------------------------------------------------------
    // Private methods
    // --------------------------------------------------------------------------

    /**
     * Builds the column separator according to the given text encoding.
     *
     * @param encoding the text encoding to be used.
     * @throws UnsupportedEncodingException when the given encoding is not supported.
     */
    private void buildSeparatorConformEncoding(String encoding) throws UnsupportedEncodingException {
        // TSV columns are separated by TAB; the previous "," was a copy-paste
        // from the CSV writer and produced invalid tab-separated-values output.
        separator = "\t".getBytes(encoding);
    }

    /**
     * Write result set header to the given output stream.
     *
     * @param outputStream stream to write to.
     * @param headers the headers to write.
     * @throws IOException
     */
    private void writeTsvHeader(OutputStream outputStream, List<String> headers) throws IOException {
        boolean first = true;
        for (String header : headers) {
            if (!first) {
                outputStream.write(separator);
            }
            writeEscaped(outputStream, header);
            first = false;
        }
        outputStream.write("\n".getBytes(textEncoding));
    }

    /**
     * Write a single tsv line using the given line data.
     *
     * @param outputStream stream to write to.
     * @param headers the headers to write line data for.
     * @param lineData the line data to write.
     * @throws IOException
     */
    private void writeTsvLine(OutputStream outputStream, List<String> headers,
            SolutionMapping lineData) throws IOException {
        boolean first = true;
        for (String header : headers) {
            if (!first) {
                outputStream.write(separator);
            }
            RDFTerm resource = lineData.get(header);
            if (resource != null) {
                // Unbound variables are left as empty fields.
                writeEscaped(outputStream, getResourceValue(resource));
            }
            first = false;
        }
        outputStream.write("\n".getBytes(textEncoding));
    }

    /**
     * Helper to get the proper string representation for the given RDFTerm.
     */
    private String getResourceValue(RDFTerm resource) {
        StringBuilder value = new StringBuilder();
        if (resource instanceof IRI) {
            // NOTE(review): relies on IRI.toString() yielding the angle-bracket
            // N-Triples form required by the TSV format — confirm against the
            // clerezza IRI implementation.
            value.append(resource.toString());
        } else if (resource instanceof Literal) {
            value.append("\"");
            value.append(escapedDQuotes(((Literal) resource).getLexicalForm()));
            value.append("\"");
        } else if (resource instanceof BlankNode) {
            value.append("/");
        } else {
            value.append(resource.toString());
        }
        return value.toString();
    }

    /**
     * Write the given string to the output stream and escape the output where
     * necessary.
     *
     * @param outputStream stream to write to.
     * @param text the text to write.
     * @throws IOException
     */
    private void writeEscaped(OutputStream outputStream, String text) throws IOException {
        // Escape TSV-significant characters as two-character backslash
        // sequences. The previous implementation restarted each replacement
        // from the original text (so only the last match kind survived) and its
        // regex replacement strings produced a bare letter instead of "\r" etc.
        // Backslashes are escaped first so escape markers are unambiguous.
        String line = text
                .replace("\\", "\\\\")
                .replace("\r", "\\r")
                .replace("\n", "\\n")
                .replace("\t", "\\t");
        outputStream.write(line.getBytes(textEncoding));
    }

    /**
     * Escapes embedded double quotes for use inside a quoted literal.
     */
    private String escapedDQuotes(String text) {
        // The TSV results format serializes literals like Turtle, where an
        // embedded quote is \" — not the CSV-style "" doubling used before.
        return text.replace("\"", "\\\"");
    }
}
| 363 |
0 | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/sparql | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/sparql/providers/ResultSetCsvMessageBodyWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.jaxrs.sparql.providers;
import org.apache.clerezza.BlankNode;
import org.apache.clerezza.IRI;
import org.apache.clerezza.Literal;
import org.apache.clerezza.RDFTerm;
import org.apache.clerezza.sparql.ResultSet;
import org.apache.clerezza.sparql.SolutionMapping;
import org.osgi.service.component.annotations.Component;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.List;
/**
 * MessageBodyWriter for <code>ResultSet</code>. Resulting output is csv and
 * conforms to:
 * http://www.w3.org/TR/2013/REC-sparql11-results-csv-tsv-20130321/#csv
 *
 * Also see: http://tools.ietf.org/html/rfc4180
 *
 * @author misl
 */
@Component(service = Object.class, property = {"javax.ws.rs=true"})
@Produces({"text/csv"})
@Provider
public class ResultSetCsvMessageBodyWriter implements MessageBodyWriter<ResultSet> {

    private static final Logger logger = LoggerFactory
            .getLogger(ResultSetCsvMessageBodyWriter.class);

    private String textEncoding = "UTF-8";

    private byte[] separator;

    public ResultSetCsvMessageBodyWriter() {
        try {
            buildSeparatorConformEncoding(textEncoding);
        } catch (UnsupportedEncodingException e) {
            logger.error("Developer error", e);
        }
    }

    // --------------------------------------------------------------------------
    // Implementing MessageBodyWriter
    // --------------------------------------------------------------------------

    @Override
    public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations,
            MediaType mediaType) {
        return ResultSet.class.isAssignableFrom(type);
    }

    @Override
    public long getSize(ResultSet t, Class<?> type, Type genericType, Annotation[] annotations,
            MediaType mediaType) {
        // Length not known in advance; -1 lets the container decide.
        return -1;
    }

    @Override
    public void writeTo(ResultSet resultSet, Class<?> type, Type genericType,
            Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders,
            OutputStream entityStream) throws IOException, WebApplicationException {
        // According to spec header is mandatory.
        writeCsvHeader(entityStream, resultSet.getResultVars());
        while (resultSet.hasNext()) {
            writeCsvLine(entityStream, resultSet.getResultVars(), resultSet.next());
        }
    }

    // --------------------------------------------------------------------------
    // Public interface
    // --------------------------------------------------------------------------

    /**
     * Sets the text encoding for the resource. This setting must only used
     * if the resource response represents text.
     *
     * @param textEncoding character encoding of text body
     * @throws UnsupportedEncodingException when the given encoding is not supported.
     */
    public void setTextEncoding(String textEncoding) throws UnsupportedEncodingException {
        buildSeparatorConformEncoding(textEncoding);
        this.textEncoding = textEncoding;
    }

    /**
     * @return text encoding for resource
     */
    protected String getTextEncoding() {
        return textEncoding;
    }

    // --------------------------------------------------------------------------
    // Private methods
    // --------------------------------------------------------------------------

    /**
     * Builds the column separator according to the given text encoding.
     *
     * @param encoding the text encoding to be used.
     * @throws UnsupportedEncodingException when the given encoding is not supported.
     */
    private void buildSeparatorConformEncoding(String encoding) throws UnsupportedEncodingException {
        separator = ",".getBytes(encoding);
    }

    /**
     * Write result set header to the given output stream.
     *
     * @param outputStream stream to write to.
     * @param headers the headers to write.
     * @throws IOException
     */
    private void writeCsvHeader(OutputStream outputStream, List<String> headers) throws IOException {
        boolean first = true;
        for (String header : headers) {
            if (!first) {
                outputStream.write(separator);
            }
            writeEscaped(outputStream, header);
            first = false;
        }
        // The CSV results format (and RFC 4180) mandate CRLF line endings;
        // a bare "\n" was written previously.
        outputStream.write("\r\n".getBytes(textEncoding));
    }

    /**
     * Write a single csv line using the given line data.
     *
     * @param outputStream stream to write to.
     * @param headers the headers to write line data for.
     * @param lineData the line data to write.
     * @throws IOException
     */
    private void writeCsvLine(OutputStream outputStream, List<String> headers,
            SolutionMapping lineData) throws IOException {
        boolean first = true;
        for (String header : headers) {
            if (!first) {
                outputStream.write(separator);
            }
            RDFTerm resource = lineData.get(header);
            if (resource != null) {
                // Unbound variables are left as empty fields.
                writeEscaped(outputStream, getResourceValue(resource));
            }
            first = false;
        }
        // CRLF per the CSV results format / RFC 4180.
        outputStream.write("\r\n".getBytes(textEncoding));
    }

    /**
     * Helper to get the proper string representation for the given RDFTerm.
     */
    private String getResourceValue(RDFTerm resource) {
        StringBuilder value = new StringBuilder();
        if (resource instanceof IRI) {
            value.append(((IRI) resource).getUnicodeString());
        } else if (resource instanceof Literal) {
            value.append(((Literal) resource).getLexicalForm());
        } else if (resource instanceof BlankNode) {
            value.append("/");
        } else {
            value.append(resource.toString());
        }
        return value.toString();
    }

    /**
     * Write the given string to the output stream and escape the output where
     * necessary.
     *
     * @param outputStream stream to write to.
     * @param text the text to write.
     * @throws IOException
     */
    private void writeEscaped(OutputStream outputStream, String text) throws IOException {
        String line = text;
        // RFC 4180: fields containing CR, LF, comma or quote are wrapped in
        // quotes, with embedded quotes doubled.
        if (text.contains("\r") || text.contains("\n") || text.contains(",")
                || text.contains("\"")) {
            StringBuilder builder = new StringBuilder();
            builder.append('"');
            builder.append(text.replaceAll("\"", "\"\""));
            builder.append('"');
            line = builder.toString();
        }
        outputStream.write(line.getBytes(textEncoding));
    }
}
| 364 |
0 | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/rdf | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/rdf/providers/GraphWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.jaxrs.rdf.providers;
import org.apache.clerezza.Graph;
import org.apache.clerezza.representation.Serializer;
import org.apache.clerezza.representation.SupportedFormat;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.Objects;
@Component(service = Object.class, property = {"javax.ws.rs=true"})
@Provider
@Produces({SupportedFormat.N3, SupportedFormat.N_TRIPLE,
        SupportedFormat.RDF_XML, SupportedFormat.TURTLE,
        SupportedFormat.X_TURTLE, SupportedFormat.RDF_JSON})
public class GraphWriter implements MessageBodyWriter<Graph> {

    // Serializer performing the actual work; defaults to the shared instance
    // until (and unless) an OSGi service is bound.
    private Serializer serializer = Serializer.getInstance();

    @Reference
    public synchronized void setSerializer(Serializer serializer) {
        this.serializer = serializer;
    }

    public synchronized void unsetSerializer(Serializer serializer) {
        boolean isCurrentlyBound = Objects.equals(this.serializer, serializer);
        if (isCurrentlyBound) {
            // Fall back to the default serializer when the bound one goes away.
            this.serializer = Serializer.getInstance();
        }
    }

    @Override
    public boolean isWriteable(Class<?> type, Type genericType,
            Annotation[] annotations, MediaType mediaType) {
        boolean handlesType = Graph.class.isAssignableFrom(type);
        return handlesType;
    }

    @Override
    public long getSize(Graph graph, Class<?> type, Type genericType,
            Annotation[] annotations, MediaType mediaType) {
        // Serialized length is not known up front.
        return -1;
    }

    @Override
    public void writeTo(Graph graph, Class<?> type, Type genericType,
            Annotation[] annotations, MediaType mediaType,
            MultivaluedMap<String, Object> httpHeaders,
            OutputStream entityStream) throws IOException, WebApplicationException {
        String formatIdentifier = mediaType.toString();
        serializer.serialize(entityStream, graph, formatIdentifier);
    }
}
| 365 |
0 | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/rdf | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/rdf/providers/GraphNodeWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.jaxrs.rdf.providers;
import org.apache.clerezza.Graph;
import org.apache.clerezza.IRI;
import org.apache.clerezza.RDFTerm;
import org.apache.clerezza.Triple;
import org.apache.clerezza.implementation.in_memory.SimpleGraph;
import org.apache.clerezza.utils.GraphNode;
import org.apache.clerezza.representation.Serializer;
import org.apache.clerezza.representation.SupportedFormat;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
/**
* By default this returns a serialization of the context of the GraphNode.
*
* The expansion can be widened by using the query parameters xPropObj and
* xProSubj. These parameters specify property uris (both parameters might be
* repeated). For the specified properties their objects respectively subjects
* are expanded as if they were bnodes.
*
* @author reto
*/
@Component(service = Object.class, property = {"javax.ws.rs=true"})
@Provider
@Produces({SupportedFormat.N3, SupportedFormat.N_TRIPLE,
SupportedFormat.RDF_XML, SupportedFormat.TURTLE,
SupportedFormat.X_TURTLE, SupportedFormat.RDF_JSON})
public class GraphNodeWriter implements MessageBodyWriter<GraphNode> {
public static final String OBJ_EXP_PARAM = "xPropObj";
public static final String SUBJ_EXP_PARAM = "xPropSubj";
private UriInfo uriInfo;
private Serializer serializer = Serializer.getInstance();
@Reference
public synchronized void setSerializer(Serializer serializer) {
this.serializer = serializer;
}
public synchronized void unsetSerializer(Serializer serializer) {
if (Objects.equals(this.serializer, serializer)) {
this.serializer = Serializer.getInstance();
}
}
@Override
public boolean isWriteable(Class<?> type, Type genericType,
Annotation[] annotations, MediaType mediaType) {
return GraphNode.class.isAssignableFrom(type);
}
@Override
public long getSize(GraphNode n, Class<?> type, Type genericType,
Annotation[] annotations, MediaType mediaType) {
return -1;
}
@Override
public void writeTo(GraphNode node, Class<?> type, Type genericType,
Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, Object> httpHeaders,
OutputStream entityStream) throws IOException, WebApplicationException {
serializer.serialize(entityStream, getExpandedContext(node), mediaType.toString());
}
@Context
public void setUriInfo(UriInfo uriInfo) {
this.uriInfo = uriInfo;
}
private Graph getExpandedContext(GraphNode node) {
final Graph result = new SimpleGraph(node.getNodeContext());
final Set<RDFTerm> expandedResources = new HashSet<RDFTerm>();
expandedResources.add(node.getNode());
while (true) {
Set<RDFTerm> additionalExpansionRes = getAdditionalExpansionResources(result);
additionalExpansionRes.removeAll(expandedResources);
if (additionalExpansionRes.size() == 0) {
return result;
}
for (RDFTerm resource : additionalExpansionRes) {
final GraphNode additionalNode = new GraphNode(resource, node.getGraph());
result.addAll(additionalNode.getNodeContext());
expandedResources.add(resource);
}
}
}
private Set<RDFTerm> getAdditionalExpansionResources(Graph tc) {
final Set<IRI> subjectExpansionProperties = getSubjectExpansionProperties();
final Set<IRI> objectExpansionProperties = getObjectExpansionProperties();
final Set<RDFTerm> result = new HashSet<RDFTerm>();
if ((subjectExpansionProperties.size() > 0)
|| (objectExpansionProperties.size() > 0)) {
for (Triple triple : tc) {
final IRI predicate = triple.getPredicate();
if (subjectExpansionProperties.contains(predicate)) {
result.add(triple.getSubject());
}
if (objectExpansionProperties.contains(predicate)) {
result.add(triple.getObject());
}
}
}
return result;
}
private Set<IRI> getSubjectExpansionProperties() {
final MultivaluedMap<String, String> queryParams = uriInfo.getQueryParameters();
final List<String> paramValues = queryParams.get(SUBJ_EXP_PARAM);
if (paramValues == null) {
return new HashSet<IRI>(0);
}
final Set<IRI> result = new HashSet<IRI>(paramValues.size());
for (String uriString : paramValues) {
result.add(new IRI(uriString));
}
return result;
}
private Set<IRI> getObjectExpansionProperties() {
final MultivaluedMap<String, String> queryParams = uriInfo.getQueryParameters();
final List<String> paramValues = queryParams.get(OBJ_EXP_PARAM);
if (paramValues == null) {
return new HashSet<IRI>(0);
}
final Set<IRI> result = new HashSet<IRI>(paramValues.size());
for (String uriString : paramValues) {
result.add(new IRI(uriString));
}
return result;
}
}
| 366 |
0 | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/rdf | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/rdf/providers/GraphReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.jaxrs.rdf.providers;
import org.apache.clerezza.Graph;
import org.apache.clerezza.implementation.in_memory.SimpleGraph;
import org.apache.clerezza.representation.Parser;
import org.apache.clerezza.representation.SupportedFormat;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
import javax.ws.rs.Consumes;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyReader;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
import java.io.InputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.Objects;
@Component(service = Object.class, property = {"javax.ws.rs=true"})
@Provider
@Consumes({SupportedFormat.N3, SupportedFormat.N_TRIPLE,
    SupportedFormat.RDF_XML, SupportedFormat.TURTLE,
    SupportedFormat.X_TURTLE, SupportedFormat.RDF_JSON})
public class GraphReader implements MessageBodyReader<Graph> {

    // Defaults to the singleton so the reader works without an injected service.
    private Parser parser = Parser.getInstance();

    @Reference
    public synchronized void setParser(Parser parser) {
        this.parser = parser;
    }

    public synchronized void unsetParser(Parser parser) {
        if (Objects.equals(this.parser, parser)) {
            this.parser = Parser.getInstance();
        }
    }

    @Override
    public boolean isReadable(Class<?> type, Type genericType,
            Annotation[] annotations, MediaType mediaType) {
        return type.isAssignableFrom(Graph.class);
    }

    /**
     * Parses the entity stream into a Graph using the media type as the
     * format identifier. The previously allocated {@code SimpleGraph} local
     * was never used — the parser constructs the result graph itself.
     */
    @Override
    public Graph readFrom(Class<Graph> type, Type genericType, Annotation[] annotations,
            MediaType mediaType, MultivaluedMap<String, String> httpHeaders,
            InputStream entityStream) throws IOException, WebApplicationException {
        return parser.parse(entityStream, mediaType.toString());
    }
}
| 367 |
0 | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/rdf | Create_ds/clerezza/jaxrs.rdf.providers/src/main/java/org/apache/clerezza/jaxrs/rdf/providers/ImmutableGraphReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.jaxrs.rdf.providers;
import org.apache.clerezza.ImmutableGraph;
import org.apache.clerezza.representation.Parser;
import org.apache.clerezza.representation.SupportedFormat;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
import javax.ws.rs.Consumes;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyReader;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
import java.io.InputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.Objects;
@Component(service = Object.class, property = {"javax.ws.rs=true"})
@Provider
@Consumes({SupportedFormat.N3, SupportedFormat.N_TRIPLE,
    SupportedFormat.RDF_XML, SupportedFormat.TURTLE,
    SupportedFormat.X_TURTLE, SupportedFormat.RDF_JSON})
public class ImmutableGraphReader implements MessageBodyReader<ImmutableGraph> {

    // Default to the singleton (consistent with GraphReader) instead of null:
    // a null parser made readFrom throw NPE whenever no OSGi service was bound.
    private Parser parser = Parser.getInstance();

    @Reference
    public synchronized void setParser(Parser parser) {
        this.parser = parser;
    }

    public synchronized void unsetParser(Parser parser) {
        if (Objects.equals(this.parser, parser)) {
            // fall back to the singleton rather than leaving the field null
            this.parser = Parser.getInstance();
        }
    }

    @Override
    public boolean isReadable(Class<?> type, Type genericType,
            Annotation[] annotations, MediaType mediaType) {
        return type.isAssignableFrom(ImmutableGraph.class);
    }

    /**
     * Parses the entity stream into an ImmutableGraph using the media type
     * as the format identifier.
     */
    @Override
    public ImmutableGraph readFrom(Class<ImmutableGraph> type, Type genericType,
            Annotation[] annotations, MediaType mediaType,
            MultivaluedMap<String, String> httpHeaders,
            InputStream entityStream)
            throws IOException, WebApplicationException {
        return parser.parse(entityStream, mediaType.toString());
    }
}
| 368 |
0 | Create_ds/clerezza/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl | Create_ds/clerezza/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/DadminTest.java | /*
* Copyright 2015 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.clerezza.commons.rdf.impl.sparql;
import com.hp.hpl.jena.query.DatasetAccessor;
import com.hp.hpl.jena.query.DatasetAccessorFactory;
import java.io.IOException;
import java.net.ServerSocket;
import org.apache.jena.fuseki.EmbeddedFusekiServer;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import java.io.InputStream;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Language;
import org.apache.clerezza.commons.rdf.Literal;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
import org.apache.clerezza.rdf.core.serializedform.Serializer;
import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/**
*
* @author reto
*/
public class DadminTest {

    final static int serverPort = findFreePort();
    static EmbeddedFusekiServer server;

    /** Starts an embedded Fuseki server and loads the dadmin.ttl fixture. */
    @BeforeClass
    public static void prepare() throws IOException {
        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
        final InputStream in = DadminTest.class.getResourceAsStream("dadmin.ttl");
        final Model m = ModelFactory.createDefaultModel();
        String base = "http://example.org/";
        m.read(in, base, "TURTLE");
        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");
        server.start();
        System.out.println("Started fuseki on port " + serverPort);
        accessor.putModel(m);
    }

    @AfterClass
    public static void cleanup() {
        server.stop();
    }

    @Test
    public void graphSize() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
        Assert.assertEquals("Graph not of the expected size", 1, graph.size());
    }

    @Test
    public void dump() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
        Serializer serializer = Serializer.getInstance();
        serializer.serialize(System.out, graph, SupportedFormat.TURTLE);
    }

    /**
     * Finds a TCP port that was free at probe time.
     *
     * @return the port number
     * @throws RuntimeException (with the underlying cause preserved) if no
     *         free port could be obtained
     */
    public static int findFreePort() {
        try (ServerSocket socket = new ServerSocket(0)) {
            return socket.getLocalPort();
        } catch (IOException e) {
            // keep the cause instead of swallowing it
            throw new RuntimeException("unable to find a free port", e);
        }
    }
}
| 369 |
0 | Create_ds/clerezza/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl | Create_ds/clerezza/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/Dadmin2Test.java | /*
* Copyright 2015 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.clerezza.commons.rdf.impl.sparql;
import com.hp.hpl.jena.query.DatasetAccessor;
import com.hp.hpl.jena.query.DatasetAccessorFactory;
import java.io.IOException;
import java.net.ServerSocket;
import org.apache.jena.fuseki.EmbeddedFusekiServer;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import java.io.InputStream;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Language;
import org.apache.clerezza.commons.rdf.Literal;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
import org.apache.clerezza.rdf.core.serializedform.Serializer;
import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/**
*
* @author reto
*/
public class Dadmin2Test {

    final static int serverPort = findFreePort();
    static EmbeddedFusekiServer server;

    /** Starts an embedded Fuseki server and loads the dadmin2.ttl fixture. */
    @BeforeClass
    public static void prepare() throws IOException {
        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
        final InputStream in = Dadmin2Test.class.getResourceAsStream("dadmin2.ttl");
        final Model m = ModelFactory.createDefaultModel();
        String base = "http://example.org/";
        m.read(in, base, "TURTLE");
        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");
        server.start();
        System.out.println("Started fuseki on port " + serverPort);
        accessor.putModel(m);
    }

    @AfterClass
    public static void cleanup() {
        server.stop();
    }

    @Test
    public void graphSize() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
        Assert.assertEquals("Graph not of the expected size", 12, graph.size());
    }

    @Test
    public void dump() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
        Serializer serializer = Serializer.getInstance();
        serializer.serialize(System.out, graph, SupportedFormat.TURTLE);
    }

    /**
     * Finds a TCP port that was free at probe time.
     *
     * @return the port number
     * @throws RuntimeException (with the underlying cause preserved) if no
     *         free port could be obtained
     */
    public static int findFreePort() {
        try (ServerSocket socket = new ServerSocket(0)) {
            return socket.getLocalPort();
        } catch (IOException e) {
            // keep the cause instead of swallowing it
            throw new RuntimeException("unable to find a free port", e);
        }
    }
}
| 370 |
0 | Create_ds/clerezza/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl | Create_ds/clerezza/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SimilarBNodes.java | /*
* Copyright 2015 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.clerezza.commons.rdf.impl.sparql;
import com.hp.hpl.jena.query.DatasetAccessor;
import com.hp.hpl.jena.query.DatasetAccessorFactory;
import java.io.IOException;
import java.net.ServerSocket;
import org.apache.jena.fuseki.EmbeddedFusekiServer;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import java.io.InputStream;
import java.util.Iterator;
import org.apache.clerezza.commons.rdf.BlankNode;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Triple;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/**
*
* @author reto
*/
public class SimilarBNodes {

    final static int serverPort = findFreePort();
    static EmbeddedFusekiServer server;

    /** Starts an embedded Fuseki server and loads the similar-bnodes.ttl fixture. */
    @BeforeClass
    public static void prepare() throws IOException {
        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
        final InputStream in = SimilarBNodes.class.getResourceAsStream("similar-bnodes.ttl");
        final Model m = ModelFactory.createDefaultModel();
        String base = "http://example.org/";
        m.read(in, base, "TURTLE");
        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");
        server.start();
        System.out.println("Started fuseki on port " + serverPort);
        accessor.putModel(m);
    }

    @AfterClass
    public static void cleanup() {
        server.stop();
    }

    @Test
    public void graphSize() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
        Assert.assertEquals("Graph not of the expected size", 2, graph.size());
    }

    /**
     * Two distinct blank-node subjects of foaf:knows triples must map to
     * distinct (non-equal) BlankNode instances.
     */
    @Test
    public void foafKnowsFilter() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
        final IRI foafKnows = new IRI("http://xmlns.com/foaf/0.1/knows");
        final Iterator<Triple> iter = graph.filter(null, foafKnows, null);
        Assert.assertTrue(iter.hasNext());
        final Triple triple1 = iter.next();
        final BlankNodeOrIRI subject1 = triple1.getSubject();
        Assert.assertTrue(subject1 instanceof BlankNode);
        Assert.assertTrue(iter.hasNext());
        final Triple triple2 = iter.next();
        final BlankNodeOrIRI subject2 = triple2.getSubject();
        Assert.assertTrue(subject2 instanceof BlankNode);
        Assert.assertNotEquals(subject1, subject2);
    }

    /**
     * Finds a TCP port that was free at probe time.
     *
     * @return the port number
     * @throws RuntimeException (with the underlying cause preserved) if no
     *         free port could be obtained
     */
    public static int findFreePort() {
        try (ServerSocket socket = new ServerSocket(0)) {
            return socket.getLocalPort();
        } catch (IOException e) {
            // keep the cause instead of swallowing it
            throw new RuntimeException("unable to find a free port", e);
        }
    }
}
| 371 |
0 | Create_ds/clerezza/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl | Create_ds/clerezza/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraphTest.java | /*
* Copyright 2015 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.clerezza.commons.rdf.impl.sparql;
import com.hp.hpl.jena.query.DatasetAccessor;
import com.hp.hpl.jena.query.DatasetAccessorFactory;
import java.io.IOException;
import java.net.ServerSocket;
import org.apache.jena.fuseki.EmbeddedFusekiServer;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import java.io.InputStream;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Language;
import org.apache.clerezza.commons.rdf.Literal;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/**
*
* @author reto
*/
public class SparqlGraphTest {

    final static int serverPort = findFreePort();
    static EmbeddedFusekiServer server;

    /** Starts an embedded Fuseki server and loads the grounded.ttl fixture. */
    @BeforeClass
    public static void prepare() throws IOException {
        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
        final InputStream in = SparqlGraphTest.class.getResourceAsStream("grounded.ttl");
        final Model m = ModelFactory.createDefaultModel();
        String base = "http://example.org/";
        m.read(in, base, "TURTLE");
        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");
        server.start();
        System.out.println("Started fuseki on port " + serverPort);
        accessor.putModel(m);
    }

    @AfterClass
    public static void cleanup() {
        server.stop();
    }

    @Test
    public void graphSize() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
        Assert.assertEquals("Graph not of the expected size", 8, graph.size());
    }

    /**
     * Filtering by (subject, null, object) and (subject, predicate, null)
     * must return exactly the triples present in the fixture, including the
     * two language-tagged names of spiderman.
     */
    @Test
    public void filter1() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
        final IRI spiderman = new IRI("http://example.org/#spiderman");
        final IRI greenGoblin = new IRI("http://example.org/#green-goblin");
        final IRI enemyOf = new IRI("http://www.perceive.net/schemas/relationship/enemyOf");
        final IRI foafName = new IRI("http://xmlns.com/foaf/0.1/name");
        {
            final Iterator<Triple> iter = graph.filter(spiderman, null, greenGoblin);
            Assert.assertTrue(iter.hasNext());
            Assert.assertEquals(enemyOf, iter.next().getPredicate());
            Assert.assertFalse(iter.hasNext());
        }
        {
            final Iterator<Triple> iter = graph.filter(spiderman, foafName, null);
            Set<Literal> names = new HashSet<>();
            for (int i = 0; i < 2; i++) {
                Assert.assertTrue(iter.hasNext());
                RDFTerm name = iter.next().getObject();
                Assert.assertTrue(name instanceof Literal);
                names.add((Literal) name);
            }
            Assert.assertFalse(iter.hasNext());
            Assert.assertTrue(names.contains(new PlainLiteralImpl("Spiderman")));
            Assert.assertTrue(names.contains(new PlainLiteralImpl("Человек-паук", new Language("ru"))));
        }
    }

    /**
     * Finds a TCP port that was free at probe time.
     *
     * @return the port number
     * @throws RuntimeException (with the underlying cause preserved) if no
     *         free port could be obtained
     */
    public static int findFreePort() {
        try (ServerSocket socket = new ServerSocket(0)) {
            return socket.getLocalPort();
        } catch (IOException e) {
            // keep the cause instead of swallowing it
            throw new RuntimeException("unable to find a free port", e);
        }
    }
}
| 372 |
0 | Create_ds/clerezza/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl | Create_ds/clerezza/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeTest.java | /*
* Copyright 2015 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.clerezza.commons.rdf.impl.sparql;
import com.hp.hpl.jena.query.DatasetAccessor;
import com.hp.hpl.jena.query.DatasetAccessorFactory;
import java.io.IOException;
import java.net.ServerSocket;
import org.apache.jena.fuseki.EmbeddedFusekiServer;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import java.io.InputStream;
import java.util.Iterator;
import org.apache.clerezza.commons.rdf.BlankNode;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/**
*
* @author reto
*/
public class BNodeTest {

    final static int serverPort = findFreePort();
    static EmbeddedFusekiServer server;

    /** Starts an embedded Fuseki server and loads the simple-bnode.ttl fixture. */
    @BeforeClass
    public static void prepare() throws IOException {
        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
        final InputStream in = BNodeTest.class.getResourceAsStream("simple-bnode.ttl");
        final Model m = ModelFactory.createDefaultModel();
        String base = "http://example.org/";
        m.read(in, base, "TURTLE");
        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");
        server.start();
        System.out.println("Started fuseki on port " + serverPort);
        accessor.putModel(m);
    }

    @AfterClass
    public static void cleanup() {
        server.stop();
    }

    @Test
    public void graphSize() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
        Assert.assertEquals("Graph not of the expected size", 3, graph.size());
    }

    /**
     * Filtering with a BlankNode that cannot be in the graph must yield an
     * empty result.
     */
    @Test
    public void filterAlienBNode() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
        final BlankNode blankNode = new BlankNode();
        final Iterator<Triple> iter = graph.filter(blankNode, null, null);
        Assert.assertFalse(iter.hasNext());
    }

    /**
     * The same underlying blank node reached via different filters must be
     * represented by equal BlankNode instances.
     */
    @Test
    public void bNodeIdentity() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
        final IRI foafPerson = new IRI("http://xmlns.com/foaf/0.1/Person");
        final IRI foafName = new IRI("http://xmlns.com/foaf/0.1/name");
        final IRI foafKnows = new IRI("http://xmlns.com/foaf/0.1/knows");
        final IRI rdfType = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
        final Iterator<Triple> iter = graph.filter(null, foafName, null);
        Assert.assertTrue(iter.hasNext());
        final BlankNodeOrIRI namedThing = iter.next().getSubject();
        Assert.assertTrue(namedThing instanceof BlankNode);
        final Iterator<Triple> iter2 = graph.filter(null, rdfType, foafPerson);
        Assert.assertTrue(iter2.hasNext());
        final BlankNodeOrIRI person = iter2.next().getSubject();
        Assert.assertTrue(person instanceof BlankNode);
        Assert.assertEquals(namedThing, person);
        final Iterator<Triple> iter3 = graph.filter(null, foafKnows, null);
        Assert.assertTrue(iter3.hasNext());
        final RDFTerm knownThing = iter3.next().getObject();
        Assert.assertTrue(knownThing instanceof BlankNode);
        Assert.assertEquals(knownThing, person);
        Assert.assertEquals(namedThing, knownThing);
    }

    /**
     * A blank node found by one filter must be usable as the subject of a
     * subsequent filter.
     */
    @Test
    public void filter1() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
        final IRI foafPerson = new IRI("http://xmlns.com/foaf/0.1/Person");
        final IRI foafName = new IRI("http://xmlns.com/foaf/0.1/name");
        final IRI rdfType = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
        final Iterator<Triple> iter = graph.filter(null, foafName, null);
        Assert.assertTrue(iter.hasNext());
        final BlankNodeOrIRI person = iter.next().getSubject();
        Assert.assertTrue(person instanceof BlankNode);
        final Iterator<Triple> iter2 = graph.filter(person, rdfType, null);
        Assert.assertTrue(iter2.hasNext());
    }

    /**
     * Finds a TCP port that was free at probe time.
     *
     * @return the port number
     * @throws RuntimeException (with the underlying cause preserved) if no
     *         free port could be obtained
     */
    public static int findFreePort() {
        try (ServerSocket socket = new ServerSocket(0)) {
            return socket.getLocalPort();
        } catch (IOException e) {
            // keep the cause instead of swallowing it
            throw new RuntimeException("unable to find a free port", e);
        }
    }
}
| 373 |
0 | Create_ds/clerezza/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl | Create_ds/clerezza/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/BNodeCircleTest.java | /*
* Copyright 2015 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.clerezza.commons.rdf.impl.sparql;
import com.hp.hpl.jena.query.DatasetAccessor;
import com.hp.hpl.jena.query.DatasetAccessorFactory;
import java.io.IOException;
import java.net.ServerSocket;
import org.apache.jena.fuseki.EmbeddedFusekiServer;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import java.io.InputStream;
import java.util.Iterator;
import org.apache.clerezza.commons.rdf.BlankNode;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/**
*
* @author reto
*/
public class BNodeCircleTest {

    // Chosen once at class-load time so the embedded server and all test
    // methods agree on the port.
    final static int serverPort = findFreePort();
    static EmbeddedFusekiServer server;

    /**
     * Starts an embedded Fuseki server backed by an in-memory TDB dataset
     * and loads the bnode-circle.ttl resource (two blank nodes referencing
     * each other) into it.
     */
    @BeforeClass
    public static void prepare() throws IOException {
        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
        final InputStream in = BNodeCircleTest.class.getResourceAsStream("bnode-circle.ttl");
        final Model m = ModelFactory.createDefaultModel();
        String base = "http://example.org/";
        m.read(in, base, "TURTLE");
        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
        server.start();
        System.out.println("Started fuseki on port " + serverPort);
        accessor.putModel(m);
    }

    /** Shuts the embedded Fuseki server down after all tests ran. */
    @AfterClass
    public static void cleanup() {
        server.stop();
    }

    /** The bnode-circle data set consists of exactly two triples. */
    @Test
    public void graphSize() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
        Assert.assertEquals("Graph not of the expected size", 2, graph.size());
    }

    /**
     * A wildcard filter must return triples whose subject and object are
     * two distinct blank nodes.
     */
    @Test
    public void nullFilter() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");
        final Iterator<Triple> iter = graph.filter(null, null, null);
        Assert.assertTrue(iter.hasNext());
        final Triple triple1 = iter.next();
        final BlankNodeOrIRI subject = triple1.getSubject();
        final RDFTerm object = triple1.getObject();
        Assert.assertTrue(subject instanceof BlankNode);
        Assert.assertTrue(object instanceof BlankNode);
        // the two nodes form a circle, so they must not collapse into one
        Assert.assertNotEquals(subject, object);
        Assert.assertTrue(iter.hasNext());
    }

    /**
     * Filtering by the foaf:knows predicate must likewise yield triples
     * between two distinct blank nodes.
     */
    @Test
    public void foafKnowsFilter() {
        final Graph graph = new SparqlGraph("http://localhost:" + serverPort + "/ds/query");

        final IRI foafKnows = new IRI("http://xmlns.com/foaf/0.1/knows");

        final Iterator<Triple> iter = graph.filter(null, foafKnows, null);
        Assert.assertTrue(iter.hasNext());
        final Triple triple1 = iter.next();
        final BlankNodeOrIRI subject = triple1.getSubject();
        final RDFTerm object = triple1.getObject();
        Assert.assertTrue(subject instanceof BlankNode);
        Assert.assertTrue(object instanceof BlankNode);
        Assert.assertNotEquals(subject, object);
        Assert.assertTrue(iter.hasNext());
    }

    /**
     * Returns a currently free TCP port by binding a server socket to
     * port 0 and reading back the OS-assigned port number.
     */
    public static int findFreePort() {
        int port = 0;
        try (ServerSocket server = new ServerSocket(0)) {
            port = server.getLocalPort();
        } catch (Exception e) {
            // keep the cause so bind failures remain diagnosable
            throw new RuntimeException("unable to find a free port", e);
        }
        return port;
    }
}
| 374 |
0 | Create_ds/clerezza/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl | Create_ds/clerezza/impl.sparql/src/test/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlClientTest.java | /*
* Copyright 2015 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.clerezza.commons.rdf.impl.sparql;
import com.hp.hpl.jena.query.DatasetAccessor;
import com.hp.hpl.jena.query.DatasetAccessorFactory;
import java.io.IOException;
import java.net.ServerSocket;
import org.apache.jena.fuseki.EmbeddedFusekiServer;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/**
*
* @author reto
*/
public class SparqlClientTest {

    // Chosen once at class-load time so the embedded server and all test
    // methods agree on the port.
    final static int serverPort = findFreePort();
    static EmbeddedFusekiServer server;

    /**
     * Starts an embedded Fuseki server backed by an in-memory TDB dataset
     * and loads the grounded.ttl resource into it.
     */
    @BeforeClass
    public static void prepare() throws IOException {
        final String serviceURI = "http://localhost:" + serverPort + "/ds/data";
        final DatasetAccessor accessor = DatasetAccessorFactory.createHTTP(serviceURI);
        final InputStream in = SparqlClientTest.class.getResourceAsStream("grounded.ttl");
        final Model m = ModelFactory.createDefaultModel();
        String base = "http://example.org/";
        m.read(in, base, "TURTLE");
        server = EmbeddedFusekiServer.memTDB(serverPort, "/ds");//dataSet.getAbsolutePath());
        server.start();
        System.out.println("Started fuseki on port " + serverPort);
        accessor.putModel(m);
    }

    /** Shuts the embedded Fuseki server down after all tests ran. */
    @AfterClass
    public static void cleanup() {
        server.stop();
    }

    /** A SELECT query must yield one row per binding of ?name. */
    @Test
    public void select() throws IOException {
        final SparqlClient sparqlClient = new SparqlClient(
                "http://localhost:" + serverPort + "/ds/query");
        List<Map<String, RDFTerm>> result = sparqlClient.queryResultSet(
                "SELECT ?name WHERE { "
                + "<http://example.org/#spiderman> "
                + "<http://xmlns.com/foaf/0.1/name> ?name}");
        Assert.assertEquals("There should be two names", 2, result.size());
    }

    /** An ASK query must yield a Boolean result. */
    @Test
    public void ask() throws IOException {
        final SparqlClient sparqlClient = new SparqlClient(
                "http://localhost:" + serverPort + "/ds/query");
        Object result = sparqlClient.queryResult(
                "ASK { "
                + "<http://example.org/#spiderman> "
                + "<http://xmlns.com/foaf/0.1/name> ?name}");
        Assert.assertEquals("ASK should result to true", Boolean.TRUE, result);
    }

    // NOTE(review): method name has a typo ("desribe"), but renaming a
    // public test method would change the class's public surface.
    /** A DESCRIBE query must yield a Graph rather than a result set. */
    @Test
    public void desribe() throws IOException {
        final SparqlClient sparqlClient = new SparqlClient(
                "http://localhost:" + serverPort + "/ds/query");
        Object result = sparqlClient.queryResult(
                "DESCRIBE <http://example.org/#spiderman>");
        Assert.assertTrue("DESCRIBE should return a graph", result instanceof Graph);
    }

    /**
     * Returns a currently free TCP port by binding a server socket to
     * port 0 and reading back the OS-assigned port number.
     */
    public static int findFreePort() {
        int port = 0;
        try (ServerSocket server = new ServerSocket(0)) {
            port = server.getLocalPort();
        } catch (Exception e) {
            // keep the cause so bind failures remain diagnosable
            throw new RuntimeException("unable to find a free port", e);
        }
        return port;
    }
}
| 375 |
0 | Create_ds/clerezza/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl | Create_ds/clerezza/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlBNode.java | /*
* Copyright 2015 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.clerezza.commons.rdf.impl.sparql;
import java.util.Collection;
import java.util.Objects;
import org.apache.clerezza.commons.rdf.BlankNode;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.ImmutableGraph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
/**
*
* @author developer
*/
class SparqlBNode extends BlankNode {

    // Placeholder IRI substituted for the node itself inside its context
    // graph, so contexts of different nodes can be compared structurally.
    final static IRI internalBNodeId = new IRI("urn:x-internalid:fdmpoihdfw");

    // The context (surrounding triples) of this blank node, with the node
    // itself replaced by internalBNodeId.
    final ImmutableGraph context;
    // Distinguishes nodes whose contexts are isomorphic.
    private final int isoDistinguisher;

    /**
     * Creates a blank node identified by its context.
     *
     * @param node the raw blank node from a SPARQL result
     * @param context the triples surrounding {@code node}
     * @param isoDistinguisher tie-breaker for isomorphic contexts
     */
    SparqlBNode(BlankNode node, Collection<Triple> context, int isoDistinguisher) {
        this.isoDistinguisher = isoDistinguisher;
        final SimpleGraph contextBuider = new SimpleGraph();
        for (Triple triple : context) {
            BlankNodeOrIRI subject = triple.getSubject();
            RDFTerm object = triple.getObject();
            // replace the node by the internal placeholder on both ends
            contextBuider.add(new TripleImpl(subject.equals(node) ? internalBNodeId : subject,
                    triple.getPredicate(),
                    object.equals(node) ? internalBNodeId : object));
        }
        this.context = contextBuider.getImmutableGraph();
    }

    @Override
    public int hashCode() {
        int hash = 7 + isoDistinguisher;
        hash = 61 * hash + Objects.hashCode(this.context);
        return hash;
    }

    /**
     * Two SparqlBNodes are equal iff their contexts are equal and they
     * carry the same isomorphism distinguisher.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            // fast path for identity comparison
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final SparqlBNode other = (SparqlBNode) obj;
        if (isoDistinguisher != other.isoDistinguisher) {
            return false;
        }
        return Objects.equals(this.context, other.context);
    }
}
| 376 |
0 | Create_ds/clerezza/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl | Create_ds/clerezza/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlGraph.java | /*
* Copyright 2015 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.clerezza.commons.rdf.impl.sparql;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.logging.Logger;
import org.apache.clerezza.commons.rdf.BlankNode;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.ImmutableGraph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Literal;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.AbstractGraph;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
/**
*
* @author reto
*/
public class SparqlGraph extends AbstractGraph {

    // Upper bound of isomorphism-distinguishers tried when assigning
    // SparqlBNodes for blank nodes with isomorphic contexts.
    private static final int MAX_ISOMORPHIC_BNODES = 1000;
    private static final Logger log = Logger.getLogger(SparqlGraph.class.getName());

    final SparqlClient sparqlClient;

    /**
     * Constructs a Graph representing the default graph at the specified
     * endpoint
     */
    public SparqlGraph(final String endpoint) {
        sparqlClient = new SparqlClient(endpoint);
    }

    /**
     * Translates the triple filter into a SPARQL SELECT query, then maps
     * blank nodes in the result to {@link SparqlBNode}s that carry their
     * context graph so they can be matched again in later filter calls.
     */
    @Override
    protected Iterator<Triple> performFilter(final BlankNodeOrIRI filterSubject,
            final IRI filterPredicate, final RDFTerm filterObject) {
        try {
            String query = createQuery(filterSubject, filterPredicate, filterObject);
            final List<Map<String, RDFTerm>> sparqlResults = sparqlClient.queryResultSet(query);
            //first to triples without bnode-conversion
            //rawTriples contains the triples with the BNodes from the result set
            final Collection<Triple> rawTriples = new ArrayList<>();
            for (Map<String, RDFTerm> result : sparqlResults) {
                rawTriples.add(new TripleImpl(filterSubject != null ? filterSubject : (BlankNodeOrIRI) result.get("s"),
                        filterPredicate != null ? filterPredicate : (IRI) result.get("p"),
                        filterObject != null ? filterObject : result.get("o")));
            }
            //then bnode conversion
            final Iterator<Triple> rawTriplesIter = rawTriples.iterator();
            //this is basically just working around the lack of (named) nested functions
            return (new Callable<Iterator<Triple>>() {

                // raw result-set BNode -> context-carrying SparqlBNode
                final Map<BlankNode, SparqlBNode> nodeMap = new HashMap<>();
                // contexts already assigned to some bnode in this result set
                final Set<ImmutableGraph> usedContext = new HashSet<>();

                private RDFTerm useSparqlNode(RDFTerm node) throws IOException {
                    if (node instanceof BlankNodeOrIRI) {
                        return useSparqlNode((BlankNodeOrIRI) node);
                    }
                    return node;
                }

                // Returns the SparqlBNode replacement for a raw blank node,
                // creating replacements for its whole context on first sight.
                private BlankNodeOrIRI useSparqlNode(BlankNodeOrIRI node) throws IOException {
                    if (node instanceof BlankNode) {
                        if (!nodeMap.containsKey(node)) {
                            createBlankNodesForcontext((BlankNode) node);
                        }
                        if (!nodeMap.containsKey(node)) {
                            throw new RuntimeException("no Bnode created");
                        }
                        return nodeMap.get(node);
                    } else {
                        return node;
                    }
                }

                // Creates SparqlBNodes for all blank nodes appearing in the
                // context of the given node, using the isoDistinguisher to
                // keep nodes with isomorphic contexts distinct.
                private void createBlankNodesForcontext(final BlankNode node) throws IOException {
                    final Collection<Triple> context = getContext(node);
                    final Set<BlankNode> rawNodes = new HashSet<>();
                    for (Triple triple : context) {
                        {
                            final BlankNodeOrIRI subject = triple.getSubject();
                            if (subject instanceof BlankNode) {
                                rawNodes.add((BlankNode) subject);
                            }
                        }
                        {
                            final RDFTerm object = triple.getObject();
                            if (object instanceof BlankNode) {
                                rawNodes.add((BlankNode) object);
                            }
                        }
                    }
                    final Set<SparqlBNode> createdSparqlNodes = new HashSet<>();
                    //final Map<BlankNode, SparqlBNode> preliminaryNodes = new HashMap<>();
                    for (BlankNode rawNode : rawNodes) {
                        for (int i = 0; i < MAX_ISOMORPHIC_BNODES; i++) {
                            SparqlBNode sparqlBNode = new SparqlBNode(rawNode, context, i);
                            if (!createdSparqlNodes.contains(sparqlBNode)) {
                                nodeMap.put(rawNode, sparqlBNode);
                                createdSparqlNodes.add(sparqlBNode);
                                break;
                            }
                        }
                    }
                }

                private ImmutableGraph getContext(final BlankNode node) throws IOException {
                    //we need to get the context of the BNode
                    //if the filter was for (null, null, null) we have the whole
                    //bnode context in the result set, otherwise we need to get
                    //more triples from the endpoint,
                    //let's first handle the easy case
                    if ((filterSubject == null) && (filterPredicate == null)
                            && (filterObject == null)) {
                        return getContextInRaw(node);
                    } else {
                        final ImmutableGraph startContext = getContextInRaw(node);
                        final Set<ImmutableGraph> expandedContexts = expandContext(startContext);
                        //expand bnode context
                        //note that there might be different contexts for
                        //a bnode as present in the current result set
                        //in this case we just have to make sure we don't
                        //pick the same context for different bnodes in the resultset
                        ImmutableGraph result = null;
                        for (ImmutableGraph expandedContext : expandedContexts) {
                            if (!usedContext.contains(expandedContext)) {
                                result = expandedContext;
                                break;
                            }
                        }
                        if (result == null) {
                            log.warning("The underlying sparql graph seems to contain redundant triples, this might cause unexpected results");
                            result = expandedContexts.iterator().next();
                        } else {
                            usedContext.add(result);
                        }
                        return result;
                    }

                }

                // Collects the triples of the current raw result set in which
                // the node appears as subject or object.
                private ImmutableGraph getContextInRaw(BlankNode node) {
                    final Graph contextBuilder = new SimpleGraph();
                    for (Triple rawTriple : rawTriples) {
                        BlankNodeOrIRI rawSubject = rawTriple.getSubject();
                        RDFTerm rawObject = rawTriple.getObject();
                        if (rawSubject.equals(node) || rawObject.equals(node)) {
                            contextBuilder.add(rawTriple);
                        }
                    }
                    return contextBuilder.getImmutableGraph();
                }

                @Override
                public Iterator<Triple> call() throws Exception {
                    return new Iterator<Triple>() {

                        @Override
                        public boolean hasNext() {
                            return rawTriplesIter.hasNext();
                        }

                        @Override
                        public Triple next() {
                            try {
                                Triple rawTriple = rawTriplesIter.next();
                                return new TripleImpl(useSparqlNode(rawTriple.getSubject()),
                                        rawTriple.getPredicate(),
                                        useSparqlNode(rawTriple.getObject()));
                            } catch (IOException ex) {
                                throw new RuntimeException(ex);
                            }
                        }
                    };
                }

                /**
                 * returns all MSGs that are supergraphs of startContext
                 *
                 * @param startContext
                 * @return
                 */
                private Set<ImmutableGraph> expandContext(Collection<Triple> startContext) throws IOException {
                    // build a query matching startContext with OPTIONAL
                    // clauses for each bnode's incoming/outgoing properties
                    final StringBuilder queryBuilder = new StringBuilder();
                    queryBuilder.append("SELECT * WHERE {\n ");
                    Map<BlankNode, String> bNodeVarNameMap = writeTriplePattern(queryBuilder, startContext);
                    Set<BlankNode> bNodesInContext = bNodeVarNameMap.keySet();
                    for (BlankNode bNode : bNodesInContext) {
                        final String bNodeVarLabel = bNodeVarNameMap.get(bNode);
                        //looking for outgoing properties of the bnode
                        queryBuilder.append("OPTIONAL { ");
                        queryBuilder.append('?');
                        queryBuilder.append(bNodeVarLabel);
                        queryBuilder.append(' ');
                        queryBuilder.append("?po");
                        queryBuilder.append(bNodeVarLabel);
                        queryBuilder.append(" ?o");
                        queryBuilder.append(bNodeVarLabel);
                        queryBuilder.append(" } .\n");
                        //looking for incoming properties of the bnode
                        queryBuilder.append("OPTIONAL { ");
                        queryBuilder.append("?s");
                        queryBuilder.append(bNodeVarLabel);
                        queryBuilder.append(' ');
                        queryBuilder.append("?pi");
                        queryBuilder.append(bNodeVarLabel);
                        queryBuilder.append(" ?");
                        queryBuilder.append(bNodeVarLabel);
                        queryBuilder.append(" } .\n");
                    }
                    queryBuilder.append(" }");
                    final List<Map<String, RDFTerm>> expansionQueryResults = sparqlClient.queryResultSet(queryBuilder.toString());
                    Set<ImmutableGraph> expandedContexts = new HashSet<>();
                    //the query results may or may be from disjoint supergraphs
                    //we expand them all as if they are different which may lead
                    //us to the same MSG multiple times
                    RESULTS:
                    for (Map<String, RDFTerm> expansionQueryResult : expansionQueryResults) {
                        Collection<Triple> expandedContext = new HashSet<>();
                        Map<BlankNode, BlankNode> newBNodesToOldBNodes = new HashMap<>();
                        for (BlankNode oldBNode : bNodesInContext) {
                            final String bNodeVarLabel = bNodeVarNameMap.get(oldBNode);
                            final RDFTerm newNode = expansionQueryResult.get(bNodeVarLabel);
                            if (!(newNode instanceof BlankNode)) {
                                //this subgraph isn't a match
                                continue RESULTS;
                            }
                            newBNodesToOldBNodes.put((BlankNode) newNode, oldBNode);
                        }
                        expandedContext.addAll(startContext);
                        boolean newBNodeIntroduced = false;
                        boolean newTripleAdded = false;
                        for (BlankNode oldBNode : bNodesInContext) {
                            final String bNodeVarLabel = bNodeVarNameMap.get(oldBNode);
                            {
                                final IRI newPredicate = (IRI) expansionQueryResult.get("po" + bNodeVarLabel);
                                if (newPredicate != null) {
                                    RDFTerm newObject = expansionQueryResult.get("o" + bNodeVarLabel);
                                    if (newObject instanceof BlankNode) {
                                        if (newBNodesToOldBNodes.containsKey(newObject)) {
                                            //point back to BNode in startContext
                                            newObject = newBNodesToOldBNodes.get(newObject);
                                        } else {
                                            newBNodeIntroduced = true;
                                        }
                                    }
                                    if (expandedContext.add(new TripleImpl(oldBNode, newPredicate, newObject))) {
                                        newTripleAdded = true;
                                    }
                                }
                            }
                            {
                                final IRI newPredicate = (IRI) expansionQueryResult.get("pi" + bNodeVarLabel);
                                if (newPredicate != null) {
                                    RDFTerm newSubject = expansionQueryResult.get("s" + bNodeVarLabel);
                                    if (newSubject instanceof BlankNode) {
                                        if (newBNodesToOldBNodes.containsKey(newSubject)) {
                                            //point back to BNode in startContext
                                            newSubject = newBNodesToOldBNodes.get(newSubject);
                                        } else {
                                            newBNodeIntroduced = true;
                                        }
                                    }
                                    if (expandedContext.add(new TripleImpl((BlankNodeOrIRI) newSubject, newPredicate, oldBNode))) {
                                        newTripleAdded = true;
                                    }
                                }
                            }
                        }
                        if (newBNodeIntroduced) {
                            //we could be more efficient than this and just expand the newly introduced bnodes
                            expandedContexts.addAll(expandContext(expandedContext));
                        } else {
                            if (newTripleAdded) {
                                //look for more results
                                expandedContexts.addAll(expandContext(expandedContext));
                                //continued iteration obsoleted by recursion
                                break;
                            }
                        }

                    }
                    if (expandedContexts.isEmpty()) {
                        expandedContexts.add(new SimpleGraph(startContext).getImmutableGraph());
                    }
                    return expandedContexts;
                }

            }).call();
        } catch (AlienBNodeException e) {
            // a BlankNode not originating from this graph can match nothing
            return new Iterator<Triple>() {

                @Override
                public boolean hasNext() {
                    return false;
                }

                @Override
                public Triple next() {
                    throw new NoSuchElementException();
                }
            };
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    // Builds the SELECT query for the given filter; SparqlBNode filters are
    // represented by a variable constrained by their context pattern.
    private String createQuery(final BlankNodeOrIRI filterSubject, final IRI filterPredicate, final RDFTerm filterObject) {
        final StringBuilder selectBuilder = new StringBuilder();
        selectBuilder.append("SELECT ");
        final StringBuilder whereBuilder = new StringBuilder();
        whereBuilder.append("WHERE { ");
        if (filterSubject == null) {
            whereBuilder.append("?s");
            selectBuilder.append("?s ");
        } else {
            if (filterSubject instanceof SparqlBNode) {
                whereBuilder.append("?sn");
            } else {
                whereBuilder.append(asSparqlTerm(filterSubject));
            }
        }
        whereBuilder.append(' ');
        if (filterPredicate == null) {
            whereBuilder.append("?p");
            selectBuilder.append("?p ");
        } else {
            whereBuilder.append(asSparqlTerm(filterPredicate));
        }
        whereBuilder.append(' ');
        if (filterObject == null) {
            whereBuilder.append("?o");
            selectBuilder.append("?o ");
        } else {
            if (filterObject instanceof SparqlBNode) {
                whereBuilder.append("?on");
            } else {
                whereBuilder.append(asSparqlTerm(filterObject));
            }
        }
        whereBuilder.append(" .\n");
        if (filterSubject instanceof SparqlBNode) {
            //expand bnode context
            writeTriplePattern(whereBuilder, ((SparqlBNode) filterSubject).context, "sn");
        }
        if (filterObject instanceof SparqlBNode) {
            //expand bnode context
            writeTriplePattern(whereBuilder, ((SparqlBNode) filterObject).context, "on");
        }
        whereBuilder.append(" }");
        return selectBuilder.append(whereBuilder).toString();
    }

    @Override
    protected int performSize() {
        try {
            //TODO replace this with count
            return sparqlClient.queryResultSet("SELECT * WHERE { ?s ?p ?o}").size();
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        }
    }

    private String asSparqlTerm(IRI iri) {
        return "<" + iri.getUnicodeString() + ">";
    }

    private String asSparqlTerm(Literal literal) {
        //TODO langauge and datatype
        return "\"" + literal.getLexicalForm().replace("\n", "\\n").replace("\"", "\\\"") + "\"";
    }

    private String asSparqlTerm(BlankNode bnode) {
        // only SparqlBNodes (with a context) can be queried for; a foreign
        // BlankNode aborts the query via AlienBNodeException
        if (!(bnode instanceof SparqlBNode)) {
            throw new AlienBNodeException();
        }
        //this requires adding additional clauses to the graph pattern
        throw new RuntimeException("SparqlBNodes should have been handled earlier");
    }

    private String asSparqlTerm(BlankNodeOrIRI term) {
        if (term instanceof IRI) {
            return asSparqlTerm((IRI) term);
        } else {
            return asSparqlTerm((BlankNode) term);
        }
    }

    private String asSparqlTerm(RDFTerm term) {
        if (term instanceof BlankNodeOrIRI) {
            return asSparqlTerm((BlankNodeOrIRI) term);
        } else {
            return asSparqlTerm((Literal) term);
        }
    }

    private Map<BlankNode, String> writeTriplePattern(StringBuilder queryBuilder, Collection<Triple> triples) {
        return writeTriplePattern(queryBuilder, triples, null);
    }

    // Serializes the triples as a SPARQL graph pattern, assigning a fresh
    // variable per blank node; SparqlBNode.internalBNodeId placeholders are
    // rendered as varLabelForInternalBNodeId. Returns the bnode->var map.
    private Map<BlankNode, String> writeTriplePattern(StringBuilder queryBuilder, Collection<Triple> triples, String varLabelForInternalBNodeId) {
        final Collection<String> triplePatterns = new ArrayList<>();
        int varCounter = 0;
        final Map<BlankNode, String> bNodeVarNameMap = new HashMap<>();
        for (Triple t : triples) {
            final StringBuilder builder = new StringBuilder();
            {
                final BlankNodeOrIRI s = t.getSubject();
                String varName;
                if (s instanceof BlankNode) {
                    if (bNodeVarNameMap.containsKey(s)) {
                        varName = bNodeVarNameMap.get(s);
                    } else {
                        varName = "v" + (varCounter++);
                        bNodeVarNameMap.put((BlankNode) s, varName);
                    }
                    builder.append('?');
                    builder.append(varName);
                } else {
                    if (s.equals(SparqlBNode.internalBNodeId)) {
                        builder.append('?');
                        builder.append(varLabelForInternalBNodeId);
                    } else {
                        builder.append(asSparqlTerm(s));
                    }
                }
            }
            builder.append(' ');
            builder.append(asSparqlTerm(t.getPredicate()));
            builder.append(' ');
            {
                final RDFTerm o = t.getObject();
                String varName;
                if (o instanceof BlankNode) {
                    if (bNodeVarNameMap.containsKey(o)) {
                        varName = bNodeVarNameMap.get(o);
                    } else {
                        varName = "v" + (varCounter++);
                        bNodeVarNameMap.put((BlankNode) o, varName);
                    }
                    builder.append('?');
                    builder.append(varName);
                } else {
                    if (o.equals(SparqlBNode.internalBNodeId)) {
                        builder.append('?');
                        builder.append(varLabelForInternalBNodeId);
                    } else {
                        builder.append(asSparqlTerm(o));
                    }
                }
            }
            builder.append('.');
            triplePatterns.add(builder.toString());

        }
        for (String triplePattern : triplePatterns) {

            queryBuilder.append(triplePattern);
            queryBuilder.append('\n');
        }
        return bNodeVarNameMap;

    }

    // Thrown when a BlankNode that was not issued by this graph is used as
    // a filter term; such a node cannot be located at the endpoint.
    private static class AlienBNodeException extends RuntimeException {

        public AlienBNodeException() {
        }
    }
}
| 377 |
0 | Create_ds/clerezza/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl | Create_ds/clerezza/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlClient.java | /*
* Copyright 2015 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.clerezza.commons.rdf.impl.sparql;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.http.HttpEntity;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.rdf.core.serializedform.Parser;
/**
*
* @author developer
*/
public class SparqlClient {

    final String endpoint;

    /**
     * @param endpoint the URI of the SPARQL query endpoint
     */
    public SparqlClient(final String endpoint) {
        this.endpoint = endpoint;
    }

    /**
     * Executes a SELECT query.
     *
     * @param query the SPARQL SELECT query
     * @return one map of variable-name to value per result row
     * @throws IOException if the endpoint cannot be reached or read
     */
    public List<Map<String, RDFTerm>> queryResultSet(final String query) throws IOException {
        return (List<Map<String, RDFTerm>>) queryResult(query);
    }

    /**
     * Executes any SPARQL query by POSTing it to the endpoint.
     *
     * @param query the SPARQL query
     * @return a {@code List<Map<String, RDFTerm>>} for SELECT, a
     *         {@code Boolean} for ASK, or a parsed graph for RDF responses
     * @throws IOException if the endpoint cannot be reached or read
     */
    public Object queryResult(final String query) throws IOException {
        CloseableHttpClient httpclient = HttpClients.createDefault();
        try {
            HttpPost httpPost = new HttpPost(endpoint);
            List<NameValuePair> nvps = new ArrayList<NameValuePair>();
            nvps.add(new BasicNameValuePair("query", query));
            httpPost.setEntity(new UrlEncodedFormEntity(nvps));
            CloseableHttpResponse response2 = httpclient.execute(httpPost);
            HttpEntity entity2 = response2.getEntity();
            try {
                InputStream in = entity2.getContent();
                final String mediaType = entity2.getContentType().getValue();
                if (mediaType.startsWith("application/sparql-results+xml")) {
                    return SparqlResultParser.parse(in);
                } else {
                    //assuming RDF response
                    //FIXME clerezza-core-rdf to clerezza dependency
                    Parser parser = Parser.getInstance();
                    return parser.parse(in, mediaType);
                }
            } finally {
                EntityUtils.consume(entity2);
                response2.close();
            }
        } finally {
            // previously leaked: the client was never closed
            httpclient.close();
        }
    }
}
| 378 |
0 | Create_ds/clerezza/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl | Create_ds/clerezza/impl.sparql/src/main/java/org/apache/clerezza/commons/rdf/impl/sparql/SparqlResultParser.java | /*
* Copyright 2016 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.clerezza.commons.rdf.impl.sparql;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.apache.clerezza.commons.rdf.BlankNode;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Language;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.impl.utils.AbstractLiteral;
import org.apache.http.util.EntityUtils;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
/**
*
* @author user
*/
public class SparqlResultParser {
static Object parse(InputStream in) throws IOException {
try {
SAXParserFactory spf = SAXParserFactory.newInstance();
spf.setNamespaceAware(true);
SAXParser saxParser = spf.newSAXParser();
XMLReader xmlReader = saxParser.getXMLReader();
final SparqlsResultsHandler sparqlsResultsHandler = new SparqlsResultsHandler();
xmlReader.setContentHandler(sparqlsResultsHandler);
xmlReader.parse(new InputSource(in));
return sparqlsResultsHandler.getResults();
} catch (ParserConfigurationException | SAXException ex) {
throw new RuntimeException(ex);
}
}
final public static class SparqlsResultsHandler extends DefaultHandler {
private String currentBindingName;
private Map<String, RDFTerm> currentResult = null;
private Object results = null;
private boolean readingValue;
private String lang; //the xml:lang attribute of a literal
private StringWriter valueWriter;
private Map<String, BlankNode> bNodeMap = new HashMap<>();
private static final IRI XSD_STRING = new IRI("http://www.w3.org/2001/XMLSchema#string");
private static final IRI RDF_LANG_STRING = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#langString");
private RDFTerm getBNode(String value) {
if (!bNodeMap.containsKey(value)) {
bNodeMap.put(value, new BlankNode());
}
return bNodeMap.get(value);
}
private Object getResults() {
return results;
}
private List<Map<String, RDFTerm>> getResultValueMaps() {
return (List<Map<String, RDFTerm>>) results;
}
enum BindingType {
uri, bnode, literal;
}
@Override
public void startDocument() throws SAXException {
}
@Override
public void startElement(String namespaceURI,
String localName,
String qName,
Attributes atts)
throws SAXException {
if ("http://www.w3.org/2005/sparql-results#".equals(namespaceURI)) {
if ("boolean".equals(localName)) {
if (results != null) {
throw new SAXException("unexpected tag <boolean>");
}
//results will have Boolean value assigned once value is read
readingValue = true;
valueWriter = new StringWriter();
} else if ("results".equals(localName)) {
if (results != null) {
throw new SAXException("unexpected tag <result>");
}
results = new ArrayList<Map<String, RDFTerm>>();
} else if ("result".equals(localName)) {
if (currentResult != null) {
throw new SAXException("unexpected tag <result>");
}
currentResult = new HashMap<String, RDFTerm>();
} else if ("binding".equals(localName)) {
if (currentResult == null) {
throw new SAXException("unexpected tag <binding>");
}
currentBindingName = atts.getValue("name");
} else if ("uri".equals(localName) || "bnode".equals(localName) || "literal".equals(localName)) {
if (readingValue) {
throw new SAXException("unexpected tag <" + localName + ">");
}
lang = atts.getValue("http://www.w3.org/XML/1998/namespace", "lang");
readingValue = true;
valueWriter = new StringWriter();
}
}
//System.out.println(namespaceURI);
//System.out.println(qName);
}
@Override
public void characters(char[] chars, int start, int length) throws SAXException {
if (readingValue) {
valueWriter.write(chars, start, length);
//System.err.println(value + start + ", " + length);
}
}
/**
 * SAX callback for closing tags. Completes whatever the matching
 * startElement began:
 * <ul>
 *   <li>{@code </result>}: appends the finished binding map to the
 *       result list</li>
 *   <li>{@code </binding>}: clears the current variable name</li>
 *   <li>{@code </boolean>}: converts the captured text to the Boolean
 *       result</li>
 *   <li>{@code </uri>} / {@code </bnode>} / {@code </literal>}: builds
 *       the corresponding RDFTerm and stores it under the current
 *       binding name</li>
 * </ul>
 *
 * @throws SAXException on a {@code </binding>} without an open binding
 */
@Override
public void endElement(String namespaceURI,
        String localName,
        String qName)
        throws SAXException {
    if ("http://www.w3.org/2005/sparql-results#".equals(namespaceURI)) {
        if ("result".equals(localName)) {
            ((List<Map<String, RDFTerm>>) results).add(currentResult);
            currentResult = null;
        } else if ("binding".equals(localName)) {
            if (currentBindingName == null) {
                throw new SAXException("unexpected tag </binding>");
            }
            currentBindingName = null;
        } else if ("boolean".equals(localName)) {
            // Boolean.valueOf replaces the deprecated Boolean(String) constructor
            results = Boolean.valueOf(valueWriter.toString());
            valueWriter = null;
            readingValue = false;
        } else {
            try {
                BindingType b = BindingType.valueOf(localName);
                RDFTerm rdfTerm = null;
                // xml:lang captured in startElement; null for plain/typed literals
                final Language language = lang == null ? null : new Language(lang);
                switch (b) {
                    case uri:
                        rdfTerm = new IRI(valueWriter.toString());
                        valueWriter = null;
                        break;
                    case bnode:
                        rdfTerm = getBNode(valueWriter.toString());
                        valueWriter = null;
                        break;
                    case literal:
                        final String lf = valueWriter.toString();
                        rdfTerm = new AbstractLiteral() {
                            @Override
                            public String getLexicalForm() {
                                return lf;
                            }

                            @Override
                            public IRI getDataType() {
                                if (language != null) {
                                    return RDF_LANG_STRING;
                                }
                                // TODO: typed literals (datatype attribute) are not
                                // handled yet; untyped content is reported as xsd:string
                                return XSD_STRING;
                            }

                            @Override
                            public Language getLanguage() {
                                return language;
                            }

                            @Override
                            public String toString() {
                                // NOTE(review): prints "@null" for literals without a
                                // language tag — kept as-is to preserve behavior
                                return "\"" + getLexicalForm() + "\"@" + getLanguage();
                            }
                        };
                        break;
                }
                currentResult.put(currentBindingName, rdfTerm);
                readingValue = false;
            } catch (IllegalArgumentException e) {
                // localName was not uri|bnode|literal: ignore unknown elements
            }
        }
    }
}
// SAX callback for end of document; nothing to finalise — the result
// structures are complete once the last element closed.
// NOTE(review): unlike the sibling callbacks this one lacks @Override;
// presumably it still overrides the SAX handler method — confirm.
public void endDocument() throws SAXException {
    //System.out.println("results: " + results.size());
}
}
}
| 379 |
0 | Create_ds/clerezza/representation/src/test/java/org/apache/clerezza | Create_ds/clerezza/representation/src/test/java/org/apache/clerezza/representation/ParserTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.representation;
import org.apache.clerezza.Graph;
import org.apache.clerezza.IRI;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
import java.io.InputStream;
/**
*
* @author reto
*/
@RunWith(JUnitPlatform.class)
public class ParserTest {

    private static boolean providerAInvoked;
    private static boolean providerBInvoked;

    private ParsingProvider parsingProviderA = new ParsingProviderA();
    private ParsingProvider parsingProviderB = new ParsingProviderB();

    /**
     * A single registered provider must receive parse requests for a
     * format it declares.
     */
    @Test
    public void registerOneProvider() {
        Parser parser = new Parser(null);
        parser.bindParsingProvider(parsingProviderA);
        resetInvocationFlags();
        parser.parse(null, "application/x-fantasy2+rdf");
        Assertions.assertTrue(providerAInvoked);
    }

    /**
     * A later-registered provider shadows an earlier one on a shared
     * format, and unbinding it hands the format back to the earlier
     * provider.
     */
    @Test
    public void registerAndUnregisterSecond() {
        Parser parser = new Parser(null);
        parser.bindParsingProvider(parsingProviderA);
        parser.bindParsingProvider(parsingProviderB);
        // B shadows A on the format both declare
        resetInvocationFlags();
        parser.parse(null, "application/x-fantasy2+rdf");
        Assertions.assertFalse(providerAInvoked);
        Assertions.assertTrue(providerBInvoked);
        // A still serves the format only it declares
        resetInvocationFlags();
        parser.parse(null, "application/x-fantasy1+rdf");
        Assertions.assertTrue(providerAInvoked);
        Assertions.assertFalse(providerBInvoked);
        // after unbinding B, A serves the shared format again
        parser.unbindParsingProvider(parsingProviderB);
        resetInvocationFlags();
        parser.parse(null, "application/x-fantasy2+rdf");
        Assertions.assertTrue(providerAInvoked);
        Assertions.assertFalse(providerBInvoked);
    }

    /** Clears both invocation-recording flags before each dispatch check. */
    private static void resetInvocationFlags() {
        providerAInvoked = false;
        providerBInvoked = false;
    }

    /** Records an invocation; declares two supported formats. */
    @SupportedFormat({"application/x-fantasy1+rdf", "application/x-fantasy2+rdf"})
    static class ParsingProviderA implements ParsingProvider {

        @Override
        public void parse(Graph target, InputStream serializedGraph, String formatIdentifier, IRI baseUri) {
            providerAInvoked = true;
        }
    }

    /** Records an invocation; declares a single supported format. */
    @SupportedFormat("application/x-fantasy2+rdf")
    static class ParsingProviderB implements ParsingProvider {

        @Override
        public void parse(Graph target, InputStream serializedGraph, String formatIdentifier, IRI baseUri) {
            providerBInvoked = true;
        }
    }
}
| 380 |
0 | Create_ds/clerezza/representation/src/test/java/org/apache/clerezza | Create_ds/clerezza/representation/src/test/java/org/apache/clerezza/representation/TestServiceManagedProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.representation;
import org.apache.clerezza.Graph;
import org.apache.clerezza.IRI;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
import java.io.InputStream;
/**
* This class is listed in
* META-INF/services/org.apache.clerezza.serializedform.ParsingProvider
*
* @author reto
*/
@RunWith(JUnitPlatform.class)
@SupportedFormat("application/x-test+rdf")
public class TestServiceManagedProvider implements ParsingProvider {

    private static boolean parseInvoked;

    /**
     * The singleton Parser must have picked this class up through the
     * service-loader registration and dispatch its declared format to it.
     */
    @Test
    public void registerOneProvider() {
        Parser parser = Parser.getInstance();
        parser.parse(null, "application/x-test+rdf");
        Assertions.assertTrue(parseInvoked);
    }

    @Override
    public void parse(Graph target, InputStream serializedGraph, String formatIdentifier, IRI baseUri) {
        parseInvoked = true;
    }
}
| 381 |
0 | Create_ds/clerezza/representation/src/test/java/org/apache/clerezza | Create_ds/clerezza/representation/src/test/java/org/apache/clerezza/representation/SerializerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.representation;
import org.apache.clerezza.Graph;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.platform.runner.JUnitPlatform;
import org.junit.runner.RunWith;
import java.io.OutputStream;
/**
*
* @author mir
*/
@RunWith(JUnitPlatform.class)
public class SerializerTest {

    private static boolean providerAInvoked;
    private static boolean providerBInvoked;

    private SerializingProvider serializingProviderA = new SerializingProviderA();
    private SerializingProvider serializingProviderB = new SerializingProviderB();

    /**
     * A single registered provider must receive serialization requests
     * for a format it declares.
     */
    @Test
    public void registerOneProvider() {
        Serializer serializer = new Serializer(null);
        serializer.bindSerializingProvider(serializingProviderA);
        resetInvocationFlags();
        serializer.serialize(null, null, "application/x-fantasy2+rdf");
        Assertions.assertTrue(providerAInvoked);
    }

    /**
     * A later-registered provider shadows an earlier one on a shared
     * format, and unbinding it hands the format back to the earlier
     * provider.
     */
    @Test
    public void registerAndUnregisterSecond() {
        Serializer serializer = new Serializer(null);
        serializer.bindSerializingProvider(serializingProviderA);
        serializer.bindSerializingProvider(serializingProviderB);
        // B shadows A on the format both declare
        resetInvocationFlags();
        serializer.serialize(null, null, "application/x-fantasy2+rdf");
        Assertions.assertFalse(providerAInvoked);
        Assertions.assertTrue(providerBInvoked);
        // A still serves the format only it declares
        resetInvocationFlags();
        serializer.serialize(null, null, "application/x-fantasy1+rdf");
        Assertions.assertTrue(providerAInvoked);
        Assertions.assertFalse(providerBInvoked);
        // after unbinding B, A serves the shared format again
        serializer.unbindSerializingProvider(serializingProviderB);
        resetInvocationFlags();
        serializer.serialize(null, null, "application/x-fantasy2+rdf");
        Assertions.assertTrue(providerAInvoked);
        Assertions.assertFalse(providerBInvoked);
    }

    /** Clears both invocation-recording flags before each dispatch check. */
    private static void resetInvocationFlags() {
        providerAInvoked = false;
        providerBInvoked = false;
    }

    /** Records an invocation; declares two supported formats. */
    @SupportedFormat({"application/x-fantasy1+rdf", "application/x-fantasy2+rdf"})
    static class SerializingProviderA implements SerializingProvider {

        @Override
        public void serialize(OutputStream serializedGraph, Graph tc, String formatIdentifier) {
            providerAInvoked = true;
        }
    }

    /** Records an invocation; declares a single supported format. */
    @SupportedFormat("application/x-fantasy2+rdf")
    static class SerializingProviderB implements SerializingProvider {

        @Override
        public void serialize(OutputStream serializedGraph, Graph tc, String formatIdentifier) {
            providerBInvoked = true;
        }
    }
}
| 382 |
0 | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza/representation/Parser.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.representation;
import org.apache.clerezza.Graph;
import org.apache.clerezza.IRI;
import org.apache.clerezza.ImmutableGraph;
import org.apache.clerezza.implementation.in_memory.SimpleGraph;
import org.osgi.service.cm.ConfigurationAdmin;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.component.annotations.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;
/**
* This singleton class provides a method
* <code>parse</code> to transform serialized RDF forms into {@link ImmutableGraph}s.
*
* Functionality is delegated to registered {@link ParsingProvider}s. Such
* <code>ParsingProvider</code>s can be registered and unregistered, later
* registered
* <code>ParsingProvider</code>s shadow previously registered providers for the
* same format.
*
* Note on synchronization:
* <code>ParsingProvider</code>s must be able to handle concurrent requests.
*
* @author reto
*
*/
@Component(service = Parser.class)
public class Parser {

    private ConfigurationAdmin configurationAdmin;

    /**
     * The list of providers in the order of registration.
     */
    private List<ParsingProvider> providerList = new ArrayList<ParsingProvider>();

    /**
     * An immutable snapshot used to quickly locate a provider by format
     * identifier; replaced wholesale by refreshProviderMap (volatile, so
     * readers never need a lock).
     */
    private volatile Map<String, ParsingProvider> providerMap = new HashMap<String, ParsingProvider>();

    /**
     * The singleton instance.
     */
    private volatile static Parser instance;

    /**
     * True once this instance has been activated (by OSGi-DS or by
     * getInstance); refreshProviderMap is a no-op until then.
     */
    private boolean active;

    private static final Logger log = LoggerFactory.getLogger(Parser.class);

    /**
     * the constructor sets the singleton instance to allow instantiation
     * by OSGi-DS. This constructor should not be called except by OSGi-DS,
     * otherwise the static <code>getInstance</code> method should be used.
     */
    public Parser() {
        log.info("constructing Parser");
        Parser.instance = this;
    }

    /**
     * A constructor for tests, which doesn't set the singleton instance
     *
     * @param dummy an ignored argument to distinguish this from the other constructor
     */
    Parser(Object dummy) {
        active = true;
    }

    /**
     * This returns the singleton instance, if an instance has been previously
     * created (e.g. by OSGi declarative services) this instance is returned,
     * otherwise a new instance is created and providers are injected using
     * the service provider interface (META-INF/services/)
     *
     * @return the singleton Parser instance
     */
    public static Parser getInstance() {
        if (instance == null) {
            synchronized (Parser.class) {
                if (instance == null) {
                    // the no-arg constructor assigns the volatile `instance` field as a
                    // side effect; the local makes that dependency explicit.
                    // NOTE(review): `instance` becomes visible to other threads before
                    // the providers below are bound — a racing getInstance() caller may
                    // briefly see a Parser without providers; confirm this is acceptable.
                    Parser created = new Parser();
                    Iterator<ParsingProvider> parsingProviders =
                            ServiceLoader.load(ParsingProvider.class).iterator();
                    while (parsingProviders.hasNext()) {
                        created.bindParsingProvider(parsingProviders.next());
                    }
                    created.active = true;
                    created.refreshProviderMap();
                }
            }
        }
        return instance;
    }

    @Activate
    protected void activate(final ComponentContext componentContext) {
        active = true;
        refreshProviderMap();
        // Note: refreshProviderMap writes this component's configuration; updating
        // the configuration asynchronously before activation finished caused a new
        // component instance to be created, so the refresh is done synchronously.
    }

    @Deactivate
    protected void deactivate(final ComponentContext componentContext) {
        active = false;
    }

    @Modified
    void modified(ComponentContext ctx) {
        log.debug("modified");
    }

    /**
     * Parses a serialized ImmutableGraph from an InputStream. This delegates the
     * processing to the provider registered for the specified format, if
     * the formatIdentifier contains a ';'-character only the section before
     * that character is used for choosing the provider.
     *
     * @param serializedGraph an inputstream with the serialization
     * @param formatIdentifier a string identifying the format (usually the MIME-type)
     * @return the ImmutableGraph read from the stream
     * @throws UnsupportedFormatException if no provider supports the format
     */
    public ImmutableGraph parse(InputStream serializedGraph,
            String formatIdentifier) throws UnsupportedFormatException {
        return parse(serializedGraph, formatIdentifier, null);
    }

    /**
     * Parses a serialized ImmutableGraph from an InputStream. This delegates the
     * processing to the provider registered for the specified format, if
     * the formatIdentifier contains a ';'-character only the section before
     * that character is used for choosing the provider.
     *
     * @param target the Graph to which the parsed triples are added
     * @param serializedGraph an inputstream with the serialization
     * @param formatIdentifier a string identifying the format (usually the MIME-type)
     * @throws UnsupportedFormatException if no provider supports the format
     */
    public void parse(Graph target, InputStream serializedGraph,
            String formatIdentifier) throws UnsupportedFormatException {
        parse(target, serializedGraph, formatIdentifier, null);
    }

    /**
     * Parses a serialized ImmutableGraph from an InputStream. This delegates the
     * processing to the provider registered for the specified format, if
     * the formatIdentifier contains a ';'-character only the section before
     * that character is used for choosing the provider.
     *
     * @param serializedGraph an inputstream with the serialization
     * @param formatIdentifier a string identifying the format (usually the MIME-type)
     * @param baseUri the uri against which relative uri-refs are evaluated
     * @return the ImmutableGraph read from the stream
     * @throws UnsupportedFormatException if no provider supports the format
     */
    public ImmutableGraph parse(InputStream serializedGraph,
            String formatIdentifier, IRI baseUri) throws UnsupportedFormatException {
        Graph graph = new SimpleGraph();
        parse(graph, serializedGraph, formatIdentifier, baseUri);
        return graph.getImmutableGraph();
    }

    /**
     * Parses a serialized ImmutableGraph from an InputStream. This delegates the
     * processing to the provider registered for the specified format, if
     * the formatIdentifier contains a ';'-character only the section before
     * that character is used for choosing the provider.
     *
     * @param target the Graph to which the parsed triples are added
     * @param serializedGraph an inputstream with the serialization
     * @param formatIdentifier a string identifying the format (usually the MIME-type)
     * @param baseUri the uri against which relative uri-refs are evaluated
     * @throws UnsupportedFormatException if no provider supports the format
     */
    public void parse(Graph target, InputStream serializedGraph,
            String formatIdentifier, IRI baseUri) throws UnsupportedFormatException {
        // strip MIME-type parameters (e.g. "; charset=UTF-8") for provider lookup
        String deParameterizedIdentifier;
        int semicolonPos = formatIdentifier.indexOf(';');
        if (semicolonPos > -1) {
            deParameterizedIdentifier = formatIdentifier.substring(0, semicolonPos);
        } else {
            deParameterizedIdentifier = formatIdentifier;
        }
        ParsingProvider provider = providerMap.get(deParameterizedIdentifier);
        if (provider == null) {
            throw new UnsupportedParsingFormatException(formatIdentifier);
        }
        // the provider receives the full identifier, parameters included
        provider.parse(target, serializedGraph, formatIdentifier, baseUri);
    }

    /**
     * Get a set of supported formats.
     *
     * @return a set of strings identifying formats (usually the MIME-type)
     */
    public Set<String> getSupportedFormats() {
        return Collections.unmodifiableSet(providerMap.keySet());
    }

    /**
     * Registers a parsing provider.
     *
     * @param provider the provider to be registered
     */
    @Reference(policy = ReferencePolicy.DYNAMIC,
            cardinality = ReferenceCardinality.MULTIPLE)
    public void bindParsingProvider(ParsingProvider provider) {
        providerList.add(provider);
        refreshProviderMap();
    }

    /**
     * Unregister a parsing provider.
     *
     * @param provider the provider to be deregistered
     */
    public void unbindParsingProvider(ParsingProvider provider) {
        providerList.remove(provider);
        refreshProviderMap();
    }

    /**
     * Rebuilds the format-to-provider snapshot from providerList (later
     * registrations override earlier ones for the same format) and, when
     * running under OSGi, publishes the supported formats in this
     * component's configuration.
     */
    private void refreshProviderMap() {
        if (active) {
            try {
                final Map<String, ParsingProvider> newProviderMap = new HashMap<String, ParsingProvider>();
                for (ParsingProvider provider : providerList) {
                    String[] formatIdentifiers = getFormatIdentifiers(provider);
                    for (String formatIdentifier : formatIdentifiers) {
                        newProviderMap.put(formatIdentifier, provider);
                    }
                }
                providerMap = newProviderMap;
                if (configurationAdmin != null) { //i.e. when we are in an OSGi environment
                    Dictionary<String, Object> newConfig = configurationAdmin.getConfiguration(getClass().getName()).getProperties();
                    if (newConfig == null) {
                        newConfig = new Hashtable<String, Object>();
                    }
                    Set<String> supportedFormats = getSupportedFormats();
                    String[] supportedFormatsArray = supportedFormats.toArray(new String[supportedFormats.size()]);
                    newConfig.put(SupportedFormat.supportedFormat, supportedFormatsArray);
                    configurationAdmin.getConfiguration(getClass().getName()).update(newConfig);
                }
            } catch (IOException ex) {
                throw new RuntimeException(ex);
            }
        }
    }

    /**
     * Extracts the format identifiers declared by a parsing provider via
     * its {@link SupportedFormat} annotation.
     *
     * @param parsingProvider the provider to inspect
     * @return the declared format identifiers
     */
    private String[] getFormatIdentifiers(ParsingProvider parsingProvider) {
        Class<? extends ParsingProvider> clazz = parsingProvider.getClass();
        SupportedFormat supportedFormatAnnotation = clazz.getAnnotation(SupportedFormat.class);
        // NOTE(review): throws NPE if a provider lacks the annotation — presumably
        // providers are required to carry it; confirm before adding a guard.
        String[] formatIdentifiers = supportedFormatAnnotation.value();
        return formatIdentifiers;
    }

    @Reference
    protected void bindConfigurationAdmin(ConfigurationAdmin configurationAdmin) {
        this.configurationAdmin = configurationAdmin;
    }

    protected void unbindConfigurationAdmin(ConfigurationAdmin configurationAdmin) {
        this.configurationAdmin = null;
    }
}
| 383 |
0 | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza/representation/UnsupportedFormatException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.representation;
/**
*
* This exception is thrown on an attempt to parse a format for which no
* <code>ParsingProvider</code> is known.
*
* @author reto
*/
public abstract class UnsupportedFormatException extends RuntimeException {

    /**
     * Creates an exception for an unsupported serialization format.
     *
     * @param message a description naming the unsupported format
     */
    public UnsupportedFormatException(String message) {
        super(message);
    }

    /**
     * Creates an exception for an unsupported serialization format,
     * preserving the triggering exception as the cause. Added so
     * subclasses can chain an underlying failure instead of dropping it.
     *
     * @param message a description naming the unsupported format
     * @param cause the exception that triggered this one
     */
    public UnsupportedFormatException(String message, Throwable cause) {
        super(message, cause);
    }
}
| 384 |
0 | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza/representation/SerializingProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.representation;
import org.apache.clerezza.Graph;
import java.io.OutputStream;
/**
* An instance of this class serializes <code>Graph</code>s to a
* specified serialization format. The supported formats are indicated using the
* {@link SupportedFormat} annotation.
*
* @author mir
*/
public interface SerializingProvider {

    /** Serializes a <code>Graph</code> to a specified
     * <code>OutputStream</code> in the format identified by
     * <code>formatIdentifier</code>. This method will be invoked
     * for a supported format, a format is considered as supported if the part
     * before a ';'-character in the <code>formatIdentifier</code> matches
     * a <code>SupportedFormat</code> annotation of the implementing class.
     *
     * @param outputStream the stream the serialized form is written to
     * @param tc the graph to be serialized
     * @param formatIdentifier the format name (usually a MIME-type), possibly
     *        followed by ';'-separated parameters
     */
    public void serialize(OutputStream outputStream, Graph tc,
            String formatIdentifier);
}
| 385 |
0 | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza/representation/UnsupportedSerializationFormatException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.representation;
/**
 * Thrown on an attempt to serialize to a format for which no
 * <code>SerializingProvider</code> is registered.
 *
 * @author reto
 */
public class UnsupportedSerializationFormatException extends UnsupportedFormatException {

    /**
     * Creates an instance for a specified unsupported format.
     *
     * @param formatIdentifier the unsupported format
     */
    public UnsupportedSerializationFormatException(String formatIdentifier) {
        super("No serializer available for "+formatIdentifier);
    }
}
| 386 |
0 | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza/representation/Serializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.representation;
import org.apache.clerezza.Graph;
import org.osgi.service.cm.ConfigurationAdmin;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.component.annotations.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.OutputStream;
import java.util.*;
/**
* This singleton class provides a method <code>serialize</code> to transform a
* {@link Graph} into serialized RDF forms.
*
* Functionality is delegated to registered {@link SerializingProvider}s. Such
* <code>SerializingProvider</code>s can be registered and unregistered, later
* registered <code>SerializingProvider</code>s shadow previously registered
* providers for the same format.
*
* Note on synchronization: <code>SerializingProvider</code>s must be able to
* handle concurrent requests.
*
* @author mir
*
*/
@Component(service = Serializer.class)
public class Serializer {
private ConfigurationAdmin configurationAdmin;
/**
* The list of providers in the order of registration
*/
private List<SerializingProvider> providerList = new ArrayList<SerializingProvider>();
/**
* A map to quickly locate a provider
*/
private volatile Map<String, SerializingProvider> providerMap = new HashMap<String, SerializingProvider>();
/**
* The singleton instance
*/
private volatile static Serializer instance;
private static final Logger log = LoggerFactory.getLogger(Serializer.class);
private boolean active;
/**
* the constructor sets the singleton instance to allow instantiation
* by OSGi-DS. This constructor should not be called except by OSGi-DS,
* otherwise the static <code>getInstance</code> method should be used.
*/
public Serializer() {
Serializer.instance = this;
}
/**
* A constructor for tests, which doesn't set the singleton instance
*
* @param dummy
* an ignored argument to distinguish this from the other
* constructor
*/
Serializer(Object dummy) {
active = true;
}
/**
* This returns the singleton instance, if an instance has been previously
* created (e.g. by OSGi declarative services) this instance is returned,
* otherwise a new instance is created and providers are injected using the
* service provider interface (META-INF/services/)
*
* @return the singleton Serializer instance
*/
public static Serializer getInstance() {
if (instance == null) {
synchronized (Serializer.class) {
if (instance == null) {
new Serializer();
Iterator<SerializingProvider> SerializingProviders = ServiceLoader
.load(SerializingProvider.class).iterator();
while (SerializingProviders.hasNext()) {
SerializingProvider SerializingProvider = SerializingProviders
.next();
instance.bindSerializingProvider(SerializingProvider);
}
instance.active = true;
instance.refreshProviderMap();
}
}
}
return instance;
}
@Activate
protected void activate(final ComponentContext componentContext) {
active = true;
refreshProviderMap();
}
@Deactivate
protected void deactivate(final ComponentContext componentContext) {
active = false;
}
@Modified
void modified(ComponentContext ctx) {
log.debug("modified");
}
/**
* Serializes a ImmutableGraph into an OutputStream. This delegates the
* processing to the provider registered for the specified format, if
* the formatIdentifier contains a ';'-character only the section before
* that character is used for choosing the provider.
*
* @param serializedGraph
* an outputStream into which the ImmutableGraph will be serialized
* @param tc the <code>Graph</code> to be serialized
* @param formatIdentifier
* a string specifying the serialization format (usually the
* MIME-type)
* @throws UnsupportedFormatException
*/
public void serialize(OutputStream serializedGraph, Graph tc,
String formatIdentifier) throws UnsupportedFormatException {
String deParameterizedIdentifier;
int semicolonPos = formatIdentifier.indexOf(';');
if (semicolonPos > -1) {
deParameterizedIdentifier = formatIdentifier.substring(0, semicolonPos);
} else {
deParameterizedIdentifier = formatIdentifier;
}
SerializingProvider provider = providerMap.get(deParameterizedIdentifier);
if (provider == null) {
throw new UnsupportedSerializationFormatException(formatIdentifier);
}
provider.serialize(serializedGraph, tc, formatIdentifier);
}
/**
* Get a set of supported formats
*
* @return a set if stings identifying formats (usually the MIME-type)
*/
public Set<String> getSupportedFormats() {
return Collections.unmodifiableSet(providerMap.keySet());
}
/**
* Registers a Serializing provider
*
* @param provider
* the provider to be registered
*/
@Reference(policy = ReferencePolicy.DYNAMIC,
cardinality = ReferenceCardinality.MULTIPLE)
public void bindSerializingProvider(SerializingProvider provider) {
providerList.add(provider);
refreshProviderMap();
}
/**
* Unregister a Serializing provider
*
* @param provider
* the provider to be unregistered
*/
public void unbindSerializingProvider(SerializingProvider provider) {
providerList.remove(provider);
refreshProviderMap();
}
private void refreshProviderMap() {
if (active) {
final Map<String, SerializingProvider> newProviderMap = new HashMap<String, SerializingProvider>();
//we want more generic providers first so they get overridden by more specific ones
Collections.sort(providerList, new Comparator<SerializingProvider>() {
@Override
public int compare(SerializingProvider s1, SerializingProvider s2) {
return getFormatIdentifiers(s2).length - getFormatIdentifiers(s1).length;
}
});
for (SerializingProvider provider : providerList) {
String[] formatIdentifiers = getFormatIdentifiers(provider);
for (String formatIdentifier : formatIdentifiers) {
newProviderMap.put(formatIdentifier, provider);
}
}
providerMap = newProviderMap;
if (configurationAdmin != null) { //we are in OSGi environment
try {
Dictionary<String, Object> newConfig = configurationAdmin.getConfiguration(getClass().getName()).getProperties();
if (newConfig == null) {
newConfig = new Hashtable<String, Object>();
}
Set<String> supportedFormats = getSupportedFormats();
String[] supportedFromatsArray = supportedFormats.toArray(new String[supportedFormats.size()]);
newConfig.put(SupportedFormat.supportedFormat, supportedFromatsArray);
configurationAdmin.getConfiguration(getClass().getName()).update(newConfig);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
}
}
/**
 * Reads the format identifiers declared by the provider's
 * {@code SupportedFormat} class annotation.
 *
 * @param provider the provider whose class is inspected
 * @return the declared format identifiers (typically MIME-types)
 * @throws NullPointerException if the provider's class is not annotated
 *         with {@code SupportedFormat}
 */
private String[] getFormatIdentifiers(
        SerializingProvider provider) {
    Class<? extends SerializingProvider> clazz = provider.getClass();
    SupportedFormat supportedFormatAnnotation = clazz
            .getAnnotation(SupportedFormat.class);
    if (supportedFormatAnnotation == null) {
        // Fail with an informative message instead of a bare NPE on value().
        throw new NullPointerException("Provider class " + clazz.getName()
                + " is not annotated with @SupportedFormat");
    }
    return supportedFormatAnnotation.value();
}
/**
 * Called by the OSGi framework to inject the ConfigurationAdmin service,
 * which is used to publish the supported formats as a service property.
 *
 * @param configurationAdmin the ConfigurationAdmin service to use
 */
@Reference
protected void bindConfigurationAdmin(ConfigurationAdmin configurationAdmin) {
    this.configurationAdmin = configurationAdmin;
}
/**
 * Called by the OSGi framework when the ConfigurationAdmin service goes
 * away; clears the reference so no stale service is used.
 *
 * @param configurationAdmin the ConfigurationAdmin service being removed (unused)
 */
protected void unbindConfigurationAdmin(ConfigurationAdmin configurationAdmin) {
    this.configurationAdmin = null;
}
}
| 387 |
0 | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza/representation/SupportedFormat.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.representation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a parsing or serializing provider with the serialization
 * format(s) it supports.
 *
 * @author reto
 */
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface SupportedFormat {

    /** Key used for the OSGi service property listing the supported formats. */
    String supportedFormat = "supportedFormat";

    String RDF_XML = "application/rdf+xml";
    String TURTLE = "text/turtle";
    String X_TURTLE = "application/x-turtle";
    String N_TRIPLE = "application/n-triples";

    /**
     * @deprecated The mime-type for N Triples is "application/n-triples": http://www.w3.org/TR/n-triples/#sec-mediaReg-n-triples
     */
    @Deprecated
    String TEXT_RDF_NT = "text/rdf+nt";

    String N3 = "text/rdf+n3";
    String RDF_JSON = "application/rdf+json";

    // Both HTML and XHTML can carry RDF as RDFa, so they count as RDF formats.
    String XHTML = "application/xhtml+xml";
    String HTML = "text/html";

    /**
     * A list of format identifiers (typically MIME-types) without
     * parameters (i.e. without the ';'-character),
     * e.g. {"application/rdf+xml","application/turtle"}.
     */
    String[] value();
}
| 388 |
0 | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza/representation/ParsingProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.representation;
import org.apache.clerezza.Graph;
import org.apache.clerezza.IRI;
import java.io.InputStream;
/**
 * An instance of this class parses RDF graphs from one or more serialization
 * formats. The supported formats are indicated using the {@link SupportedFormat}
 * annotation.
 *
 * @author reto
 */
public interface ParsingProvider {

    /**
     * Parses a stream as the specified RDF-format. This method will be invoked
     * for a supported format; a format is considered as supported if the part
     * before a ';'-character in the <code>formatIdentifier</code> matches
     * a <code>SupportedFormat</code> annotation of the implementing class.
     *
     * @param target the mutable Graph to which the parsed triples are added
     * @param serializedGraph the stream from which the serialized graph is read
     * @param formatIdentifier a String identifying the format
     * @param baseUri the baseUri for interpreting relative uris, may be null
     */
    void parse(Graph target, InputStream serializedGraph,
            String formatIdentifier, IRI baseUri);
}
| 389 |
0 | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza | Create_ds/clerezza/representation/src/main/java/org/apache/clerezza/representation/UnsupportedParsingFormatException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza.representation;
/**
 * Thrown when no parser is available for a requested serialization format.
 *
 * @author reto
 */
public class UnsupportedParsingFormatException extends UnsupportedFormatException {

    // Exceptions are Serializable; declare an explicit serialVersionUID so
    // the serialized form does not depend on the compiler-computed default.
    private static final long serialVersionUID = 1L;

    /**
     * Creates an instance for a specified unsupported format.
     *
     * @param formatIdentifier the unsupported format
     */
    public UnsupportedParsingFormatException(String formatIdentifier) {
        super("No parser available for " + formatIdentifier);
    }
}
| 390 |
0 | Create_ds/clerezza/api/src/main/java/org/apache | Create_ds/clerezza/api/src/main/java/org/apache/clerezza/WatchableGraph.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza;
import org.apache.clerezza.event.FilterTriple;
import org.apache.clerezza.event.GraphListener;
/**
 * An extension to the Graph interface whose implementations emit events
 * on modification, so registered listeners can observe changes.
 *
 * @author reto
 */
public interface WatchableGraph extends Graph {

    /**
     * Adds the specified <code>GraphListener</code> to the graph. This listener
     * will be notified, when the graph is modified and the <code>Triple</code>
     * that was part of the modification matched the specified
     * <code>FilterTriple</code>. The notification will be passed to the
     * listener after the specified delay time (in milli-seconds) has passed.
     * If more matching events occur during the delay period, then they are
     * passed all together at the end of the delay period. If the listener
     * unregisters or the platform is stopped within the period then the already
     * occurred events may not be delivered.
     * <p>
     * All implementations support this method; immutable implementations will
     * typically provide an empty implementation, they shall not throw an
     * exception.
     * <p>
     * Implementations of which the triples change over time without add- and
     * remove-methods being called (e.g. implementations dynamically generating
     * their triples on invocation of the filter-method) may choose not to, or
     * only partially propagate their changes to the listener. They should
     * describe the behavior in the documentation of the class.
     * <p>
     * Implementations should keep weak references to the listeners, so that a
     * listener can be garbage collected if it is no longer referenced by
     * another object.
     * <p>
     * If delay is 0 notification will happen synchronously.
     *
     * @param listener The listener that will be notified
     * @param filter The triple filter with which triples are tested,
     *        that were part of the modification.
     * @param delay The time period after which the listener will be notified in milliseconds.
     */
    public void addGraphListener(GraphListener listener, FilterTriple filter,
            long delay);

    /**
     * Adds the specified <code>GraphListener</code> to the graph. This listener
     * will be notified, when the graph is modified and the <code>Triple</code>
     * that was part of the modification matched the specified
     * <code>FilterTriple</code>. The notification will be passed without delay.
     * <p>
     * Same as <code>addGraphListener(listener, filter, 0)</code>.
     *
     * @param listener The listener that will be notified
     * @param filter The triple filter with which triples are tested,
     *        that were part of the modification.
     */
    public void addGraphListener(GraphListener listener, FilterTriple filter);

    /**
     * Removes the specified <code>GraphListener</code> from the graph. This
     * listener will no longer be notified, when the graph is modified.
     *
     * @param listener The listener to be removed.
     */
    public void removeGraphListener(GraphListener listener);
}
| 391 |
0 | Create_ds/clerezza/api/src/main/java/org/apache | Create_ds/clerezza/api/src/main/java/org/apache/clerezza/Graph.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza;
import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.locks.ReadWriteLock;
/**
 * A set of triples (as it doesn't allow duplicates); it does however
 * not extend {@link java.util.Set} as it doesn't inherit its
 * specification for <code>hashCode()</code> and <code>equals</code>.
 * Listening for modifications of the triples is supported via the
 * WatchableGraph sub-interface.
 *
 * @author reto
 */
public interface Graph extends Collection<Triple> {

    /**
     * Filters triples given a pattern.
     * filter(null, null, null) returns the same as iterator()
     * (null presumably acts as a wildcard in each position — see the
     * statement above; confirm against implementations).
     *
     * @param subject the subject to match
     * @param predicate the predicate to match
     * @param object the object to match
     * @return an <code>Iterator</code> over the matching triples
     */
    public Iterator<Triple> filter(BlankNodeOrIRI subject, IRI predicate,
            RDFTerm object);

    /**
     * Returns true if <code>other</code> describes the same graph and will
     * always describe the same graph as this instance, false otherwise.
     * It returns true if this == other or if it
     * is otherwise guaranteed that changes to one of the instances are
     * immediately reflected in the other or if both graphs are immutable.
     *
     * @param other the object to compare with
     * @return true if other == this
     */
    @Override
    public boolean equals(Object other);

    /**
     * Returns an ImmutableGraph describing the graph at the current point in
     * time. If <code>this</code> is an instance of ImmutableGraph this can
     * safely return <code>this</code>.
     *
     * @return the current time slice of the possibly mutable graph represented by the instance.
     */
    public ImmutableGraph getImmutableGraph();

    /**
     * The lock provided by this method allows to create read- and write-locks
     * that span multiple method calls. Having a read lock prevents other
     * threads from writing to this Graph, having a write-lock prevents other
     * threads from reading and writing. Implementations would typically
     * return a <code>java.util.concurrent.locks.ReentrantReadWriteLock</code>.
     * Immutable instances (such as instances of <code>ImmutableGraph</code>)
     * or instances used in transactions where concurrent access of the same
     * instance is not an issue may return a no-op ReadWriteLock (i.e. one
     * whose returned ReadLock and WriteLock instances have methods that do
     * not do anything)
     *
     * @return the lock of this Graph
     */
    ReadWriteLock getLock();
}
| 392 |
0 | Create_ds/clerezza/api/src/main/java/org/apache | Create_ds/clerezza/api/src/main/java/org/apache/clerezza/RDFTerm.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza;
/**
 * An <a href= "http://www.w3.org/TR/rdf11-concepts/#dfn-rdf-term" >RDF-1.1
 * Term</a>, as defined by <a href= "http://www.w3.org/TR/rdf11-concepts/"
 * >RDF-1.1 Concepts and Abstract Syntax</a>, a W3C Recommendation published on
 * 25 February 2014.
 * <p>
 * Marker interface with no members of its own.
 *
 * @see <a href= "http://www.w3.org/TR/rdf11-concepts/#dfn-rdf-term" >RDF-1.1
 * Term</a>
 */
public interface RDFTerm {
}
| 393 |
0 | Create_ds/clerezza/api/src/main/java/org/apache | Create_ds/clerezza/api/src/main/java/org/apache/clerezza/BlankNode.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza;
/**
 * A Blank Node represents a resource,
 * but does not indicate a URI for the resource. Blank nodes act like
 * existentially qualified variables in first order logic.
 * <p>
 * An <a href= "http://www.w3.org/TR/rdf11-concepts/#dfn-blank-node" >RDF-1.1
 * Blank Node</a>, as defined by <a href=
 * "http://www.w3.org/TR/rdf11-concepts/#section-blank-nodes" >RDF-1.1 Concepts
 * and Abstract Syntax</a>, a W3C Recommendation published on 25 February 2014.<br>
 * <p>
 * Note that: Blank nodes are disjoint from IRIs and literals. Otherwise,
 * the set of possible blank nodes is arbitrary. RDF makes no reference to any
 * internal structure of blank nodes.
 * <p>
 * This class declares no members; equals/hashCode are inherited from Object,
 * so identity is reference identity and every BlankNode instance denotes a
 * distinct blank node.
 *
 * @see <a href= "http://www.w3.org/TR/rdf11-concepts/#dfn-blank-node">RDF-1.1
 * Blank Node</a>
 */
public class BlankNode implements BlankNodeOrIRI {
}
| 394 |
0 | Create_ds/clerezza/api/src/main/java/org/apache | Create_ds/clerezza/api/src/main/java/org/apache/clerezza/ImmutableGraph.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza;
/**
 * A graph, modeled as a set of triples.
 * This interface does not extend java.util.Set because of the different
 * identity constraints, i.e. two <code>Graph</code>s may be equal (isomorphic)
 * even if the sets of triples are not.
 * <p>
 * Implementations MUST be immutable and throw respective exceptions, when
 * add/remove-methods are called.
 *
 * @author reto
 */
public interface ImmutableGraph extends Graph {

    /**
     * Returns true if the two graphs are isomorphic
     *
     * @return true if the two graphs are isomorphic
     */
    @Override
    public boolean equals(Object obj);

    /**
     * Returns the sum of the blank-node independent hashes of the triples.
     * More precisely the hash of a triple is calculated as follows:
     * {@literal (hash(subject) >> 1) ^ hash(predicate) ^ (hash(object) << 1)}
     * NOTE(review): the formula previously read {@literal hash(hashCode)} for
     * the last two terms, apparently a typo; corrected by analogy with the
     * formula documented on Triple#hashCode — confirm against implementations.
     * The hash-function returns the hashCode of the argument
     * for grounded arguments and 0 otherwise.
     *
     * @return hash code
     */
    @Override
    public int hashCode();
}
0 | Create_ds/clerezza/api/src/main/java/org/apache | Create_ds/clerezza/api/src/main/java/org/apache/clerezza/Literal.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza;
/**
 * Represents a literal value that can be a node in an RDF Graph.
 * Literals are used to identify values such as numbers and dates by
 * means of a lexical representation. There are two types of literals:
 * PlainLiteral and TypedLiteral.
 *
 * @author reto
 */
public interface Literal extends RDFTerm {

    /**
     * The lexical form of this literal, represented by a <a
     * href="http://www.unicode.org/versions/latest/">Unicode string</a>.
     *
     * @return The lexical form of this literal.
     * @see <a
     * href="http://www.w3.org/TR/rdf11-concepts/#dfn-lexical-form">RDF-1.1
     * Literal lexical form</a>
     */
    String getLexicalForm();

    /**
     * The IRI identifying the datatype that determines how the lexical form
     * maps to a literal value.
     *
     * @return The datatype IRI for this literal.
     * @see <a
     * href="http://www.w3.org/TR/rdf11-concepts/#dfn-datatype-iri">RDF-1.1
     * Literal datatype IRI</a>
     */
    IRI getDataType();

    /**
     * If and only if the datatype IRI is <a
     * href="http://www.w3.org/1999/02/22-rdf-syntax-ns#langString"
     * >http://www.w3.org/1999/02/22-rdf-syntax-ns#langString</a>, the language
     * tag for this Literal is a language tag as defined by <a
     * href="http://tools.ietf.org/html/bcp47">BCP47</a>.<br>
     * If the datatype IRI is not <a
     * href="http://www.w3.org/1999/02/22-rdf-syntax-ns#langString"
     * >http://www.w3.org/1999/02/22-rdf-syntax-ns#langString</a>, this method
     * must return null.
     *
     * @return The language tag of the literal or null if no language tag is defined
     * @see <a
     * href="http://www.w3.org/TR/rdf11-concepts/#dfn-language-tag">RDF-1.1
     * Literal language tag</a>
     */
    public Language getLanguage();

    /**
     * Returns true if <code>obj</code> is an instance of
     * <code>Literal</code> that is term-equal with this, false otherwise.
     * <p>
     * Two literals are term-equal (the same RDF literal) if and only if the
     * two lexical forms, the two datatype IRIs, and the two language tags (if
     * any) compare equal, character by character.
     *
     * @return true if obj equals this, false otherwise.
     */
    public boolean equals(Object obj);

    /**
     * Returns the hash code of the lexical form plus the hash code of the
     * datatype plus, if the literal has a language, the hash code of the
     * language.
     *
     * @return hash code
     */
    public int hashCode();
}
| 396 |
0 | Create_ds/clerezza/api/src/main/java/org/apache | Create_ds/clerezza/api/src/main/java/org/apache/clerezza/BlankNodeOrIRI.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza;
/**
 * Represents an RDFTerm that is not a <code>Literal</code>.
 * This is a marker interface implemented by <code>IRI</code>
 * and <code>BlankNode</code>.
 *
 * @author reto
 */
public interface BlankNodeOrIRI extends RDFTerm {
}
| 397 |
0 | Create_ds/clerezza/api/src/main/java/org/apache | Create_ds/clerezza/api/src/main/java/org/apache/clerezza/IRI.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza;
import java.io.Serializable;
/**
* Represents an RDF URI Reference
* <p>
* RDF URI References are defined in section 6.4 RDF URI References of
* http://www.w3.org/TR/2004/REC-rdf-concepts-20040210/#section-Graph-URIref
* <p>
* Note that an RDF URI Reference is not the same as defined by RFC3986,
* RDF URI References support most unicode characters
*
* @author reto
*/
public class IRI implements BlankNodeOrIRI, Serializable {
private String unicodeString;
public IRI(String unicodeString) {
this.unicodeString = unicodeString;
}
/**
* @return the unicode string that produces the URI
*/
public String getUnicodeString() {
return unicodeString;
}
/**
* Returns true iff <code>obj</code> == <code>UriRef</code>
*
* @param obj
* @return true if obj is an instanceof UriRef with
* the same unicode-string, false otherwise
*/
@Override
public boolean equals(Object obj) {
if (!(obj instanceof IRI)) {
return false;
}
return unicodeString.equals(((IRI) obj).getUnicodeString());
}
/**
* @return 5 + the hashcode of the string
*/
@Override
public int hashCode() {
int hash = 5 + unicodeString.hashCode();
return hash;
}
@Override
public String toString() {
StringBuilder buffer = new StringBuilder();
buffer.append('<');
buffer.append(unicodeString);
buffer.append('>');
return buffer.toString();
}
} | 398 |
0 | Create_ds/clerezza/api/src/main/java/org/apache | Create_ds/clerezza/api/src/main/java/org/apache/clerezza/Triple.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.apache.clerezza;
/**
 * A structure containing a subject, a predicate, and an object.
 * Also known as a statement.
 *
 * @author reto
 */
public interface Triple {

    /** @return the subject of this triple */
    BlankNodeOrIRI getSubject();

    /** @return the predicate of this triple */
    IRI getPredicate();

    /** @return the object of this triple */
    RDFTerm getObject();

    /**
     * @param obj the object to compare with
     * @return true iff subject, predicate, and object of both triples are equal
     */
    @Override
    boolean equals(Object obj);

    /**
     * The hash code is computed as follows:
     * {@literal (subject.hashCode() >> 1) ^ predicate.hashCode() ^ (object.hashCode() << 1)}
     * <p>
     * Note that the hash returned is computed including the hash of BNodes, so
     * it is not blank-node blind as in Graph.
     * <p>
     * This would have to change if Triple should extend Graph.
     *
     * @return hash code
     */
    @Override
    int hashCode();
}
| 399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.