index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly/ProviderBundleTrackerCustomizerGenericCapabilityTest.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Dictionary;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import org.apache.aries.mytest.MySPI;
import org.apache.aries.mytest.MySPI2;
import org.apache.aries.spifly.impl4.MySPIImpl4a;
import org.apache.aries.spifly.impl4.MySPIImpl4b;
import org.apache.aries.spifly.impl4.MySPIImpl4c;
import org.easymock.EasyMock;
import org.easymock.IAnswer;
import org.junit.Test;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.framework.ServiceFactory;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.ServiceRegistration;
import org.osgi.framework.wiring.BundleRequirement;
import org.osgi.framework.wiring.BundleRevision;
import org.osgi.framework.wiring.BundleWire;
import org.osgi.framework.wiring.BundleWiring;
import aQute.bnd.header.Parameters;
/**
 * Unit tests for {@code ProviderBundleTrackerCustomizer}, exercising SPI provider
 * bundles that opt in through the generic OSGi ServiceLoader Mediator headers
 * (Require-Capability / Provide-Capability with the osgi.serviceloader namespace).
 * All collaborators (bundles, bundle contexts, registrations, wiring) are EasyMock
 * mocks; no OSGi framework is started.
 */
public class ProviderBundleTrackerCustomizerGenericCapabilityTest {
// A provider bundle is tracked on addingBundle() and its service registration
// is unregistered again on removedBundle().
@Test
public void testAddingRemovedBundle() throws Exception {
Bundle mediatorBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(mediatorBundle.getBundleId()).andReturn(42l).anyTimes();
EasyMock.replay(mediatorBundle);
BaseActivator activator = new BaseActivator() {
@Override
public void start(BundleContext context) throws Exception {}
};
ProviderBundleTrackerCustomizer customizer = new ProviderBundleTrackerCustomizer(activator, mediatorBundle);
@SuppressWarnings("rawtypes")
ServiceRegistration sreg = EasyMock.createMock(ServiceRegistration.class);
// Strict mock: unregister() must be invoked exactly once (verified at the end).
sreg.unregister();
EasyMock.expectLastCall();
EasyMock.replay(sreg);
BundleContext implBC = mockSPIBundleContext(sreg);
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.REQUIRE_CAPABILITY, SpiFlyConstants.PROVIDER_REQUIREMENT);
headers.put(SpiFlyConstants.PROVIDE_CAPABILITY, SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "; " +
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "=org.apache.aries.mytest.MySPI");
Bundle implBundle = mockSPIBundle(implBC, headers);
assertEquals("Precondition", 0, activator.findProviderBundles("org.apache.aries.mytest.MySPI").size());
// Call addingBundle();
@SuppressWarnings("rawtypes")
List<ServiceRegistration> registrations = customizer.addingBundle(implBundle, null);
Collection<Bundle> bundles = activator.findProviderBundles("org.apache.aries.mytest.MySPI");
assertEquals(1, bundles.size());
assertSame(implBundle, bundles.iterator().next());
// The bc.registerService() call should now have been made
EasyMock.verify(implBC);
// Call removedBundle();
customizer.removedBundle(implBundle, null, registrations);
Collection<Bundle> bundles2 = activator.findProviderBundles("org.apache.aries.mytest.MySPI");
assertEquals(0, bundles2.size());
// sreg.unregister() should have been called.
EasyMock.verify(sreg);
}
// The ServiceLoader capability/requirement headers may live in an attached
// fragment; the host bundle must still be recognised as the provider.
@Test
public void testCapReqHeadersInFragment() throws Exception {
Bundle mediatorBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(mediatorBundle.getBundleId()).andReturn(42l).anyTimes();
EasyMock.replay(mediatorBundle);
BaseActivator activator = new BaseActivator() {
@Override
public void start(BundleContext context) throws Exception {}
};
ProviderBundleTrackerCustomizer customizer = new ProviderBundleTrackerCustomizer(activator, mediatorBundle);
ServiceRegistration<?> sreg = EasyMock.createNiceMock(ServiceRegistration.class);
EasyMock.replay(sreg);
BundleContext implBC = mockSPIBundleContext(sreg);
Dictionary<String, String> headers = new Hashtable<String, String>();
// A typical requirement that is not for us...
headers.put(SpiFlyConstants.REQUIRE_CAPABILITY, "osgi.ee;filter:=\"(&(osgi.ee=JavaSE)(version=1.6))\"");
// Mock up the host->fragment wiring (osgi.wiring.host) so the customizer can
// discover the fragment that carries the real SPI headers.
List<BundleWire> wires = new ArrayList<BundleWire>();
BundleWire wire = EasyMock.createMock(BundleWire.class);
Bundle fragment = EasyMock.createMock(Bundle.class);
BundleRevision frev = EasyMock.createMock(BundleRevision.class);
EasyMock.expect(frev.getBundle()).andReturn(fragment).anyTimes();
EasyMock.replay(frev);
BundleRequirement req = EasyMock.createMock(BundleRequirement.class);
EasyMock.expect(req.getRevision()).andReturn(frev).anyTimes();
EasyMock.replay(req);
EasyMock.expect(wire.getRequirement()).andReturn(req).anyTimes();
EasyMock.replay(wire);
wires.add(wire);
BundleWiring bw = EasyMock.createMock(BundleWiring.class);
EasyMock.expect(bw.getProvidedWires("osgi.wiring.host")).andReturn(wires).anyTimes();
EasyMock.replay(bw);
BundleRevision rev = EasyMock.createMock(BundleRevision.class);
EasyMock.expect(rev.getWiring()).andReturn(bw).anyTimes();
EasyMock.expect(rev.getTypes()).andReturn(0).anyTimes();
EasyMock.replay(rev);
Bundle implBundle = mockSPIBundle(implBC, headers, rev);
// The fragment supplies the provider requirement + capability headers.
Dictionary<String, String> fheaders = new Hashtable<String, String>();
fheaders.put(SpiFlyConstants.REQUIRE_CAPABILITY, SpiFlyConstants.PROVIDER_REQUIREMENT);
fheaders.put(SpiFlyConstants.PROVIDE_CAPABILITY, SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "; " +
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "=org.apache.aries.mytest.MySPI");
EasyMock.expect(fragment.getHeaders()).andReturn(fheaders).anyTimes();
EasyMock.replay(fragment);
assertEquals("Precondition", 0, activator.findProviderBundles("org.apache.aries.mytest.MySPI").size());
customizer.addingBundle(implBundle, null);
Collection<Bundle> bundles = activator.findProviderBundles("org.apache.aries.mytest.MySPI");
assertEquals(1, bundles.size());
assertSame(implBundle, bundles.iterator().next());
}
// Custom attributes declared on the Provide-Capability clause are exposed via
// BaseActivator.getCustomBundleAttributes().
@Test
public void testCustomAttributesBundle() throws Exception {
Bundle mediatorBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(mediatorBundle.getBundleId()).andReturn(42l).anyTimes();
EasyMock.replay(mediatorBundle);
BaseActivator activator = new BaseActivator() {
@Override
public void start(BundleContext context) throws Exception {}
};
ProviderBundleTrackerCustomizer customizer = new ProviderBundleTrackerCustomizer(activator, mediatorBundle);
@SuppressWarnings("rawtypes")
ServiceRegistration sreg = EasyMock.createMock(ServiceRegistration.class);
EasyMock.replay(sreg);
BundleContext implBC = mockSPIBundleContext(sreg);
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.REQUIRE_CAPABILITY, SpiFlyConstants.PROVIDER_REQUIREMENT);
headers.put(SpiFlyConstants.PROVIDE_CAPABILITY, SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "; " +
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "=org.apache.aries.mytest.MySPI; approval=yeah; ");
Bundle implBundle = mockSPIBundle(implBC, headers);
@SuppressWarnings("rawtypes")
List<ServiceRegistration> registrations = customizer.addingBundle(implBundle, null);
assertEquals(1, registrations.size());
Collection<Bundle> bundles = activator.findProviderBundles("org.apache.aries.mytest.MySPI");
assertEquals(1, bundles.size());
assertSame(implBundle, bundles.iterator().next());
// 4 = the custom 'approval' attribute plus, presumably, 3 standard
// attributes added by the implementation — TODO confirm against
// ProviderBundleTrackerCustomizer.
Map<String, Object> attrs = activator.getCustomBundleAttributes("org.apache.aries.mytest.MySPI", implBundle);
assertEquals(4, attrs.size());
assertEquals("yeah", attrs.get("approval"));
}
// With the auto-provider instruction "*" a bundle without any SPI headers is
// still registered as a provider based on its META-INF/services entries.
@Test
public void testAutoProviderSystemProperty() throws Exception {
Bundle mediatorBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(mediatorBundle.getBundleId()).andReturn(42l).anyTimes();
EasyMock.replay(mediatorBundle);
BaseActivator activator = new BaseActivator() {
@Override
public void start(BundleContext context) throws Exception {}
};
activator.setAutoProviderInstructions(Optional.of(new Parameters("*")));
ProviderBundleTrackerCustomizer customizer = new ProviderBundleTrackerCustomizer(activator, mediatorBundle);
@SuppressWarnings("rawtypes")
ServiceRegistration sreg = EasyMock.createMock(ServiceRegistration.class);
EasyMock.replay(sreg);
BundleContext implBC = mockSPIBundleContext(sreg);
Dictionary<String, String> headers = new Hashtable<String, String>();
Bundle implBundle = mockSPIBundle(implBC, headers);
@SuppressWarnings("rawtypes")
List<ServiceRegistration> registrations = customizer.addingBundle(implBundle, null);
assertEquals(1, registrations.size());
Collection<Bundle> bundles = activator.findProviderBundles("org.apache.aries.mytest.MySPI");
assertEquals(1, bundles.size());
assertSame(implBundle, bundles.iterator().next());
// No custom 'approval' attribute this time, only the standard ones.
Map<String, Object> attrs = activator.getCustomBundleAttributes("org.apache.aries.mytest.MySPI", implBundle);
assertEquals(3, attrs.size());
assertNull(attrs.get("approval"));
}
// Auto-provider instruction with an extra attribute ("*;approval=yeah")
// propagates that attribute to the custom bundle attributes.
@Test
public void testAutoProviderSystemPropertyPlusProperty() throws Exception {
Bundle mediatorBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(mediatorBundle.getBundleId()).andReturn(42l).anyTimes();
EasyMock.replay(mediatorBundle);
BaseActivator activator = new BaseActivator() {
@Override
public void start(BundleContext context) throws Exception {}
};
activator.setAutoProviderInstructions(Optional.of(new Parameters("*;approval=yeah")));
ProviderBundleTrackerCustomizer customizer = new ProviderBundleTrackerCustomizer(activator, mediatorBundle);
@SuppressWarnings("rawtypes")
ServiceRegistration sreg = EasyMock.createMock(ServiceRegistration.class);
EasyMock.replay(sreg);
BundleContext implBC = mockSPIBundleContext(sreg);
Dictionary<String, String> headers = new Hashtable<String, String>();
Bundle implBundle = mockSPIBundle(implBC, headers);
@SuppressWarnings("rawtypes")
List<ServiceRegistration> registrations = customizer.addingBundle(implBundle, null);
assertEquals(1, registrations.size());
Collection<Bundle> bundles = activator.findProviderBundles("org.apache.aries.mytest.MySPI");
assertEquals(1, bundles.size());
assertSame(implBundle, bundles.iterator().next());
Map<String, Object> attrs = activator.getCustomBundleAttributes("org.apache.aries.mytest.MySPI", implBundle);
assertEquals(4, attrs.size());
assertEquals("yeah", attrs.get("approval"));
}
// Auto-provider instruction naming a specific bundle symbolic name ("bsn",
// which matches the mock bundle's BSN) selects just that bundle.
@Test
public void testAutoProviderSystemPropertyTargetBundle() throws Exception {
Bundle mediatorBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(mediatorBundle.getBundleId()).andReturn(42l).anyTimes();
EasyMock.replay(mediatorBundle);
BaseActivator activator = new BaseActivator() {
@Override
public void start(BundleContext context) throws Exception {}
};
activator.setAutoProviderInstructions(Optional.of(new Parameters("bsn")));
ProviderBundleTrackerCustomizer customizer = new ProviderBundleTrackerCustomizer(activator, mediatorBundle);
@SuppressWarnings("rawtypes")
ServiceRegistration sreg = EasyMock.createMock(ServiceRegistration.class);
EasyMock.replay(sreg);
BundleContext implBC = mockSPIBundleContext(sreg);
Dictionary<String, String> headers = new Hashtable<String, String>();
Bundle implBundle = mockSPIBundle(implBC, headers);
@SuppressWarnings("rawtypes")
List<ServiceRegistration> registrations = customizer.addingBundle(implBundle, null);
assertEquals(1, registrations.size());
Collection<Bundle> bundles = activator.findProviderBundles("org.apache.aries.mytest.MySPI");
assertEquals(1, bundles.size());
assertSame(implBundle, bundles.iterator().next());
Map<String, Object> attrs = activator.getCustomBundleAttributes("org.apache.aries.mytest.MySPI", implBundle);
assertEquals(3, attrs.size());
}
// An empty register directive ( register:="" ) suppresses OSGi service
// registration, but the bundle is still tracked as a provider.
@Test
public void testNonServiceRegistryBundle() throws Exception {
Bundle mediatorBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(mediatorBundle.getBundleId()).andReturn(42l).anyTimes();
EasyMock.replay(mediatorBundle);
BaseActivator activator = new BaseActivator() {
@Override
public void start(BundleContext context) throws Exception {}
};
ProviderBundleTrackerCustomizer customizer = new ProviderBundleTrackerCustomizer(activator, mediatorBundle);
@SuppressWarnings("rawtypes")
ServiceRegistration sreg = EasyMock.createMock(ServiceRegistration.class);
EasyMock.replay(sreg);
BundleContext implBC = mockSPIBundleContext(sreg);
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.REQUIRE_CAPABILITY, SpiFlyConstants.PROVIDER_REQUIREMENT);
headers.put(SpiFlyConstants.PROVIDE_CAPABILITY, SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "; " +
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "=org.apache.aries.mytest.MySPI; approval=yeah;" +
SpiFlyConstants.REGISTER_DIRECTIVE + "=\"\"");
Bundle implBundle = mockSPIBundle(implBC, headers);
@SuppressWarnings("rawtypes")
List<ServiceRegistration> registrations = customizer.addingBundle(implBundle, null);
assertEquals(0, registrations.size());
Collection<Bundle> bundles = activator.findProviderBundles("org.apache.aries.mytest.MySPI");
assertEquals(1, bundles.size());
assertSame(implBundle, bundles.iterator().next());
Map<String, Object> attrs = activator.getCustomBundleAttributes("org.apache.aries.mytest.MySPI", implBundle);
assertEquals(attrs.toString(), 1, attrs.size());
assertEquals("yeah", attrs.get("approval"));
}
// Placeholder for testing non-String attribute datatypes on the capability.
@Test
public void testRegisterAltAttributeDatatype() throws Exception {
// TODO
}
// A register directive naming one implementation class limits registration to
// that class; custom attributes become service registration properties.
@Test
public void testServiceSubsetSelectionAndRegistrationProperties() throws Exception {
Bundle mediatorBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(mediatorBundle.getBundleId()).andReturn(42l).anyTimes();
EasyMock.replay(mediatorBundle);
BaseActivator activator = new BaseActivator() {
@Override
public void start(BundleContext context) throws Exception {}
};
ProviderBundleTrackerCustomizer customizer = new ProviderBundleTrackerCustomizer(activator, mediatorBundle);
BundleContext implBC = mockSPIBundleContext4();
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.REQUIRE_CAPABILITY, SpiFlyConstants.PROVIDER_REQUIREMENT);
headers.put(SpiFlyConstants.PROVIDE_CAPABILITY, SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "; " +
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "=org.apache.aries.mytest.MySPI2; approval=yeah; " +
SpiFlyConstants.REGISTER_DIRECTIVE + "=\"org.apache.aries.spifly.impl4.MySPIImpl4b\"");
Bundle implBundle = mockSPIBundle4(implBC, headers);
@SuppressWarnings("rawtypes")
List<ServiceRegistration> registrations = customizer.addingBundle(implBundle, null);
assertEquals(1, registrations.size());
String[] objectClassProp = (String [])registrations.iterator().next().getReference().getProperty(Constants.OBJECTCLASS);
assertEquals(1, objectClassProp.length);
assertEquals("org.apache.aries.mytest.MySPI2", objectClassProp[0]);
assertNotNull(registrations.iterator().next().getReference().getProperty(SpiFlyConstants.SERVICELOADER_MEDIATOR_PROPERTY));
assertEquals("yeah", registrations.iterator().next().getReference().getProperty("approval"));
}
// Two capability clauses (MySPI with attributes, MySPI2 without): attributes
// attach only to the registrations of the clause that declared them.
@Test
public void testProvidedSPIDirective() throws Exception {
Bundle mediatorBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(mediatorBundle.getBundleId()).andReturn(42l).anyTimes();
EasyMock.replay(mediatorBundle);
BaseActivator activator = new BaseActivator() {
@Override
public void start(BundleContext context) throws Exception {}
};
ProviderBundleTrackerCustomizer customizer = new ProviderBundleTrackerCustomizer(activator, mediatorBundle);
BundleContext implBC = mockSPIBundleContext4();
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.REQUIRE_CAPABILITY, SpiFlyConstants.PROVIDER_REQUIREMENT);
headers.put(SpiFlyConstants.PROVIDE_CAPABILITY,
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "; " +
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "=org.apache.aries.mytest.MySPI; approval=yeah, " +
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "; " +
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "=org.apache.aries.mytest.MySPI2");
Bundle implBundle = mockSPIBundle4(implBC, headers);
@SuppressWarnings("rawtypes")
List<ServiceRegistration> registrations = customizer.addingBundle(implBundle, null);
assertEquals("Expected 3 registrations, one for MySPI and 2 for MySPI2", 3, registrations.size());
Set<String> expectedObjectClasses = new HashSet<String>(Arrays.asList("org.apache.aries.mytest.MySPI", "org.apache.aries.mytest.MySPI2"));
Set<String> actualObjectClasses = new HashSet<String>();
boolean foundMySPI = false;
boolean foundMySPI2 = false;
for (@SuppressWarnings("rawtypes") ServiceRegistration sr : registrations) {
List<String> objectClasses = Arrays.asList((String[]) sr.getReference().getProperty(Constants.OBJECTCLASS));
actualObjectClasses.addAll(objectClasses);
assertNotNull(sr.getReference().getProperty(SpiFlyConstants.SERVICELOADER_MEDIATOR_PROPERTY));
if (objectClasses.contains("org.apache.aries.mytest.MySPI")) {
assertEquals("yeah", sr.getReference().getProperty("approval"));
foundMySPI = true;
} else if (objectClasses.contains("org.apache.aries.mytest.MySPI2")) {
assertNull(sr.getReference().getProperty("approval"));
foundMySPI2 = true;
}
}
assertTrue(foundMySPI);
assertTrue(foundMySPI2);
assertEquals(expectedObjectClasses, actualObjectClasses);
}
// Each implementation listed in META-INF/services gets its own registration:
// MySPIImpl4a for MySPI, MySPIImpl4b and MySPIImpl4c for MySPI2. The service
// objects returned through the mocked context are invoked to verify identity.
@Test
public void testMultipleServiceInstancesAndTypes() throws Exception {
Bundle mediatorBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(mediatorBundle.getBundleId()).andReturn(42l).anyTimes();
EasyMock.replay(mediatorBundle);
BaseActivator activator = new BaseActivator() {
@Override
public void start(BundleContext context) throws Exception {}
};
ProviderBundleTrackerCustomizer customizer = new ProviderBundleTrackerCustomizer(activator, mediatorBundle);
BundleContext implBC = mockSPIBundleContext4();
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.REQUIRE_CAPABILITY, SpiFlyConstants.PROVIDER_REQUIREMENT);
headers.put(SpiFlyConstants.PROVIDE_CAPABILITY,
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "; " +
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "=org.apache.aries.mytest.MySPI," +
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "; " +
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "=org.apache.aries.mytest.MySPI2");
Bundle implBundle = mockSPIBundle4(implBC, headers);
@SuppressWarnings("rawtypes")
List<ServiceRegistration> registrations = customizer.addingBundle(implBundle, null);
assertEquals(3, registrations.size());
boolean foundA = false, foundB = false, foundC = false;
for (@SuppressWarnings("rawtypes") ServiceRegistration sreg : registrations) {
@SuppressWarnings("rawtypes")
ServiceReference sref = sreg.getReference();
String objectClassName = ((String [])sref.getProperty(Constants.OBJECTCLASS))[0];
String serviceImplClassName = (String) sref.getProperty(SpiFlyConstants.PROVIDER_IMPLCLASS_PROPERTY);
if (MySPIImpl4a.class.getName().equals(serviceImplClassName)) {
assertEquals("org.apache.aries.mytest.MySPI", objectClassName);
@SuppressWarnings("unchecked")
MySPI svc = (MySPI) implBC.getService(sreg.getReference());
assertEquals("impl4a", svc.someMethod(""));
foundA = true;
} else if (MySPIImpl4b.class.getName().equals(serviceImplClassName)) {
assertEquals("org.apache.aries.mytest.MySPI2", objectClassName);
@SuppressWarnings("unchecked")
MySPI2 svc = (MySPI2) implBC.getService(sreg.getReference());
assertEquals("impl4b", svc.someMethod(""));
foundB = true;
} else if (MySPIImpl4c.class.getName().equals(serviceImplClassName)) {
assertEquals("org.apache.aries.mytest.MySPI2", objectClassName);
@SuppressWarnings("unchecked")
MySPI2 svc = (MySPI2) implBC.getService(sreg.getReference());
assertEquals("impl4c", svc.someMethod(""));
foundC = true;
}
}
assertTrue(foundA);
assertTrue(foundB);
assertTrue(foundC);
}
// Only the provider requirement header (no Provide-Capability): the bundle is
// tracked as a provider but no OSGi service is registered.
@Test
public void testNoServiceRegistration() throws Exception {
Bundle mediatorBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(mediatorBundle.getBundleId()).andReturn(42l).anyTimes();
EasyMock.replay(mediatorBundle);
BaseActivator activator = new BaseActivator() {
@Override
public void start(BundleContext context) throws Exception {}
};
ProviderBundleTrackerCustomizer customizer = new ProviderBundleTrackerCustomizer(activator, mediatorBundle);
@SuppressWarnings("rawtypes")
ServiceRegistration sreg = EasyMock.createMock(ServiceRegistration.class);
EasyMock.replay(sreg);
BundleContext implBC = mockSPIBundleContext(sreg);
Bundle implBundle = mockSPIBundle(implBC, SpiFlyConstants.PROVIDER_REQUIREMENT);
@SuppressWarnings("rawtypes")
List<ServiceRegistration> registrations = customizer.addingBundle(implBundle, null);
assertEquals(0, registrations.size());
Collection<Bundle> bundles = activator.findProviderBundles("org.apache.aries.mytest.MySPI");
assertEquals(1, bundles.size());
assertSame(implBundle, bundles.iterator().next());
}
// Providers located on the Bundle-ClassPath (embedded jars) are discovered as
// well: three implementations across two embedded jars must all be registered.
@SuppressWarnings({ "resource", "unchecked" })
@Test
public void testAddingBundleWithBundleClassPath() throws Exception {
Bundle mediatorBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(mediatorBundle.getBundleId()).andReturn(42l).anyTimes();
EasyMock.replay(mediatorBundle);
BaseActivator activator = new BaseActivator() {
@Override
public void start(BundleContext context) throws Exception {}
};
ProviderBundleTrackerCustomizer customizer = new ProviderBundleTrackerCustomizer(activator, mediatorBundle);
BundleContext implBC = EasyMock.createMock(BundleContext.class);
// Strict expectation: exactly 3 registrations (2 from embedded.jar, 1 from
// embedded2.jar), verified at the end of the test.
EasyMock.<Object>expect(implBC.registerService(
EasyMock.eq("org.apache.aries.mytest.MySPI"),
EasyMock.isA(ServiceFactory.class),
(Dictionary<String,?>) EasyMock.anyObject())).andReturn(EasyMock.createNiceMock(ServiceRegistration.class)).times(3);
EasyMock.replay(implBC);
Bundle implBundle = EasyMock.createNiceMock(Bundle.class);
EasyMock.expect(implBundle.getBundleContext()).andReturn(implBC).anyTimes();
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.REQUIRE_CAPABILITY, SpiFlyConstants.PROVIDER_REQUIREMENT);
headers.put(SpiFlyConstants.PROVIDE_CAPABILITY,
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "; " +
SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE + "=org.apache.aries.mytest.MySPI");
// 'non-jar.jar' does not resolve to a resource; the customizer must skip it.
headers.put(Constants.BUNDLE_CLASSPATH, ".,non-jar.jar,embedded.jar,embedded2.jar");
EasyMock.expect(implBundle.getHeaders()).andReturn(headers).anyTimes();
URL embeddedJar = getClass().getResource("/embedded.jar");
assertNotNull("precondition", embeddedJar);
EasyMock.expect(implBundle.getResource("embedded.jar")).andReturn(embeddedJar).anyTimes();
URL embedded2Jar = getClass().getResource("/embedded2.jar");
assertNotNull("precondition", embedded2Jar);
EasyMock.expect(implBundle.getResource("embedded2.jar")).andReturn(embedded2Jar).anyTimes();
URL dir = new URL("jar:" + embeddedJar + "!/META-INF/services");
assertNotNull("precondition", dir);
EasyMock.expect(implBundle.getResource("/META-INF/services")).andReturn(dir).anyTimes();
EasyMock.expect(implBundle.findEntries((String) EasyMock.anyObject(), (String) EasyMock.anyObject(), EasyMock.anyBoolean())).
andReturn(null).anyTimes();
// Load the impl classes from the embedded jars via URLClassLoaders so the
// mocked Bundle.loadClass() can hand them out.
ClassLoader cl = new URLClassLoader(new URL [] {embeddedJar}, getClass().getClassLoader());
Class<?> clsA = cl.loadClass("org.apache.aries.spifly.impl2.MySPIImpl2a");
EasyMock.<Object>expect(implBundle.loadClass("org.apache.aries.spifly.impl2.MySPIImpl2a")).andReturn(clsA).anyTimes();
Class<?> clsB = cl.loadClass("org.apache.aries.spifly.impl2.MySPIImpl2b");
EasyMock.<Object>expect(implBundle.loadClass("org.apache.aries.spifly.impl2.MySPIImpl2b")).andReturn(clsB).anyTimes();
ClassLoader cl2 = new URLClassLoader(new URL [] {embedded2Jar}, getClass().getClassLoader());
Class<?> clsC = cl2.loadClass("org.apache.aries.spifly.impl3.MySPIImpl3");
EasyMock.<Object>expect(implBundle.loadClass("org.apache.aries.spifly.impl3.MySPIImpl3")).andReturn(clsC).anyTimes();
EasyMock.replay(implBundle);
assertEquals("Precondition", 0, activator.findProviderBundles("org.apache.aries.mytest.MySPI").size());
// Call addingBundle();
customizer.addingBundle(implBundle, null);
Collection<Bundle> bundles = activator.findProviderBundles("org.apache.aries.mytest.MySPI");
assertEquals(1, bundles.size());
assertSame(implBundle, bundles.iterator().next());
// The bc.registerService() call should now have been made
EasyMock.verify(implBC);
}
/**
 * Mocks a BundleContext whose registerService() returns the supplied
 * registration. Expects exactly one registerService() call for MySPI.
 */
@SuppressWarnings("unchecked")
private BundleContext mockSPIBundleContext(@SuppressWarnings("rawtypes") ServiceRegistration sreg) {
BundleContext implBC = EasyMock.createMock(BundleContext.class);
EasyMock.<Object>expect(implBC.registerService(
EasyMock.eq("org.apache.aries.mytest.MySPI"),
EasyMock.isA(ServiceFactory.class),
(Dictionary<String,?>) EasyMock.anyObject())).andReturn(sreg);
EasyMock.replay(implBC);
return implBC;
}
/** Convenience overload: wraps the header value in a Require-Capability dictionary. */
private Bundle mockSPIBundle(BundleContext implBC, String spiProviderHeader) throws ClassNotFoundException {
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.REQUIRE_CAPABILITY, spiProviderHeader);
return mockSPIBundle(implBC, headers);
}
/** Convenience overload: no BundleRevision mock. */
private Bundle mockSPIBundle(BundleContext implBC, Dictionary<String, String> headers) throws ClassNotFoundException {
return mockSPIBundle(implBC, headers, null);
}
/**
 * Mocks a provider bundle (BSN "bsn") exposing the impl1 test resources: one
 * META-INF/services entry for MySPI backed by MySPIImpl1. An optional
 * BundleRevision mock is wired to Bundle.adapt() for fragment tests.
 */
private Bundle mockSPIBundle(BundleContext implBC, Dictionary<String, String> headers, BundleRevision rev) throws ClassNotFoundException {
if (headers == null)
headers = new Hashtable<String, String>();
Bundle implBundle = EasyMock.createNiceMock(Bundle.class);
EasyMock.expect(implBundle.getBundleContext()).andReturn(implBC).anyTimes();
EasyMock.expect(implBundle.getHeaders()).andReturn(headers).anyTimes();
EasyMock.expect(implBundle.getSymbolicName()).andReturn("bsn").anyTimes();
// List the resources found at META-INF/services in the test bundle
URL dir = getClass().getResource("impl1/META-INF/services");
assertNotNull("precondition", dir);
EasyMock.expect(implBundle.getResource("/META-INF/services")).andReturn(dir).anyTimes();
URL res = getClass().getResource("impl1/META-INF/services/org.apache.aries.mytest.MySPI");
assertNotNull("precondition", res);
EasyMock.expect(implBundle.findEntries("META-INF/services", "*", false)).andReturn(
Collections.enumeration(Collections.singleton(res))).anyTimes();
Class<?> cls = getClass().getClassLoader().loadClass("org.apache.aries.spifly.impl1.MySPIImpl1");
EasyMock.<Object>expect(implBundle.loadClass("org.apache.aries.spifly.impl1.MySPIImpl1")).andReturn(cls).anyTimes();
if (rev != null)
EasyMock.expect(implBundle.adapt(BundleRevision.class)).andReturn(rev).anyTimes();
EasyMock.replay(implBundle);
return implBundle;
}
/**
 * Mocks a BundleContext whose registerService() produces a live
 * ServiceRegistrationImpl capturing the registration, and whose getService()
 * unwraps ServiceFactory objects, so tests can invoke the registered services.
 */
@SuppressWarnings("unchecked")
private BundleContext mockSPIBundleContext4() {
BundleContext implBC = EasyMock.createNiceMock(BundleContext.class);
implBC.registerService(EasyMock.anyString(),
EasyMock.anyObject(),
(Dictionary<String,?>)EasyMock.anyObject());
EasyMock.expectLastCall().andAnswer(new IAnswer<ServiceRegistration<Object>>() {
@Override
public ServiceRegistration<Object> answer() throws Throwable {
final String className = (String) EasyMock.getCurrentArguments()[0];
final Object serviceObject = EasyMock.getCurrentArguments()[1];
final Dictionary<String, Object> registrationProps =
(Dictionary<String, Object>) EasyMock.getCurrentArguments()[2];
return new ServiceRegistrationImpl(className, serviceObject, registrationProps);
}
}).anyTimes();
implBC.getService(EasyMock.anyObject(ServiceReference.class));
EasyMock.expectLastCall().
andAnswer(new IAnswer<Object>() {
@SuppressWarnings("rawtypes")
@Override
public Object answer() throws Throwable {
ServiceRegistrationImpl reg = (ServiceRegistrationImpl) EasyMock.getCurrentArguments()[0];
Object svc = reg.getServiceObject();
if (svc instanceof ServiceFactory) {
return ((ServiceFactory) svc).getService(null, reg);
} else {
return svc;
}
}
}).anyTimes();
EasyMock.replay(implBC);
return implBC;
}
/**
 * Mocks a provider bundle exposing the impl4 test resources: META-INF/services
 * entries for MySPI (MySPIImpl4a) and MySPI2 (MySPIImpl4b, MySPIImpl4c).
 */
private Bundle mockSPIBundle4(BundleContext implBC, Dictionary<String, String> headers) throws ClassNotFoundException {
Bundle implBundle = EasyMock.createNiceMock(Bundle.class);
EasyMock.expect(implBundle.getBundleContext()).andReturn(implBC).anyTimes();
EasyMock.expect(implBundle.getHeaders()).andReturn(headers).anyTimes();
// List the resources found at META-INF/services in the test bundle
URL dir = getClass().getResource("impl4/META-INF/services");
assertNotNull("precondition", dir);
EasyMock.expect(implBundle.getResource("/META-INF/services")).andReturn(dir).anyTimes();
URL res = getClass().getResource("impl4/META-INF/services/org.apache.aries.mytest.MySPI");
assertNotNull("precondition", res);
URL res2 = getClass().getResource("impl4/META-INF/services/org.apache.aries.mytest.MySPI2");
assertNotNull("precondition", res2);
EasyMock.expect(implBundle.findEntries("META-INF/services", "*", false)).andReturn(
Collections.enumeration(Arrays.asList(res, res2))).anyTimes();
Class<?> clsa = getClass().getClassLoader().loadClass("org.apache.aries.spifly.impl4.MySPIImpl4a");
EasyMock.<Object>expect(implBundle.loadClass("org.apache.aries.spifly.impl4.MySPIImpl4a")).andReturn(clsa).anyTimes();
Class<?> clsb = getClass().getClassLoader().loadClass("org.apache.aries.spifly.impl4.MySPIImpl4b");
EasyMock.<Object>expect(implBundle.loadClass("org.apache.aries.spifly.impl4.MySPIImpl4b")).andReturn(clsb).anyTimes();
Class<?> clsc = getClass().getClassLoader().loadClass("org.apache.aries.spifly.impl4.MySPIImpl4c");
EasyMock.<Object>expect(implBundle.loadClass("org.apache.aries.spifly.impl4.MySPIImpl4c")).andReturn(clsc).anyTimes();
EasyMock.replay(implBundle);
return implBundle;
}
/**
 * Minimal in-memory ServiceRegistration that doubles as its own
 * ServiceReference, backed by the registration properties dictionary.
 * Only the operations used by the tests are implemented; the rest throw
 * UnsupportedOperationException.
 */
@SuppressWarnings("rawtypes")
private static class ServiceRegistrationImpl implements ServiceRegistration<Object>, ServiceReference {
private final Object serviceObject;
private final Dictionary<String, Object> properties;
public ServiceRegistrationImpl(String className, Object serviceObject, Dictionary<String, Object> properties) {
this.serviceObject = serviceObject;
this.properties = properties;
this.properties.put(Constants.OBJECTCLASS, new String[] {className});
}
Object getServiceObject() {
return serviceObject;
}
@SuppressWarnings("unchecked")
@Override
public ServiceReference<Object> getReference() {
return this;
}
@Override
public void setProperties(Dictionary<String, ?> properties) {
throw new UnsupportedOperationException();
}
@Override
public void unregister() {
}
@Override
public Object getProperty(String key) {
return properties.get(key);
}
@Override
public String[] getPropertyKeys() {
return Collections.list(properties.keys()).toArray(new String [] {});
}
@Override
public Bundle getBundle() {
throw new UnsupportedOperationException();
}
@Override
public Bundle[] getUsingBundles() {
throw new UnsupportedOperationException();
}
@Override
public boolean isAssignableTo(Bundle bundle, String className) {
throw new UnsupportedOperationException();
}
@Override
public int compareTo(Object reference) {
throw new UnsupportedOperationException();
}
}
}
| 9,700 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly/impl1/MySPIImpl1.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly.impl1;
import org.apache.aries.mytest.MySPI;
/**
 * Test SPI implementation that returns its input reversed.
 */
public class MySPIImpl1 implements MySPI {
    @Override
    public String someMethod(String s) {
        // StringBuilder.reverse is surrogate-pair aware, so keep it rather
        // than reversing the char array by hand.
        StringBuilder reversed = new StringBuilder(s);
        reversed.reverse();
        return reversed.toString();
    }
}
| 9,701 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly/impl2/MySPIImpl2a.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly.impl2;
import org.apache.aries.mytest.MySPI;
/**
 * Test SPI implementation that reports the length of its input as a string.
 */
public class MySPIImpl2a implements MySPI {
    @Override
    public String someMethod(String s) {
        int length = s.length();
        return String.valueOf(length);
    }
}
| 9,702 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly/impl2/MySPIImpl2b.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly.impl2;
import org.apache.aries.mytest.MySPI;
/**
 * Test SPI implementation that reports the input length prefixed with a dash,
 * so tests can tell it apart from {@code MySPIImpl2a}.
 */
public class MySPIImpl2b implements MySPI {
    @Override
    public String someMethod(String s) {
        int length = s.length();
        return "-" + length;
    }
}
| 9,703 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly/impl4/MySPIImpl4a.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly.impl4;
import org.apache.aries.mytest.MySPI;
/**
 * Test SPI implementation identified by the constant marker {@code "impl4a"}.
 */
public class MySPIImpl4a implements MySPI {
    /** Ignores the argument and returns the fixed marker string. */
    @Override
    public String someMethod(String s) {
        final String marker = "impl4a";
        return marker;
    }
}
| 9,704 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly/impl4/MySPIImpl4b.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly.impl4;
import org.apache.aries.mytest.MySPI2;
/**
 * Test SPI implementation of {@code MySPI2} identified by the constant marker
 * {@code "impl4b"}.
 */
public class MySPIImpl4b implements MySPI2 {
    /** Ignores the argument and returns the fixed marker string. */
    @Override
    public String someMethod(String s) {
        final String marker = "impl4b";
        return marker;
    }
}
| 9,705 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly/impl4/MySPIImpl4c.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly.impl4;
import org.apache.aries.mytest.MySPI2;
/**
 * Test SPI implementation of {@code MySPI2} identified by the constant marker
 * {@code "impl4c"}.
 */
public class MySPIImpl4c implements MySPI2 {
    /** Ignores the argument and returns the fixed marker string. */
    @Override
    public String someMethod(String s) {
        final String marker = "impl4c";
        return marker;
    }
}
| 9,706 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly | Create_ds/aries/spi-fly/spi-fly-core/src/test/java/org/apache/aries/spifly/impl3/MySPIImpl3.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly.impl3;
import org.apache.aries.mytest.MySPI;
/**
 * Test SPI implementation identified by the constant marker {@code "impl3"}.
 */
public class MySPIImpl3 implements MySPI {
    /** Ignores the argument and returns the fixed marker string. */
    @Override
    public String someMethod(String s) {
        final String marker = "impl3";
        return marker;
    }
}
| 9,707 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries/spifly/ProviderBundleTrackerCustomizer.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import static java.util.stream.Collectors.toList;
import static org.osgi.framework.wiring.BundleRevision.TYPE_FRAGMENT;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import java.util.logging.Level;
import java.util.stream.Stream;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleEvent;
import org.osgi.framework.Constants;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServicePermission;
import org.osgi.framework.ServiceRegistration;
import org.osgi.framework.wiring.BundleRevision;
import org.osgi.framework.wiring.BundleWire;
import org.osgi.framework.wiring.BundleWiring;
import org.osgi.util.tracker.BundleTrackerCustomizer;
import aQute.bnd.header.Attrs;
import aQute.bnd.header.OSGiHeader;
import aQute.bnd.header.Parameters;
import aQute.bnd.stream.MapStream;
import aQute.libg.glob.Glob;
/**
 * Listens for new bundles being installed and registers them as service providers if applicable.
 * <p>
 * A bundle can declare the SPI implementations it provides in three ways, tried in order:
 * <ol>
 *   <li>{@code osgi.serviceloader} capabilities in its {@code Provide-Capability} header,
 *       combined with a requirement on the registrar extender,</li>
 *   <li>the legacy {@code SPI-Provider} manifest header,</li>
 *   <li>a match against the {@code org.apache.aries.spifly.auto.providers} framework
 *       property instructions.</li>
 * </ol>
 * For each provider found, the implementation is recorded with the activator and, where
 * requested, also registered as an OSGi service on behalf of the providing bundle.
 */
@SuppressWarnings("rawtypes")
public class ProviderBundleTrackerCustomizer implements BundleTrackerCustomizer {
    private static final String METAINF_SERVICES = "META-INF/services";

    // Headers whose fragment-contributed values are merged with the host's
    // values instead of replacing them.
    private static final List<String> MERGE_HEADERS = Arrays.asList(
        Constants.IMPORT_PACKAGE, Constants.REQUIRE_BUNDLE, Constants.EXPORT_PACKAGE,
        Constants.PROVIDE_CAPABILITY, Constants.REQUIRE_CAPABILITY);

    final BaseActivator activator;
    final Bundle spiBundle; // the SPI Fly bundle itself; never treated as a provider

    public ProviderBundleTrackerCustomizer(BaseActivator activator, Bundle spiBundle) {
        this.activator = activator;
        this.spiBundle = spiBundle;
    }

    /**
     * Examines a newly tracked bundle for SPI provider metadata and, when found,
     * records the providers with the activator and optionally registers them as
     * OSGi services.
     *
     * @param bundle the bundle being added to the tracker
     * @param event the originating event (may be {@code null})
     * @return the registrations made on behalf of the bundle, or {@code null}
     *         when the bundle provides nothing (the tracker then ignores it)
     */
    @Override
    public List<ServiceRegistration> addingBundle(final Bundle bundle, BundleEvent event) {
        BundleRevision bundleRevision = bundle.adapt(BundleRevision.class);

        // Skip the SPI Fly bundle itself and all fragments; fragment content is
        // picked up through the host bundle.
        if (bundle.equals(spiBundle) || ((bundleRevision != null) && ((bundleRevision.getTypes() & TYPE_FRAGMENT) == TYPE_FRAGMENT)))
            return null; // don't process the SPI bundle itself

        log(Level.FINE, "Bundle Considered for SPI providers: "
            + bundle.getSymbolicName());

        DiscoveryMode discoveryMode = DiscoveryMode.SERVICELOADER_CAPABILITIES;
        List<String> providedServices = null;
        Map<String, Object> customAttributes = new HashMap<String, Object>();

        // 1. osgi.serviceloader capabilities (Service Loader Mediator spec style).
        if (bundle.getHeaders().get(SpiFlyConstants.REQUIRE_CAPABILITY) != null) {
            try {
                providedServices = readServiceLoaderMediatorCapabilityMetadata(bundle, customAttributes);
            } catch (InvalidSyntaxException e) {
                log(Level.FINE, "Unable to read capabilities from bundle " + bundle, e);
            }
        }

        // 2. Legacy SPI-Provider header; "*" means: everything in META-INF/services.
        String spiProviderHeader = getHeaderFromBundleOrFragment(bundle, SpiFlyConstants.SPI_PROVIDER_HEADER);
        if (providedServices == null && spiProviderHeader != null) {
            String header = spiProviderHeader.trim();
            if ("*".equals(header)) {
                providedServices = new ArrayList<String>();
            } else {
                providedServices = Stream.of(header.split(",")).map(String::trim).collect(toList());
            }
            discoveryMode = DiscoveryMode.SPI_PROVIDER_HEADER;
        }

        // 3. Auto-provider framework property instructions.
        List<URL> serviceFileURLs = null;
        if (providedServices == null) {
            Entry<List<String>, List<URL>> autoServices = getFromAutoProviderProperty(bundle, customAttributes);
            providedServices = autoServices.getKey();
            serviceFileURLs = autoServices.getValue();
            discoveryMode = DiscoveryMode.AUTO_PROVIDERS_PROPERTY;
        }

        if (providedServices == null) {
            log(Level.FINE, "No provided SPI services. Skipping bundle: "
                + bundle.getSymbolicName());
            return null;
        } else {
            log(Level.FINE, "Examining bundle for SPI provider: "
                + bundle.getSymbolicName());
        }

        for (String serviceType : providedServices) {
            // Eagerly register any services that are explicitly listed, as they may not be found in META-INF/services
            activator.registerProviderBundle(serviceType, bundle, customAttributes);
        }

        if (serviceFileURLs == null) {
            serviceFileURLs = getServiceFileUrls(bundle);
        }

        final List<ServiceRegistration> registrations = new ArrayList<ServiceRegistration>();
        for (ServiceDetails details : collectServiceDetails(bundle, serviceFileURLs, discoveryMode)) {
            // An empty providedServices list means "all SPIs"; otherwise filter.
            if (providedServices.size() > 0 && !providedServices.contains(details.serviceType))
                continue;

            try {
                final Class<?> cls = bundle.loadClass(details.instanceType);
                log(Level.FINE, "Loaded SPI provider: " + cls);

                // A null properties table means: record the provider but do not
                // register an OSGi service for it.
                if (details.properties != null) {
                    ServiceRegistration reg = null;

                    // service.scope=prototype requests a fresh instance per requester.
                    Object instance =
                        (details.properties.containsKey("service.scope") &&
                         "prototype".equalsIgnoreCase(String.valueOf(details.properties.get("service.scope")))) ?
                            new ProviderPrototypeServiceFactory(cls) :
                            new ProviderServiceFactory(cls);

                    SecurityManager sm = System.getSecurityManager();
                    if (sm != null) {
                        // Under a security manager the providing bundle must hold
                        // the REGISTER ServicePermission for the service type.
                        if (bundle.hasPermission(new ServicePermission(details.serviceType, ServicePermission.REGISTER))) {
                            reg = bundle.getBundleContext().registerService(
                                details.serviceType, instance, details.properties);
                        } else {
                            log(Level.FINE, "Bundle " + bundle + " does not have the permission to register services of type: " + details.serviceType);
                        }
                    } else {
                        reg = bundle.getBundleContext().registerService(
                            details.serviceType, instance, details.properties);
                    }

                    if (reg != null) {
                        registrations.add(reg);
                        log(Level.FINE, "Registered service: " + reg);
                    }
                }

                activator.registerProviderBundle(details.serviceType, bundle, details.properties);
                log(Level.INFO, "Registered provider " + details.instanceType + " of service " + details.serviceType + " in bundle " + bundle.getSymbolicName());
            } catch (Exception | NoClassDefFoundError e) {
                // A broken provider must not prevent the remaining providers
                // of this bundle from being processed.
                log(Level.FINE,
                    "Could not load provider " + details.instanceType + " of service " + details.serviceType, e);
            }
        }

        return registrations;
    }

    /**
     * Parses the given {@code META-INF/services} provider-configuration files and
     * returns one {@link ServiceDetails} per implementation class listed, with the
     * service registration properties appropriate for the discovery mode.
     */
    private List<ServiceDetails> collectServiceDetails(Bundle bundle, List<URL> serviceFileURLs, DiscoveryMode discoveryMode) {
        List<ServiceDetails> serviceDetails = new ArrayList<>();
        for (URL serviceFileURL : serviceFileURLs) {
            log(Level.FINE, "Found SPI resource: " + serviceFileURL);

            // FIX: use try-with-resources so the stream is always closed;
            // previously the BufferedReader leaked on every invocation.
            // NOTE(review): the reader uses the platform default charset; the
            // ServiceLoader file format is specified as UTF-8 -- confirm before
            // switching, as it would change behavior on non-UTF-8 platforms.
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(serviceFileURL.openStream()))) {
                String className = null;
                while((className = reader.readLine()) != null) {
                    try {
                        className = className.trim();

                        if (className.length() == 0)
                            continue; // empty line

                        if (className.startsWith("#"))
                            continue; // a comment

                        // The service type is the name of the resource file itself.
                        String serviceFile = serviceFileURL.toExternalForm();
                        int idx = serviceFile.lastIndexOf('/');
                        String registrationClassName = className;
                        if (serviceFile.length() > idx) {
                            registrationClassName = serviceFile.substring(idx + 1);
                        }

                        final Hashtable<String, Object> properties;
                        if (discoveryMode == DiscoveryMode.SPI_PROVIDER_HEADER) {
                            // SPI-Provider bundles always get a (property-less) registration.
                            properties = new Hashtable<String, Object>();
                        }
                        else if (discoveryMode == DiscoveryMode.AUTO_PROVIDERS_PROPERTY) {
                            // Copy any attributes from the matching auto-provider instruction.
                            properties = activator.getAutoProviderInstructions().map(
                                Parameters::stream
                            ).orElseGet(MapStream::empty).filterKey(
                                i -> Glob.toPattern(i).asPredicate().test(bundle.getSymbolicName())
                            ).values().findFirst().map(
                                Hashtable<String, Object>::new
                            ).orElseGet(() -> new Hashtable<String, Object>());
                        }
                        else {
                            // Capability mode: a null result means "do not register a service".
                            properties = findServiceRegistrationProperties(bundle, registrationClassName, className);
                        }

                        if (properties != null) {
                            properties.put(SpiFlyConstants.SERVICELOADER_MEDIATOR_PROPERTY, spiBundle.getBundleId());
                            properties.put(SpiFlyConstants.PROVIDER_IMPLCLASS_PROPERTY, className);
                            properties.put(SpiFlyConstants.PROVIDER_DISCOVERY_MODE, discoveryMode.toString());
                        }

                        serviceDetails.add(new ServiceDetails(registrationClassName, className, properties));
                    } catch (Exception e) {
                        log(Level.FINE,
                            "Could not load SPI implementation referred from " + serviceFileURL, e);
                    }
                }
            } catch (IOException e) {
                log(Level.FINE, "Could not read SPI metadata from " + serviceFileURL, e);
            }
        }
        return serviceDetails;
    }

    /**
     * Applies the {@code org.apache.aries.spifly.auto.providers} instructions to the
     * bundle. On a match, returns the discovered service types and the service files
     * they came from; otherwise both entry values are {@code null}.
     */
    private Entry<List<String>, List<URL>> getFromAutoProviderProperty(Bundle bundle, Map<String, Object> customAttributes) {
        return activator.getAutoProviderInstructions().map(
            Parameters::stream
        ).orElseGet(MapStream::empty).filterKey(
            i ->
                Glob.toPattern(i).asPredicate().test(bundle.getSymbolicName())
        ).values().findFirst().map(
            un -> {
                List<URL> serviceFileURLs = getServiceFileUrls(bundle);
                List<ServiceDetails> collectServiceDetails = collectServiceDetails(bundle, serviceFileURLs, DiscoveryMode.AUTO_PROVIDERS_PROPERTY);
                collectServiceDetails.stream().map(ServiceDetails::getProperties).filter(Objects::nonNull).forEach(
                    hashtable -> hashtable.forEach(customAttributes::put)
                );
                List<String> providedServices = collectServiceDetails.stream().map(ServiceDetails::getServiceType).collect(toList());

                return new AbstractMap.SimpleImmutableEntry<>(providedServices, serviceFileURLs);
            }
        ).orElseGet(() -> new AbstractMap.SimpleImmutableEntry<>(null, null));
    }

    /**
     * Collects all {@code META-INF/services} resources visible to the bundle,
     * including those inside embedded jars listed on {@code Bundle-ClassPath}.
     */
    private List<URL> getServiceFileUrls(Bundle bundle) {
        List<URL> serviceFileURLs = new ArrayList<URL>();

        Enumeration<URL> entries = bundle.findEntries(METAINF_SERVICES, "*", false);
        if (entries != null) {
            serviceFileURLs.addAll(Collections.list(entries));
        }

        Object bcp = bundle.getHeaders().get(Constants.BUNDLE_CLASSPATH);
        if (bcp instanceof String) {
            for (String entry : ((String) bcp).split(",")) {
                entry = entry.trim();
                if (entry.equals("."))
                    continue; // "." is the bundle itself, already covered above

                URL url = bundle.getResource(entry);
                if (url != null) {
                    serviceFileURLs.addAll(getMetaInfServiceURLsFromJar(url));
                }
            }
        }
        return serviceFileURLs;
    }

    private String getHeaderFromBundleOrFragment(Bundle bundle, String headerName) {
        return getHeaderFromBundleOrFragment(bundle, headerName, null);
    }

    /**
     * Returns the value of the given manifest header, taking attached fragments
     * into account. For {@link #MERGE_HEADERS} the host and fragment values are
     * merged; for all other headers a matching fragment value replaces the host's.
     *
     * @param matchString when non-null, only values containing this substring
     *        are returned from the early (host-only) path
     */
    private String getHeaderFromBundleOrFragment(Bundle bundle, String headerName, String matchString) {
        Parameters headerParameters = new Parameters(bundle.getHeaders().get(headerName));
        if (matches(headerParameters.toString(), matchString) && !MERGE_HEADERS.contains(headerName)) {
            return headerParameters.isEmpty() ? null : headerParameters.toString();
        }

        BundleRevision rev = bundle.adapt(BundleRevision.class);
        if (rev != null) {
            BundleWiring wiring = rev.getWiring();
            if (wiring != null) {
                // Fragments are wired to their host via osgi.wiring.host.
                for (BundleWire wire : wiring.getProvidedWires("osgi.wiring.host")) {
                    Bundle fragment = wire.getRequirement().getRevision().getBundle();
                    Parameters fragmentParameters = new Parameters(fragment.getHeaders().get(headerName));
                    if (MERGE_HEADERS.contains(headerName)) {
                        headerParameters.mergeWith(fragmentParameters, false);
                    }
                    else {
                        headerParameters = fragmentParameters;
                    }
                    if (matches(headerParameters.toString(), matchString)) {
                        return headerParameters.toString();
                    }
                }
            }
        }

        return headerParameters.isEmpty() ? null : headerParameters.toString();
    }

    /** True when {@code val} contains {@code matchString} (or matchString is null). */
    private boolean matches(String val, String matchString) {
        if (val == null)
            return false;

        if (matchString == null)
            return true;

        int idx = val.indexOf(matchString);
        return idx >= 0;
    }

    // An empty list returned means 'all SPIs'
    // A return value of null means no SPIs
    // A populated list means: only these SPIs
    private List<String> readServiceLoaderMediatorCapabilityMetadata(Bundle bundle, Map<String, Object> customAttributes) throws InvalidSyntaxException {
        String requirementHeader = getHeaderFromBundleOrFragment(bundle, SpiFlyConstants.REQUIRE_CAPABILITY, SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE);
        if (requirementHeader == null)
            return null;

        Parameters requirements = OSGiHeader.parseHeader(requirementHeader);

        // The bundle must opt in by requiring the registrar extender.
        Entry<String, ? extends Map<String, String>> extenderRequirement = ConsumerHeaderProcessor.findRequirement(requirements, SpiFlyConstants.EXTENDER_CAPABILITY_NAMESPACE, SpiFlyConstants.REGISTRAR_EXTENDER_NAME);
        if (extenderRequirement == null)
            return null;

        Parameters capabilities;
        String capabilityHeader = getHeaderFromBundleOrFragment(bundle, SpiFlyConstants.PROVIDE_CAPABILITY, SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE);
        if (capabilityHeader == null) {
            capabilities = new Parameters();
        } else {
            capabilities = OSGiHeader.parseHeader(capabilityHeader);
        }

        List<String> serviceNames = new ArrayList<String>();
        for (Entry<String, ? extends Map<String, String>> serviceLoaderCapability : ConsumerHeaderProcessor.findAllMetadata(capabilities, SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE)) {
            for (Entry<String, String> entry : serviceLoaderCapability.getValue().entrySet()) {
                if (SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE.equals(entry.getKey())) {
                    serviceNames.add(entry.getValue().trim());
                    continue;
                }
                if (SpiFlyConstants.REGISTER_DIRECTIVE.equals(entry.getKey()) && entry.getValue().equals("")) {
                    continue;
                }
                // All other attributes are carried over onto the registration.
                customAttributes.put(entry.getKey(), entry.getValue());
            }
        }

        return serviceNames;
    }

    // null means don't register,
    // otherwise the return value should be taken as the service registration properties
    private Hashtable<String, Object> findServiceRegistrationProperties(Bundle bundle, String spiName, String implName) {
        Object capabilityHeader = getHeaderFromBundleOrFragment(bundle, SpiFlyConstants.PROVIDE_CAPABILITY);
        if (capabilityHeader == null)
            return null;

        Parameters capabilities = OSGiHeader.parseHeader(capabilityHeader.toString());

        for (Map.Entry<String, Attrs> entry : capabilities.entrySet()) {
            String key = ConsumerHeaderProcessor.removeDuplicateMarker(entry.getKey());
            Attrs attrs = entry.getValue();

            // Only osgi.serviceloader capabilities for this exact SPI (and, when a
            // register directive is present, this exact implementation) apply.
            if (!SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE.equals(key))
                continue;
            if (!attrs.containsKey(SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE) ||
                !attrs.get(SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE).equals(spiName))
                continue;
            if (attrs.containsKey(SpiFlyConstants.REGISTER_DIRECTIVE) &&
                !attrs.get(SpiFlyConstants.REGISTER_DIRECTIVE).equals(implName))
                continue;

            Hashtable<String, Object> properties = new Hashtable<String, Object>();
            for (Map.Entry<String, String> prop : attrs.entrySet()) {
                // NOTE(review): 'key.startsWith(".")' is always false at this point
                // because key equals the serviceloader namespace; it may have been
                // intended as 'prop.getKey().startsWith(".")' to skip private
                // attributes -- confirm before changing.
                if (SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE.equals(prop.getKey()) ||
                    SpiFlyConstants.REGISTER_DIRECTIVE.equals(prop.getKey()) ||
                    key.startsWith("."))
                    continue;
                properties.put(prop.getKey(), prop.getValue());
            }
            return properties;
        }
        return null;
    }

    /**
     * Scans an embedded jar (a Bundle-ClassPath entry) for resources under
     * {@code META-INF/services} and returns {@code jar:} URLs pointing at them.
     */
    private List<URL> getMetaInfServiceURLsFromJar(URL url) {
        List<URL> urls = new ArrayList<URL>();
        // try-with-resources replaces the previous manual close in a finally block.
        try (JarInputStream jis = new JarInputStream(url.openStream())) {
            JarEntry je = null;
            while((je = jis.getNextJarEntry()) != null) {
                // Skip the META-INF/services directory entry itself.
                if (je.getName().startsWith(METAINF_SERVICES) &&
                    je.getName().length() > (METAINF_SERVICES.length() + 1)) {
                    urls.add(new URL("jar:" + url + "!/" + je.getName()));
                }
            }
        } catch (IOException e) {
            log(Level.FINE, "Problem opening embedded jar file: " + url, e);
        }
        return urls;
    }

    @Override
    public void modifiedBundle(Bundle bundle, BundleEvent event, Object registrations) {
        // implementation is unnecessary for this use case
    }

    /**
     * Undoes {@link #addingBundle}: forgets the bundle's providers and unregisters
     * any services that were registered on its behalf.
     */
    @Override
    @SuppressWarnings("unchecked")
    public void removedBundle(Bundle bundle, BundleEvent event, Object registrations) {
        activator.unregisterProviderBundle(bundle);

        if (registrations == null)
            return;

        for (ServiceRegistration reg : (List<ServiceRegistration>) registrations) {
            try {
                reg.unregister();
                log(Level.FINE, "Unregistered: " + reg);
            } catch (IllegalStateException ise) {
                // Ignore the exception but do not remove the try/catch.
                // There are some bundle context races on cleanup which
                // are safe to ignore but unsafe not to perform our own
                // cleanup. In an ideal world ServiceRegistration.unregister()
                // would have been idempotent and never throw an exception.
            }
        }
    }

    private void log(Level level, String message) {
        activator.log(level, message);
    }

    private void log(Level level, String message, Throwable th) {
        activator.log(level, message, th);
    }

    /** How the bundle's provider metadata was discovered. */
    enum DiscoveryMode {
        SPI_PROVIDER_HEADER,
        AUTO_PROVIDERS_PROPERTY,
        SERVICELOADER_CAPABILITIES
    }

    /** Value object: one provider implementation of one service type. */
    class ServiceDetails {
        public ServiceDetails(String serviceType, String instanceType, Hashtable<String, Object> properties) {
            this.serviceType = serviceType;
            this.instanceType = instanceType;
            this.properties = properties;
        }

        public String getInstanceType() {
            return instanceType;
        }

        public Hashtable<String, Object> getProperties() {
            return properties;
        }

        public String getServiceType() {
            return serviceType;
        }

        @Override
        public String toString() {
            return String.format(
                "ServiceDetails [serviceType=\"%s\", instanceType=\"%s\", properties=%s]",
                getServiceType(), getInstanceType(), getProperties());
        }

        private final String instanceType;
        // null properties mean: record the provider but register no OSGi service
        private final Hashtable<String, Object> properties;
        private final String serviceType;
    }
}
| 9,708 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries/spifly/ConsumerBundleTrackerCustomizer.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleEvent;
import org.osgi.util.tracker.BundleTrackerCustomizer;
/**
 * Tracks bundles that may consume SPI services and records their weaving
 * metadata with the activator, so that the weaving hook can later rewrite
 * their ServiceLoader calls.
 */
public class ConsumerBundleTrackerCustomizer implements BundleTrackerCustomizer {
    private final BaseActivator activator;
    private final String headerName;

    public ConsumerBundleTrackerCustomizer(BaseActivator baseActivator, String consumerHeaderName) {
        this.activator = baseActivator;
        this.headerName = consumerHeaderName;
    }

    /**
     * Registers the bundle's consumer weaving data and returns the bundle as
     * the tracked object.
     */
    @Override
    public Object addingBundle(Bundle bundle, BundleEvent event) {
        try {
            activator.addConsumerWeavingData(bundle, headerName);
            return bundle;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** A modified bundle is treated as removed-then-added. */
    @Override
    public void modifiedBundle(Bundle bundle, BundleEvent event, Object object) {
        removedBundle(bundle, event, object);
        addingBundle(bundle, event);
    }

    @Override
    public void removedBundle(Bundle bundle, BundleEvent event, Object object) {
        activator.removeWeavingData(bundle);
    }
}
| 9,709 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries/spifly/BaseActivator.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.wiring.BundleRevision;
import org.osgi.framework.wiring.BundleWire;
import org.osgi.framework.wiring.BundleWiring;
import org.osgi.util.tracker.BundleTracker;
import aQute.bnd.header.Parameters;
import aQute.bnd.stream.MapStream;
import aQute.libg.glob.Glob;
public abstract class BaseActivator implements BundleActivator {
private static final Set<WeavingData> NON_WOVEN_BUNDLE = Collections.emptySet();
private static final Logger logger = Logger.getLogger(BaseActivator.class.getName());
// Static access to the activator used by the woven code, therefore
// this bundle must be a singleton.
// TODO see if we can get rid of the static access.
public static BaseActivator activator;
private BundleContext bundleContext;
@SuppressWarnings("rawtypes")
private BundleTracker consumerBundleTracker;
@SuppressWarnings("rawtypes")
private BundleTracker providerBundleTracker;
private Optional<Parameters> autoConsumerInstructions;
private Optional<Parameters> autoProviderInstructions;
private final ConcurrentMap<Bundle, Set<WeavingData>> bundleWeavingData =
new ConcurrentHashMap<Bundle, Set<WeavingData>>();
private final ConcurrentMap<String, SortedMap<Long, Pair<Bundle, Map<String, Object>>>> registeredProviders =
new ConcurrentHashMap<String, SortedMap<Long, Pair<Bundle, Map<String, Object>>>>();
private final ConcurrentMap<Bundle, Map<ConsumerRestriction, List<BundleDescriptor>>> consumerRestrictions =
new ConcurrentHashMap<Bundle, Map<ConsumerRestriction, List<BundleDescriptor>>>();
    /**
     * Starts the SPI Fly machinery: reads the optional auto consumer/provider
     * framework properties, opens the provider and consumer bundle trackers,
     * processes all bundles already present in the framework and finally
     * publishes this instance through the static {@code activator} field used
     * by woven code.
     *
     * @param context the bundle context of the SPI Fly bundle
     * @param consumerHeaderName the manifest header that marks SPI consumers
     * @throws Exception if consumer weaving data cannot be computed for a bundle
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public synchronized void start(BundleContext context, final String consumerHeaderName) throws Exception {
        bundleContext = context;

        // The auto.* properties are optional; a malformed value must not stop
        // the bundle from starting, hence the deliberately broad catch.
        try {
            autoConsumerInstructions = Optional.ofNullable(
                bundleContext.getProperty("org.apache.aries.spifly.auto.consumers")
            ).map(Parameters::new);
            autoProviderInstructions = Optional.ofNullable(
                bundleContext.getProperty("org.apache.aries.spifly.auto.providers")
            ).map(Parameters::new);
        }
        catch (Throwable t) {
            log(Level.FINE, t.getMessage(), t);
        }

        // Providers only matter once they can run; consumers must be seen as
        // early as INSTALLED so weaving data exists before they resolve.
        providerBundleTracker = new BundleTracker(context,
            Bundle.ACTIVE | Bundle.STARTING, new ProviderBundleTrackerCustomizer(this, context.getBundle()));
        providerBundleTracker.open();

        consumerBundleTracker = new BundleTracker(context,
            Bundle.INSTALLED | Bundle.RESOLVED | Bundle.STARTING | Bundle.ACTIVE, new ConsumerBundleTrackerCustomizer(this, consumerHeaderName));
        consumerBundleTracker.open();

        // Catch up on bundles installed before this activator started.
        for (Bundle bundle : context.getBundles()) {
            addConsumerWeavingData(bundle, consumerHeaderName);
        }

        activator = this;
    }
    /**
     * Computes and caches the weaving data for a bundle from its consumer
     * header and {@code Require-Capability} header (falling back to the auto
     * consumer instructions), and registers the resulting consumer
     * restrictions. Bundles are processed at most once; bundles with no
     * weaving data are cached as {@code NON_WOVEN_BUNDLE}.
     *
     * @param bundle the bundle to inspect
     * @param consumerHeaderName the manifest header that marks SPI consumers
     * @throws Exception if the header metadata cannot be processed
     */
    public void addConsumerWeavingData(Bundle bundle, String consumerHeaderName) throws Exception {
        if (bundleWeavingData.containsKey(bundle)) {
            // This bundle was already processed
            return;
        }

        Map<String, List<String>> allHeaders = new HashMap<String, List<String>>();
        Set<String> addedHeaders = new HashSet<String>();

        // NOTE(review): Map.put returns the PREVIOUS mapping, so 'added' is null
        // on the first put of each distinct key and 'addedHeaders' typically
        // stays empty even when the bundle carries real headers. It looks like
        // the NEW header values were meant to be collected here -- confirm
        // whether the auto-consumer fallback below is intended to trigger in
        // that case.
        List<String> added = allHeaders.put(consumerHeaderName, getAllHeaders(consumerHeaderName, bundle));
        if (added != null) {
            added.stream().forEach(addedHeaders::add);
        }
        added = allHeaders.put(SpiFlyConstants.REQUIRE_CAPABILITY, getAllHeaders(SpiFlyConstants.REQUIRE_CAPABILITY, bundle));
        if (added != null) {
            added.stream().forEach(addedHeaders::add);
        }
        if (addedHeaders.isEmpty()) {
            // No explicit consumer metadata: synthesize a requirement when an
            // auto-consumer instruction glob matches the bundle's symbolic name.
            getAutoConsumerInstructions().map(Parameters::stream).orElseGet(MapStream::empty).filterKey(
                i -> Glob.toPattern(i).asPredicate().test(bundle.getSymbolicName())
            ).findFirst().ifPresent(
                un -> allHeaders.put(
                    SpiFlyConstants.REQUIRE_CAPABILITY,
                    Arrays.asList(
                        SpiFlyConstants.CLIENT_REQUIREMENT.concat(",osgi.serviceloader;filter:='(osgi.serviceloader=*)'")))
            );
        }

        // Turn each header value into weaving data entries.
        Set<WeavingData> wd = new HashSet<WeavingData>();
        for (Map.Entry<String, List<String>> entry : allHeaders.entrySet()) {
            String headerName = entry.getKey();
            for (String headerVal : entry.getValue()) {
                wd.addAll(ConsumerHeaderProcessor.processHeader(headerName, headerVal));
            }
        }

        if (!wd.isEmpty()) {
            bundleWeavingData.put(bundle, Collections.unmodifiableSet(wd));

            for (WeavingData w : wd) {
                registerConsumerBundle(bundle, w.getArgRestrictions(), w.getAllowedBundles());
            }
        } else {
            // Cache the negative result so the bundle is not re-inspected.
            bundleWeavingData.put(bundle, NON_WOVEN_BUNDLE);
        }
    }
/**
 * Collects the values of the given manifest header from the bundle itself and
 * from all of its attached fragments (found through the osgi.wiring.host wires).
 *
 * @return the non-null header values found, possibly empty
 */
private List<String> getAllHeaders(String headerName, Bundle bundle) {
    List<Bundle> candidates = new ArrayList<Bundle>();
    candidates.add(bundle);

    BundleRevision revision = bundle.adapt(BundleRevision.class);
    BundleWiring wiring = (revision == null) ? null : revision.getWiring();
    if (wiring != null) {
        // Each provided osgi.wiring.host wire points at an attached fragment.
        for (BundleWire hostWire : wiring.getProvidedWires("osgi.wiring.host")) {
            candidates.add(hostWire.getRequirement().getRevision().getBundle());
        }
    }

    List<String> values = new ArrayList<String>();
    for (Bundle candidate : candidates) {
        String value = candidate.getHeaders().get(headerName);
        if (value != null) {
            values.add(value);
        }
    }
    return values;
}
/** Forgets the cached weaving data and consumer restrictions for the given bundle. */
public void removeWeavingData(Bundle bundle) {
    bundleWeavingData.remove(bundle);
    consumerRestrictions.remove(bundle);
}
/**
 * Stops the activator: clears the singleton and closes both bundle trackers.
 * Null-guards added so a start() that failed before the trackers were created
 * does not cause a NullPointerException on shutdown.
 */
@Override
public synchronized void stop(BundleContext context) throws Exception {
    activator = null;
    if (consumerBundleTracker != null) {
        consumerBundleTracker.close();
    }
    if (providerBundleTracker != null) {
        providerBundleTracker.close();
    }
}
/** Returns true when messages at the given level would actually be logged. */
public boolean isLogEnabled(Level level) {
    return logger.isLoggable(level);
}

// Convenience overload: log without a Throwable, taking the JUL level as its
// raw integer value (as used by some OSGi log APIs).
public void log(int level, String message) {
    log(level, message, null);
}

// Convenience overload: log without a Throwable.
public void log(Level level, String message) {
    log(level, message, null);
}
/**
 * Logs a message given the JUL level as its raw integer value. The integer is
 * translated back to the matching {@link Level} constant; values that do not
 * correspond to any known constant are mapped to {@code Level.OFF}.
 */
public void log(int level, String message, Throwable th) {
    // The known constants all have distinct integer values, so a simple scan
    // replaces the previous if/else chain without changing the outcome.
    Level[] known = {
        Level.ALL, Level.CONFIG, Level.FINE, Level.FINER,
        Level.FINEST, Level.INFO, Level.SEVERE, Level.WARNING
    };

    Level resolved = Level.OFF;
    for (Level candidate : known) {
        if (candidate.intValue() == level) {
            resolved = candidate;
            break;
        }
    }

    log(resolved, message, th);
}
/** Central logging method: all other log() overloads funnel into here. */
public void log(Level level, String message, Throwable th) {
    if (logger.isLoggable(level)) {
        logger.log(level, message, th);
    }
}
/**
 * Returns the weaving instructions for the given bundle, or {@code null} when
 * the bundle is unknown or was recorded as non-woven (empty marker set).
 * The returned set is already immutable, so it is handed out as-is.
 */
public Set<WeavingData> getWeavingData(Bundle b) {
    Set<WeavingData> weavingData = bundleWeavingData.get(b);
    if (weavingData == null || weavingData.isEmpty()) {
        return null;
    }
    return weavingData;
}
/**
 * Records that the given bundle provides the SPI named by
 * {@code registrationClassName}. Repeated registrations for the same bundle
 * merge the new custom attributes into the existing attribute map.
 */
public void registerProviderBundle(String registrationClassName, Bundle bundle, Map<String, Object> customAttributes) {
    // One map per SPI name, sorted by bundle id; synchronized because providers
    // may be registered and unregistered concurrently.
    SortedMap<Long, Pair<Bundle, Map<String, Object>>> map = registeredProviders.computeIfAbsent(registrationClassName,
        k -> Collections.synchronizedSortedMap(new TreeMap<Long, Pair<Bundle, Map<String, Object>>>()));
    map.compute(
        bundle.getBundleId(),
        (k,v) -> {
            if (v == null) {
                return new Pair<Bundle, Map<String, Object>>(bundle, customAttributes);
            }
            else {
                // Already registered: merge into the existing attribute map.
                v.getRight().putAll(customAttributes);
                return v;
            }
        });
}
/**
 * Removes every provider registration that points at the given bundle, across
 * all SPI names.
 */
public void unregisterProviderBundle(Bundle bundle) {
    for (Map<Long, Pair<Bundle, Map<String, Object>>> value : registeredProviders.values()) {
        // The per-SPI maps are created via Collections.synchronizedSortedMap();
        // iterating (which removeIf does) over their views must be manually
        // synchronized on the map object, per the Collections javadoc.
        synchronized (value) {
            value.entrySet().removeIf(entry -> entry.getValue().getLeft().equals(bundle));
        }
    }
}
/**
 * Returns the bundles registered as providers of the given SPI name, in bundle
 * id order; an empty list when no providers are known.
 */
public Collection<Bundle> findProviderBundles(String name) {
    SortedMap<Long, Pair<Bundle, Map<String, Object>>> map = registeredProviders.get(name);
    if (map == null)
        return Collections.emptyList();

    List<Bundle> bundles = new ArrayList<Bundle>(map.size());
    // map is a synchronized sorted map; iterating its values view requires
    // holding the map's monitor (see Collections.synchronizedSortedMap javadoc).
    synchronized (map) {
        for (Pair<Bundle, Map<String, Object>> value : map.values()) {
            bundles.add(value.getLeft());
        }
    }
    return bundles;
}
/**
 * Returns the custom service attributes recorded for the given provider bundle
 * under the given SPI name; an empty map when either the SPI name or the
 * bundle is not registered.
 */
public Map<String, Object> getCustomBundleAttributes(String name, Bundle b) {
    SortedMap<Long, Pair<Bundle, Map<String, Object>>> providers = registeredProviders.get(name);
    if (providers != null) {
        Pair<Bundle, Map<String, Object>> registration = providers.get(b.getBundleId());
        if (registration != null) {
            return registration.getRight();
        }
    }
    return Collections.emptyMap();
}
/**
 * Records the restrictions for a consumer bundle: each restriction is mapped
 * to the list of bundles it allows (null descriptor list = no restriction).
 */
public void registerConsumerBundle(Bundle consumerBundle,
        Set<ConsumerRestriction> restrictions, List<BundleDescriptor> allowedBundles) {
    // computeIfAbsent is atomic on ConcurrentMap, unlike the previous
    // putIfAbsent-then-get sequence, and avoids creating a throwaway map when
    // an entry already exists.
    Map<ConsumerRestriction, List<BundleDescriptor>> map = consumerRestrictions.computeIfAbsent(
        consumerBundle, b -> new HashMap<ConsumerRestriction, List<BundleDescriptor>>());
    for (ConsumerRestriction restriction : restrictions) {
        map.put(restriction, allowedBundles);
    }
}
/**
 * Determines which bundles may provide services for the given consumer call.
 *
 * @return {@code null} when the consumer has no restrictions at all (anything
 *         goes), an empty collection when restrictions exist but none match,
 *         otherwise the bundles allowed by the first matching restriction
 */
public Collection<Bundle> findConsumerRestrictions(Bundle consumer, String className, String methodName,
        Map<Pair<Integer, String>, String> args) {
    Map<ConsumerRestriction, List<BundleDescriptor>> restrictions = consumerRestrictions.get(consumer);
    if (restrictions == null) {
        // Null means: no restrictions
        return null;
    }

    for (Map.Entry<ConsumerRestriction, List<BundleDescriptor>> entry : restrictions.entrySet()) {
        if (entry.getKey().matches(className, methodName, args)) {
            return getBundles(entry.getValue(), className, methodName, args);
        }
    }

    // Empty collection: nothing matches
    return Collections.emptySet();
}
/**
 * The parsed auto-consumer instructions. Never returns {@code null}: the
 * backing field is only assigned in start(), so callers arriving earlier get
 * an empty Optional.
 */
public Optional<Parameters> getAutoConsumerInstructions() {
    if (autoConsumerInstructions == null) return Optional.empty();
    return autoConsumerInstructions;
}

public void setAutoConsumerInstructions(Optional<Parameters> autoConsumerInstructions) {
    this.autoConsumerInstructions = autoConsumerInstructions;
}

/**
 * The parsed auto-provider instructions; same null-guard as
 * {@link #getAutoConsumerInstructions()}.
 */
public Optional<Parameters> getAutoProviderInstructions() {
    if (autoProviderInstructions == null) return Optional.empty();
    return autoProviderInstructions;
}

public void setAutoProviderInstructions(Optional<Parameters> autoProviderInstructions) {
    this.autoProviderInstructions = autoProviderInstructions;
}
/**
 * Resolves allowed-bundle descriptors against the currently installed bundles.
 * A descriptor selects either by explicit bundle id, by an OSGi filter
 * (matched against osgi.serviceloader attributes for ServiceLoader.load()
 * calls), or by symbolic name with an optional version.
 *
 * @return the matching bundles, or {@code null} when {@code descriptors} is
 *         null (meaning: no restriction)
 */
private Collection<Bundle> getBundles(List<BundleDescriptor> descriptors, String className, String methodName,
        Map<Pair<Integer, String>, String> args) {
    if (descriptors == null) {
        return null;
    }

    List<Bundle> bundles = new ArrayList<Bundle>();
    for (Bundle b : bundleContext.getBundles()) {
        for (BundleDescriptor desc : descriptors) {
            if (desc.getBundleID() != BundleDescriptor.BUNDLE_ID_UNSPECIFIED) {
                // Selection by explicit bundle id.
                if (b.getBundleId() == desc.getBundleID()) {
                    bundles.add(b);
                }
            } else if (desc.getFilter() != null) {
                // Selection by filter: build the properties the filter matches on.
                Hashtable<String, Object> d = new Hashtable<String, Object>();
                if (ServiceLoader.class.getName().equals(className) &&
                    "load".equals(methodName)) {
                    // The first argument of ServiceLoader.load() is the SPI type.
                    String type = args.get(new Pair<Integer, String>(0, Class.class.getName()));
                    if (type != null) {
                        d.put(SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE, type);
                        d.putAll(getCustomBundleAttributes(type, b));
                    }
                }
                if (desc.getFilter().match(d))
                    bundles.add(b);
            } else {
                // Selection by symbolic name, optionally narrowed by version.
                if (b.getSymbolicName().equals(desc.getSymbolicName())) {
                    if (desc.getVersion() == null || b.getVersion().equals(desc.getVersion())) {
                        bundles.add(b);
                    }
                }
            }
        }
    }
    return bundles;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URL;
import java.security.AccessControlException;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import java.util.logging.Level;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleReference;
import org.osgi.framework.Constants;
import org.osgi.framework.ServicePermission;
/**
 * Methods used from ASM-generated code. They store, change and reset the thread context classloader.
 * The methods are static to make it easy to access them from generated code.
 */
public class Util {
    // Holds the context classloader that was active before fixContextClassloader()
    // replaced it, so restoreContextClassloader() can put it back.
    static ThreadLocal<ClassLoader> storedClassLoaders = new ThreadLocal<ClassLoader>();

    // Provided as static method to make it easier to call from ASM-modified code
    public static void storeContextClassloader() {
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
            @Override
            public Void run() {
                storedClassLoaders.set(Thread.currentThread().getContextClassLoader());
                return null;
            }
        });
    }

    // Provided as static method to make it easier to call from ASM-modified code
    public static void restoreContextClassloader() {
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
            @Override
            public Void run() {
                Thread.currentThread().setContextClassLoader(storedClassLoaders.get());
                // remove() rather than set(null): a null-valued entry would keep a
                // stale slot in the thread's ThreadLocal map alive indefinitely.
                storedClassLoaders.remove();
                return null;
            }
        });
    }

    /**
     * Performs ServiceLoader.load() on behalf of a woven consumer class.
     *
     * @param service the SPI type being requested
     * @param caller the woven class performing the call; its classloader
     *        identifies the consumer bundle
     * @return the ServiceLoader created with the provider-visible classloader as
     *         TCCL, a plain ServiceLoader when no providers are known or the
     *         caller is not an OSGi class, or {@code null} when SPI Fly is not
     *         initialized yet
     */
    public static <C,S> ServiceLoader<S> serviceLoaderLoad(Class<S> service, Class<C> caller) {
        if (BaseActivator.activator == null) {
            // The system is not yet initialized. We can't do anything.
            return null;
        }

        ClassLoader bundleLoader = AccessController.doPrivileged(
            new PrivilegedAction<ClassLoader>() {
                @Override
                public ClassLoader run() {
                    return caller.getClassLoader();
                }
            }
        );
        if (!(bundleLoader instanceof BundleReference)) {
            BaseActivator.activator.log(Level.FINE, "Classloader of consuming bundle doesn't implement BundleReference: " + bundleLoader);
            return ServiceLoader.load(service);
        }

        BundleReference bundleReference = (BundleReference)bundleLoader;
        final ClassLoader bundleClassloader = findContextClassloader(
            bundleReference.getBundle(), ServiceLoader.class.getName(), "load", service);
        if (bundleClassloader == null) {
            // No provider bundles found: fall back to plain ServiceLoader behavior.
            return ServiceLoader.load(service);
        }

        // Temporarily swap the TCCL to the provider-visible classloader for the
        // duration of the ServiceLoader.load() call, then restore it.
        Thread thread = Thread.currentThread();
        return AccessController.doPrivileged(
            new PrivilegedAction<ServiceLoader<S>>() {
                @Override
                public ServiceLoader<S> run() {
                    ClassLoader contextClassLoader = thread.getContextClassLoader();
                    try {
                        thread.setContextClassLoader(bundleClassloader);
                        return ServiceLoader.load(service);
                    }
                    finally {
                        thread.setContextClassLoader(contextClassLoader);
                    }
                }
            }
        );
    }

    /**
     * Performs ServiceLoader.load(Class, ClassLoader) on behalf of a woven
     * consumer class. The caller-specified classloader is kept as the parent;
     * the provider-visible classloader (when available) is consulted through a
     * wrapper for classes and resources not found by the parent.
     */
    public static <C,S> ServiceLoader<S> serviceLoaderLoad(
        Class<S> service, ClassLoader specifiedClassLoader, Class<C> caller) {

        if (BaseActivator.activator == null) {
            // The system is not yet initialized. We can't do anything.
            return null;
        }

        ClassLoader bundleLoader = AccessController.doPrivileged(
            new PrivilegedAction<ClassLoader>() {
                @Override
                public ClassLoader run() {
                    return caller.getClassLoader();
                }
            }
        );
        if (!(bundleLoader instanceof BundleReference)) {
            BaseActivator.activator.log(Level.FINE, "Classloader of consuming bundle doesn't implement BundleReference: " + bundleLoader);
            return ServiceLoader.load(service, specifiedClassLoader);
        }

        BundleReference bundleReference = (BundleReference)bundleLoader;
        final ClassLoader bundleClassloader = findContextClassloader(
            bundleReference.getBundle(), ServiceLoader.class.getName(), "load", service);
        if (bundleClassloader == null) {
            return ServiceLoader.load(service, specifiedClassLoader);
        }

        return ServiceLoader.load(service, new WrapperCL(specifiedClassLoader, bundleClassloader));
    }

    /**
     * Sets the Thread Context Classloader to one that can see the registered
     * providers for the given call, if any. The previous TCCL is expected to
     * have been saved via {@link #storeContextClassloader()} by the woven code.
     */
    public static void fixContextClassloader(String cls, String method, Class<?> clsArg, ClassLoader bundleLoader) {
        BundleReference br = getBundleReference(bundleLoader);
        if (br == null) {
            return;
        }

        final ClassLoader cl = findContextClassloader(br.getBundle(), cls, method, clsArg);
        if (cl != null) {
            BaseActivator.activator.log(Level.FINE, "Temporarily setting Thread Context Classloader to: " + cl);
            AccessController.doPrivileged(new PrivilegedAction<Void>() {
                @Override
                public Void run() {
                    Thread.currentThread().setContextClassLoader(cl);
                    return null;
                }
            });
        } else {
            BaseActivator.activator.log(Level.FINE, "No classloader found for " + cls + ":" + method + "(" + clsArg + ")");
        }
    }

    /**
     * Finds a classloader with visibility of the provider bundles applicable to
     * the given consumer invocation, honoring any consumer restrictions.
     *
     * @return a single bundle's classloader, a delegating classloader over
     *         several provider bundles, or {@code null} when none apply or the
     *         caller lacks the required ServicePermission
     */
    private static ClassLoader findContextClassloader(Bundle consumerBundle, String className, String methodName, Class<?> clsArg) {
        BaseActivator activator = BaseActivator.activator;

        String requestedClass;
        Map<Pair<Integer, String>, String> args;
        if (ServiceLoader.class.getName().equals(className) && "load".equals(methodName)) {
            requestedClass = clsArg.getName();
            args = new HashMap<Pair<Integer,String>, String>();
            args.put(new Pair<Integer, String>(0, Class.class.getName()), requestedClass);

            SecurityManager sm = System.getSecurityManager();
            if (sm != null) {
                try {
                    sm.checkPermission(new ServicePermission(requestedClass, ServicePermission.GET));
                } catch (AccessControlException ace) {
                    // access denied
                    activator.log(Level.FINE, "No permission to obtain service of type: " + requestedClass);
                    return null;
                }
            }
        } else {
            requestedClass = className;
            args = null; // only supported on ServiceLoader.load() at the moment
        }

        Collection<Bundle> bundles = new ArrayList<Bundle>(activator.findProviderBundles(requestedClass));
        activator.log(Level.FINE, "Found bundles providing " + requestedClass + ": " + bundles);

        // Intersect the provider bundles with the bundles the consumer allows.
        Collection<Bundle> allowedBundles = activator.findConsumerRestrictions(consumerBundle, className, methodName, args);
        if (allowedBundles != null) {
            for (Iterator<Bundle> it = bundles.iterator(); it.hasNext(); ) {
                if (!allowedBundles.contains(it.next())) {
                    it.remove();
                }
            }
        }

        switch (bundles.size()) {
        case 0:
            return null;
        case 1:
            Bundle bundle = bundles.iterator().next();
            return getBundleClassLoader(bundle);
        default:
            List<ClassLoader> loaders = new ArrayList<ClassLoader>();
            for (Bundle b : bundles) {
                loaders.add(getBundleClassLoader(b));
            }
            return new MultiDelegationClassloader(loaders.toArray(new ClassLoader[loaders.size()]));
        }
    }

    private static ClassLoader getBundleClassLoader(final Bundle b) {
        return AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
            @Override
            public ClassLoader run() {
                return getBundleClassLoaderPrivileged(b);
            }
        });
    }

    private static ClassLoader getBundleClassLoaderPrivileged(Bundle b) {
        // In 4.3 this can be done much easier by using the BundleWiring, but we want this code to
        // be 4.2 compliant.
        // Here we're just finding any class in the bundle, load that and then use its classloader.
        try {
            // Class.getMethod() never returns null; it throws when absent, so no
            // null check is needed here.
            Method adaptMethod = Bundle.class.getMethod("adapt", Class.class);
            return getBundleClassLoaderViaAdapt(b, adaptMethod);
        } catch (Exception e) {
            // No Bundle.adapt() (pre-4.3 framework), or adapt failed: fall
            // through to the classload-based approaches below.
        }

        List<String> rootPaths = new ArrayList<String>();
        rootPaths.add("/");

        // Breadth-first scan of the bundle's entries for any .class resource.
        while(rootPaths.size() > 0) {
            String rootPath = rootPaths.remove(0);

            Enumeration<String> paths = b.getEntryPaths(rootPath);
            while(paths != null && paths.hasMoreElements()) {
                String path = paths.nextElement();
                if (path.endsWith(".class")) {
                    ClassLoader cl = getClassLoaderFromClassResource(b, path);
                    if (cl != null)
                        return cl;
                } else if (path.endsWith("/")) {
                    rootPaths.add(path);
                }
            }
        }

        // if we can't find any classes in the bundle directly, try the Bundle-ClassPath
        Object bcp = b.getHeaders().get(Constants.BUNDLE_CLASSPATH);
        if (bcp instanceof String) {
            for (String entry : ((String) bcp).split(",")) {
                entry = entry.trim();
                if (entry.equals("."))
                    continue;

                URL url = b.getResource(entry);
                if (url != null) {
                    ClassLoader cl = getClassLoaderViaBundleClassPath(b, url);
                    if (cl != null)
                        return cl;
                }
            }
        }
        throw new RuntimeException("Could not obtain classloader for bundle " + b);
    }

    private static ClassLoader getBundleClassLoaderViaAdapt(Bundle b, Method adaptMethod) {
        // This method uses reflection to avoid a hard dependency on OSGi 4.3 APIs
        try {
            // Load the BundleRevision and BundleWiring classes from the System Bundle.
            Bundle systemBundle = b.getBundleContext().getBundle(0);
            Class<?> bundleRevisionClass = systemBundle.loadClass("org.osgi.framework.wiring.BundleRevision");
            Object bundleRevision = adaptMethod.invoke(b, bundleRevisionClass);

            Method getWiringMethod = bundleRevisionClass.getDeclaredMethod("getWiring");
            Object bundleWiring = getWiringMethod.invoke(bundleRevision);

            Class<?> bundleWiringClass = systemBundle.loadClass("org.osgi.framework.wiring.BundleWiring");
            Method getClassLoaderMethod = bundleWiringClass.getDeclaredMethod("getClassLoader");

            return (ClassLoader) getClassLoaderMethod.invoke(bundleWiring);
        } catch (Exception e) {
            throw new RuntimeException("Can't obtain Bundle Class Loader for bundle: " + b, e);
        }
    }

    private static BundleReference getBundleReference(ClassLoader bundleLoader) {
        if (BaseActivator.activator == null) {
            // The system is not yet initialized. We can't do anything.
            return null;
        }

        if (!(bundleLoader instanceof BundleReference)) {
            BaseActivator.activator.log(Level.FINE, "Classloader of consuming bundle doesn't implement BundleReference: " + bundleLoader);
            return null;
        }

        return (BundleReference) bundleLoader;
    }

    private static ClassLoader getClassLoaderViaBundleClassPath(Bundle b, URL url) {
        // Scan an embedded jar (from Bundle-ClassPath) for any class resource and
        // obtain the classloader from the first class that can be loaded.
        // try-with-resources replaces the previous manual close in a finally block.
        try (JarInputStream jis = new JarInputStream(url.openStream())) {
            JarEntry je = null;
            while ((je = jis.getNextJarEntry()) != null) {
                String path = je.getName();
                if (path.endsWith(".class")) {
                    ClassLoader cl = getClassLoaderFromClassResource(b, path);
                    if (cl != null)
                        return cl;
                }
            }
        } catch (IOException e) {
            BaseActivator.activator.log(Level.FINE, "Problem loading class from embedded jar file: " + url +
                " in bundle " + b.getSymbolicName(), e);
        }
        return null;
    }

    private static ClassLoader getClassLoaderFromClassResource(Bundle b, String path) {
        // Turn an entry path like "/org/foo/Bar.class" into "org.foo.Bar".
        String className = path.substring(0, path.length() - ".class".length());
        if (className.startsWith("/"))
            className = className.substring(1);

        className = className.replace('/', '.');
        try {
            Class<?> cls = b.loadClass(className);
            return cls.getClassLoader();
        } catch (ClassNotFoundException e) {
            // try the next class
        }
        return null;
    }

    /**
     * Classloader handed to ServiceLoader.load(Class, ClassLoader): delegates to
     * the caller-specified classloader first (as parent) and falls back to the
     * provider-visible bundle classloader for classes and resources.
     */
    private static class WrapperCL extends ClassLoader {
        private final ClassLoader bundleClassloader;

        public WrapperCL(ClassLoader specifiedClassLoader, ClassLoader bundleClassloader) {
            super(specifiedClassLoader);
            this.bundleClassloader = bundleClassloader;
        }

        @Override
        protected Class<?> findClass(String name) throws ClassNotFoundException {
            return bundleClassloader.loadClass(name);
        }

        @Override
        protected URL findResource(String name) {
            return bundleClassloader.getResource(name);
        }

        @Override
        protected Enumeration<URL> findResources(String name) throws IOException {
            return bundleClassloader.getResources(name);
        }
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/** Contains information needed for the byte code weaver: which call to weave
 * and which restrictions/allowed bundles apply to it. Instances are immutable.
 */
public class WeavingData {
    private final String className;
    private final String methodName;
    private final String[] argClasses;
    private final Set<ConsumerRestriction> argRestrictions;
    private final List<BundleDescriptor> allowedBundles;
    // Precomputed toString() value; safe because all fields are final.
    private final String _string;

    /**
     * Constructor.
     * @param className The class name of the call that needs to be woven.
     * @param methodName The method name of the call that needs to be woven.
     * @param argClasses The overload (class names of the signature) of the call
     * that needs to be woven. If <code>null</code> then all overloads of the method
     * need to be woven.
     * @param argRestrictions A set of Consumer Restrictions.
     * @param allowedBundles The list of allowed bundles.
     */
    public WeavingData(String className, String methodName, String[] argClasses, Set<ConsumerRestriction> argRestrictions, List<BundleDescriptor> allowedBundles) {
        // TODO can we infer argClasses from restrictions?
        this.className = className;
        this.methodName = methodName;
        this.argClasses = argClasses;
        this.argRestrictions = argRestrictions;
        this.allowedBundles = allowedBundles;

        // StringBuilder instead of StringBuffer: the builder is confined to this
        // constructor, so no synchronization is needed.
        StringBuilder sb = new StringBuilder();

        sb.append("{WeavingData: {className: '");
        sb.append(getClassName());
        sb.append("', methodName: '");
        sb.append(getMethodName());
        sb.append("', arguments: [");
        String prefix = "";
        for (String arg : (argClasses != null) ? argClasses : new String[0]) {
            sb.append(prefix);
            sb.append(arg);
            prefix = ",";
        }
        sb.append("], allowedBundles: [");
        prefix = "{";
        for (BundleDescriptor descriptor : (allowedBundles != null) ? allowedBundles : Collections.<BundleDescriptor>emptyList()) {
            sb.append(prefix);
            sb.append("symbolicName: '");
            sb.append(descriptor.getSymbolicName());
            sb.append("', id: '");
            sb.append(descriptor.getBundleID());
            sb.append("'}");
            prefix = ", {";
        }
        sb.append("]}}");

        this._string = sb.toString();
    }

    public String getClassName() {
        return className;
    }

    public String getMethodName() {
        return methodName;
    }

    /** The bundles allowed to provide for this call; may be {@code null}. */
    public List<BundleDescriptor> getAllowedBundles() {
        return allowedBundles;
    }

    /** The overload signature to weave; {@code null} means all overloads. */
    public String[] getArgClasses() {
        return argClasses;
    }

    public Set<ConsumerRestriction> getArgRestrictions() {
        return argRestrictions;
    }

    @Override
    public String toString() {
        return _string;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + Arrays.hashCode(argClasses);
        result = prime * result + ((className == null) ? 0 : className.hashCode());
        result = prime * result + ((methodName == null) ? 0 : methodName.hashCode());
        result = prime * result + ((argRestrictions == null) ? 0 : argRestrictions.hashCode());
        result = prime * result + ((allowedBundles == null) ? 0 : allowedBundles.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;

        WeavingData other = (WeavingData) obj;
        if (!Arrays.equals(argClasses, other.argClasses))
            return false;
        if (className == null) {
            if (other.className != null)
                return false;
        } else if (!className.equals(other.className))
            return false;
        if (methodName == null) {
            if (other.methodName != null)
                return false;
        } else if (!methodName.equals(other.methodName))
            return false;
        if (argRestrictions == null) {
            if (other.argRestrictions != null)
                return false;
        } else if (!argRestrictions.equals(other.argRestrictions))
            return false;
        if (allowedBundles == null) {
            if (other.allowedBundles != null)
                return false;
        } else if (!allowedBundles.equals(other.allowedBundles))
            return false;
        return true;
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import org.osgi.framework.Bundle;
import org.osgi.framework.PrototypeServiceFactory;
import org.osgi.framework.ServiceRegistration;
/**
 * PrototypeServiceFactory that reflectively creates a new instance of the
 * provider implementation class on every getService() call.
 */
public class ProviderPrototypeServiceFactory implements PrototypeServiceFactory {
    // The provider implementation type; must expose a public no-arg constructor.
    private final Class<?> implClass;

    public ProviderPrototypeServiceFactory(Class<?> cls) {
        implClass = cls;
    }

    @Override
    public Object getService(Bundle bundle, ServiceRegistration registration) {
        try {
            return implClass.getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            throw new RuntimeException("Unable to instantiate class " + implClass +
                " Does it have a public no-arg constructor?", e);
        }
    }

    @Override
    public void ungetService(Bundle bundle, ServiceRegistration registration, Object service) {
        // nothing to do
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
/**
 * A simple holder object for a pair of objects.
 */
public class Pair <A, B> {
    private final A left;
    private final B right;

    public Pair(A left, B right) {
        this.left = left;
        this.right = right;
    }

    public A getLeft() {
        return left;
    }

    public B getRight() {
        return right;
    }

    @Override
    public int hashCode() {
        // Same 31-based mixing as the classic generated form; null hashes as 0.
        int leftHash = (left == null) ? 0 : left.hashCode();
        int rightHash = (right == null) ? 0 : right.hashCode();
        return 31 * (31 + leftHash) + rightHash;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        Pair<?, ?> other = (Pair<?, ?>) obj;
        return ((left == null) ? other.left == null : left.equals(other.left))
            && ((right == null) ? other.right == null : right.equals(other.right));
    }

    @Override
    public String toString() {
        return "Pair [left=" + left + ", right=" + right + "]";
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import java.util.Map;
/**
 * Restricts a consumer to a single (class, method) call site: the restriction
 * applies only when both the class name matches exactly and the held
 * MethodRestriction matches the invoked method and arguments.
 */
public class ConsumerRestriction {
    private final String className;
    private final MethodRestriction methodRestriction;

    public ConsumerRestriction(String className, MethodRestriction methodRestriction) {
        this.className = className;
        this.methodRestriction = methodRestriction;
    }

    public String getClassName() {
        return className;
    }

    public MethodRestriction getMethodRestriction() {
        return methodRestriction;
    }

    /** Returns the held restriction when its method name matches, else null. */
    public MethodRestriction getMethodRestriction(String methodName) {
        return methodName.equals(methodRestriction.getMethodName()) ? methodRestriction : null;
    }

    public boolean matches(String clsName, String mtdName, Map<Pair<Integer, String>, String> args) {
        // The class name must match exactly before the method-level check applies.
        return className.equals(clsName) && methodRestriction.matches(mtdName, args);
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import org.osgi.framework.Bundle;
import org.osgi.framework.ServiceFactory;
import org.osgi.framework.ServiceRegistration;
/**
 * ServiceFactory that reflectively creates a new instance of the provider
 * implementation class on every getService() call.
 */
public class ProviderServiceFactory implements ServiceFactory {
    // The provider implementation type; must expose a public no-arg constructor.
    private final Class<?> implClass;

    public ProviderServiceFactory(Class<?> cls) {
        implClass = cls;
    }

    @Override
    public Object getService(Bundle bundle, ServiceRegistration registration) {
        try {
            return implClass.getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            throw new RuntimeException("Unable to instantiate class " + implClass +
                " Does it have a public no-arg constructor?", e);
        }
    }

    @Override
    public void ungetService(Bundle bundle, ServiceRegistration registration, Object service) {
        // nothing to do
    }
}
| 9,716 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries/spifly/MultiDelegationClassloader.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
/** A classloader that delegates to a number of other classloaders.
 * This classloader can be used if a single classloader is needed that has
 * visibility of a number of other classloaders. For example if a Thread Context
 * Classloader is needed that has visibility of a number of bundles so that
 * ServiceLoader.load() can find all the services provided by these bundles.
 */
public class MultiDelegationClassloader extends ClassLoader {
    // Delegates are consulted in the order supplied at construction time.
    private final ClassLoader[] delegates;

    /**
     * Creates a classloader delegating to the given loaders, in order.
     *
     * @param classLoaders the delegate classloaders. Must not be {@code null},
     *        but may be empty (in which case nothing can be loaded or found).
     */
    public MultiDelegationClassloader(ClassLoader ... classLoaders) {
        if (classLoaders == null)
            throw new NullPointerException("classLoaders must not be null");
        // Defensive copy: later changes to the caller's array must not affect us.
        delegates = classLoaders.clone();
    }

    /** Returns the first matching resource found in any delegate, or {@code null}. */
    @Override
    public URL getResource(String name) {
        for (ClassLoader cl : delegates) {
            URL res = cl.getResource(name);
            if (res != null)
                return res;
        }
        return null;
    }

    /** Returns a stream for the first matching resource in any delegate, or {@code null}. */
    @Override
    public InputStream getResourceAsStream(String name) {
        for (ClassLoader cl : delegates) {
            InputStream is = cl.getResourceAsStream(name);
            if (is != null)
                return is;
        }
        return null;
    }

    /** Returns the concatenation of all delegates' matches, in delegate order. */
    @Override
    public Enumeration<URL> getResources(String name) throws IOException {
        List<URL> urls = new ArrayList<URL>();
        for (ClassLoader cl : delegates) {
            urls.addAll(Collections.list(cl.getResources(name)));
        }
        return Collections.enumeration(urls);
    }

    /**
     * Loads the class from the first delegate that can provide it.
     *
     * @throws ClassNotFoundException if no delegate can load the class.
     */
    @Override
    public Class<?> loadClass(String name) throws ClassNotFoundException {
        ClassNotFoundException lastEx = null;
        for (ClassLoader cl : delegates) {
            try {
                return cl.loadClass(name);
            } catch (ClassNotFoundException e) {
                lastEx = e;
            }
        }
        if (lastEx != null)
            throw lastEx;
        // Fix: with zero delegates 'lastEx' stayed null and this method threw a
        // NullPointerException. Report a proper ClassNotFoundException instead.
        throw new ClassNotFoundException(name);
    }
}
| 9,717 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries/spifly/MethodRestriction.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import java.util.Map;
/**
 * A restriction on a single method name, optionally constraining the
 * argument types/values of the call via an {@link ArgRestrictions}.
 * A {@code null} {@link ArgRestrictions} means all overloads match.
 */
public class MethodRestriction {
    private final String methodName;
    private final ArgRestrictions argRestrictions;

    /** Restricts by method name only; every overload matches. */
    public MethodRestriction(String methodName) {
        this(methodName, null);
    }

    public MethodRestriction(String methodName, ArgRestrictions argRestrictions) {
        this.methodName = methodName;
        this.argRestrictions = argRestrictions;
    }

    public String getMethodName() {
        return methodName;
    }

    /** Returns the restricted argument classes, or {@code null} when unconstrained. */
    public String[] getArgClasses() {
        return argRestrictions == null ? null : argRestrictions.getArgClasses();
    }

    /**
     * Returns whether a call to {@code mtdName} with the given actual arguments
     * matches. A {@code null} args map, or the absence of argument restrictions,
     * counts as a match once the method name agrees.
     */
    public boolean matches(String mtdName, Map<Pair<Integer, String>, String> args) {
        if (!methodName.equals(mtdName)) {
            return false;
        }
        return args == null || argRestrictions == null || argRestrictions.matches(args);
    }
}
| 9,718 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries/spifly/ConsumerHeaderProcessor.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Dictionary;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.ServiceLoader;
import java.util.Set;
import org.apache.aries.spifly.HeaderParser.PathElement;
import org.osgi.framework.Filter;
import org.osgi.framework.FrameworkUtil;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.Version;
import aQute.bnd.header.OSGiHeader;
import aQute.bnd.header.Parameters;
/**
 * Parses the headers through which a bundle declares itself an SPI consumer
 * (either the proprietary SPI-Consumer header or the standard
 * Require-Capability header) and turns them into {@link WeavingData}
 * instructions for the byte-code weaver.
 */
public class ConsumerHeaderProcessor {
    // Attributes describing the serviceloader processor extender.
    // NOTE(review): this map is populated but not referenced anywhere in this
    // class — looks like dead code; verify before removing.
    private static final Dictionary<String, String> PROCESSOR_FILTER_MATCH;
    static {
        PROCESSOR_FILTER_MATCH = new Hashtable<String, String>();
        PROCESSOR_FILTER_MATCH.put(SpiFlyConstants.EXTENDER_CAPABILITY_NAMESPACE, SpiFlyConstants.PROCESSOR_EXTENDER_NAME);
    }
    /**
     * Parses headers of the following syntax:
     * <ul>
     * <li><tt>org.acme.MyClass#myMethod</tt> - apply the weaving to all overloads of <tt>myMethod()</tt>
     * in <tt>MyClass</tt>
     * <li><tt>org.acme.MyClass#myMethod(java.lang.String, java.util.List)</tt> - apply the weaving only
     * to the <tt>myMethod(String, List)</tt> overload in <tt>MyClass</tt>
     * <li><tt>org.acme.MyClass#myMethod()</tt> - apply the weaving only to the noarg overload of
     * <tt>myMethod()</tt>
     * <li><b>true</b> - equivalent to <tt>java.util.ServiceLoader#load(java.lang.Class)</tt>
     * </ul>
     * Additionally, it registers the consumer's constraints with the consumer registry in the activator, if the
     * consumer is only constrained to a certain set of bundles.<p>
     *
     * The following attributes are supported:
     * <ul>
     * <li><tt>bundle</tt> - restrict wiring to the bundle with the specifies Symbolic Name. The attribute value
     * is a list of bundle identifiers separated by a '|' sign. The bundle identifier starts with the Symbolic name
     * and can optionally contain a version suffix. E.g. bundle=impl2:version=1.2.3 or bundle=impl2|impl4.
     * <li><tt>bundleId</tt> - restrict wiring to the bundle with the specified bundle ID. Typically used when
     * the service should be forcibly picked up from the system bundle (<tt>bundleId=0</tt>). Multiple bundle IDs
     * can be specified separated by a '|' sign.
     * </ul>
     *
     * @param consumerHeaderName the name of the header (either Require-Capability or SPI-Consumer)
     * @param consumerHeader the <tt>SPI-Consumer</tt> header.
     * @return an instance of the {@link WeavingData} class.
     * @throws Exception when a header cannot be parsed.
     */
    public static Set<WeavingData> processHeader(String consumerHeaderName, String consumerHeader) throws Exception {
        // The standards-based Require-Capability header uses a different syntax
        // and is handled separately.
        if (SpiFlyConstants.REQUIRE_CAPABILITY.equals(consumerHeaderName)) {
            return processRequireCapabilityHeader(consumerHeader);
        }
        Set<WeavingData> weavingData = new HashSet<WeavingData>();
        for (PathElement element : HeaderParser.parseHeader(consumerHeader)) {
            List<BundleDescriptor> allowedBundles = new ArrayList<BundleDescriptor>();
            String name = element.getName().trim();
            String className;
            String methodName;
            MethodRestriction methodRestriction;
            // Set when the wildcard '*' form is used; in that case both
            // ServiceLoader.load(Class) and load(Class, ClassLoader) are woven.
            boolean serviceLoader = false;
            int hashIdx = name.indexOf('#');
            if (hashIdx > 0) {
                // Form: org.acme.MyClass#myMethod or org.acme.MyClass#myMethod(args...).
                className = name.substring(0, hashIdx);
                // braceIdx and closeIdx below are relative to hashIdx.
                int braceIdx = name.substring(hashIdx).indexOf('(');
                if (braceIdx > 0) {
                    methodName = name.substring(hashIdx + 1, hashIdx + braceIdx);
                    ArgRestrictions argRestrictions = new ArgRestrictions();
                    int closeIdx = name.substring(hashIdx).indexOf(')');
                    if (closeIdx > 0) {
                        String classes = name.substring(hashIdx + braceIdx + 1, hashIdx + closeIdx).trim();
                        if (classes.length() > 0) {
                            if (classes.indexOf('[') > 0) {
                                // Arguments carry value restrictions, e.g. "type[allowedValue]".
                                int argNumber = 0;
                                for (String s : classes.split(",")) {
                                    int idx = s.indexOf('[');
                                    int end = s.indexOf(']', idx);
                                    if (idx > 0 && end > idx) {
                                        argRestrictions.addRestriction(argNumber, s.substring(0, idx), s.substring(idx + 1, end));
                                    } else {
                                        argRestrictions.addRestriction(argNumber, s);
                                    }
                                    argNumber++;
                                }
                            } else {
                                // Plain comma-separated list of argument types.
                                String[] classNames = classes.split(",");
                                for (int i = 0; i < classNames.length; i++) {
                                    argRestrictions.addRestriction(i, classNames[i]);
                                }
                            }
                        } else {
                            // Empty parens: the no-arg overload; no argument restrictions apply.
                            argRestrictions = null;
                        }
                    }
                    methodRestriction = new MethodRestriction(methodName, argRestrictions);
                } else {
                    // No parens: weave all overloads of the method.
                    methodName = name.substring(hashIdx + 1);
                    methodRestriction = new MethodRestriction(methodName);
                }
            } else {
                if ("*".equalsIgnoreCase(name)) {
                    // Wildcard: equivalent to java.util.ServiceLoader#load(java.lang.Class).
                    serviceLoader = true;
                    className = ServiceLoader.class.getName();
                    methodName = "load";
                    ArgRestrictions argRestrictions = new ArgRestrictions();
                    argRestrictions.addRestriction(0, Class.class.getName());
                    methodRestriction = new MethodRestriction(methodName, argRestrictions);
                } else {
                    throw new IllegalArgumentException("Must at least specify class name and method name: " + name);
                }
            }
            // Optional 'bundle' attribute: '|'-separated symbolic names, each
            // optionally suffixed with ':version=<v>'.
            String bsn = element.getAttribute("bundle");
            if (bsn != null) {
                bsn = bsn.trim();
                if (bsn.length() > 0) {
                    for (String s : bsn.split("\\|")) {
                        int colonIdx = s.indexOf(':');
                        if (colonIdx > 0) {
                            String sn = s.substring(0, colonIdx);
                            String versionSfx = s.substring(colonIdx + 1);
                            if (versionSfx.startsWith("version=")) {
                                allowedBundles.add(new BundleDescriptor(sn,
                                        Version.parseVersion(versionSfx.substring("version=".length()))));
                            } else {
                                allowedBundles.add(new BundleDescriptor(sn));
                            }
                        } else {
                            allowedBundles.add(new BundleDescriptor(s));
                        }
                    }
                }
            }
            // Optional 'bundleId' attribute: '|'-separated numeric bundle IDs.
            String bid = element.getAttribute("bundleId");
            if (bid != null) {
                bid = bid.trim();
                if (bid.length() > 0) {
                    for (String s : bid.split("\\|")) {
                        allowedBundles.add(new BundleDescriptor(Long.parseLong(s)));
                    }
                }
            }
            weavingData.add(createWeavingData(className, methodName, methodRestriction, allowedBundles));
            // The wildcard form additionally weaves the two-arg
            // ServiceLoader.load(Class, ClassLoader) overload.
            if (serviceLoader) {
                className = ServiceLoader.class.getName();
                methodName = "load";
                ArgRestrictions argRestrictions = new ArgRestrictions();
                argRestrictions.addRestriction(0, Class.class.getName());
                argRestrictions.addRestriction(1, ClassLoader.class.getName());
                methodRestriction = new MethodRestriction(methodName, argRestrictions);
                weavingData.add(createWeavingData(className, methodName, methodRestriction, allowedBundles));
            }
        }
        return weavingData;
    }
    /**
     * Handles the Require-Capability form of the consumer declaration. Weaving
     * is applied only when the bundle requires the processor extender
     * capability; any osgi.serviceloader requirements with filters further
     * restrict which provider bundles may be wired. Both overloads of
     * {@code ServiceLoader.load} are woven.
     */
    private static Set<WeavingData> processRequireCapabilityHeader(String consumerHeader) throws InvalidSyntaxException {
        Set<WeavingData> weavingData = new HashSet<WeavingData>();
        Parameters requirements = OSGiHeader.parseHeader(consumerHeader);
        Entry<String, ? extends Map<String, String>> extenderRequirement = findRequirement(requirements, SpiFlyConstants.EXTENDER_CAPABILITY_NAMESPACE, SpiFlyConstants.PROCESSOR_EXTENDER_NAME);
        Collection<Entry<String, ? extends Map<String, String>>> serviceLoaderRequirements = findAllMetadata(requirements, SpiFlyConstants.SERVICELOADER_CAPABILITY_NAMESPACE);
        if (extenderRequirement != null) {
            List<BundleDescriptor> allowedBundles = new ArrayList<BundleDescriptor>();
            // Each osgi.serviceloader requirement's filter becomes a bundle constraint.
            for (Entry<String, ? extends Map<String, String>> req : serviceLoaderRequirements) {
                String slFilterString = req.getValue().get(SpiFlyConstants.FILTER_DIRECTIVE);
                if (slFilterString != null) {
                    Filter slFilter = FrameworkUtil.createFilter(slFilterString);
                    allowedBundles.add(new BundleDescriptor(slFilter));
                }
            }
            // ServiceLoader.load(Class)
            {
                ArgRestrictions ar = new ArgRestrictions();
                ar.addRestriction(0, Class.class.getName());
                MethodRestriction mr = new MethodRestriction("load", ar);
                weavingData.add(createWeavingData(ServiceLoader.class.getName(), "load", mr, allowedBundles));
            }
            // ServiceLoader.load(Class, ClassLoader)
            {
                ArgRestrictions ar = new ArgRestrictions();
                ar.addRestriction(0, Class.class.getName());
                ar.addRestriction(1, ClassLoader.class.getName());
                MethodRestriction mr = new MethodRestriction("load", ar);
                weavingData.add(createWeavingData(ServiceLoader.class.getName(), "load", mr, allowedBundles));
            }
        }
        return weavingData;
    }
    /**
     * Wraps a single class/method restriction into a {@link WeavingData}.
     * An empty allowed-bundle list is translated to {@code null} (no constraint).
     */
    private static WeavingData createWeavingData(String className, String methodName,
            MethodRestriction methodRestriction, List<BundleDescriptor> allowedBundles) {
        ConsumerRestriction restriction = new ConsumerRestriction(className, methodRestriction);
        // TODO is this correct? Why is it added to a set?
        Set<ConsumerRestriction> restrictions = new HashSet<ConsumerRestriction>();
        restrictions.add(restriction);
        // TODO this can be done in the WeavingData itself?
        String[] argClasses = restriction.getMethodRestriction(methodName).getArgClasses();
        return new WeavingData(className, methodName, argClasses, restrictions,
                allowedBundles.size() == 0 ? null : allowedBundles);
    }
    /**
     * Returns the capability in the given namespace whose namespace attribute
     * equals {@code spiName}, or {@code null} if none is found.
     */
    static Entry<String, ? extends Map<String, String>> findCapability(Parameters capabilities, String namespace, String spiName) {
        for (Entry<String, ? extends Map<String, String>> cap : capabilities.entrySet()) {
            String key = removeDuplicateMarker(cap.getKey());
            if (namespace.equals(key)) {
                if (spiName.equals(cap.getValue().get(namespace))) {
                    return cap;
                }
            }
        }
        return null;
    }
    /**
     * Returns the first requirement in the given namespace whose filter
     * directive matches the attributes {namespace=type,
     * version=SPECIFICATION_VERSION}, or {@code null} if none matches.
     *
     * @throws InvalidSyntaxException if a filter directive cannot be parsed.
     */
    static Entry<String, ? extends Map<String, String>> findRequirement(Parameters requirements, String namespace, String type) throws InvalidSyntaxException {
        Dictionary<String, Object> nsAttr = new Hashtable<>();
        nsAttr.put(namespace, type);
        nsAttr.put("version", SpiFlyConstants.SPECIFICATION_VERSION);
        for (Entry<String, ? extends Map<String, String>> req : requirements.entrySet()) {
            String key = removeDuplicateMarker(req.getKey());
            if (namespace.equals(key)) {
                String filterString = req.getValue().get(SpiFlyConstants.FILTER_DIRECTIVE);
                if (filterString != null) {
                    Filter filter = FrameworkUtil.createFilter(filterString);
                    if (filter.match(nsAttr)) {
                        return req;
                    }
                }
            }
        }
        return null;
    }
    /** Returns all requirements/capabilities declared in the given namespace. */
    static Collection<Entry<String, ? extends Map<String, String>>> findAllMetadata(Parameters requirementsOrCapabilities, String namespace) {
        List<Entry<String, ? extends Map<String, String>>> reqsCaps = new ArrayList<>();
        for (Entry<String, ? extends Map<String, String>> reqCap : requirementsOrCapabilities.entrySet()) {
            String key = removeDuplicateMarker(reqCap.getKey());
            if (namespace.equals(key)) {
                reqsCaps.add(reqCap);
            }
        }
        return reqsCaps;
    }
    /**
     * Strips the trailing '~' characters that bnd's {@code Parameters} appends
     * to disambiguate duplicate keys, restoring the original namespace name.
     */
    static String removeDuplicateMarker(String key) {
        int i = key.length() - 1;
        while (i >= 0 && key.charAt(i) == '~')
            --i;
        return key.substring(0, i + 1);
    }
}
| 9,719 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries/spifly/HeaderParser.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Utility class to parse a standard OSGi header with paths.
 *
 * Stolen from Aries Blueprint Core (blueprint.utils). Need to give it back!
 */
public class HeaderParser {

    private HeaderParser() {}

    /**
     * Parse a given OSGi header into a list of paths
     *
     * @param header the OSGi header to parse
     * @return the list of paths extracted from this header
     */
    public static List<PathElement> parseHeader(String header) {
        List<PathElement> result = new ArrayList<PathElement>();
        if (header == null || header.trim().length() == 0) {
            return result;
        }
        // Split into clauses on commas that are not inside parentheses.
        for (String clause : header.split(",(?![^\\(\\)]*(\\)))")) {
            String[] pieces = clause.split(";");
            if (pieces.length < 1) {
                throw new IllegalArgumentException("Invalid header clause: " + clause);
            }
            PathElement current = new PathElement(pieces[0].trim());
            result.add(current);
            for (int idx = 1; idx < pieces.length; idx++) {
                String piece = pieces[idx];
                int eq = piece.indexOf('=');
                if (eq == -1) {
                    // A bare path after ';' starts another element in this clause;
                    // subsequent attributes/directives attach to it.
                    current = new PathElement(piece.trim());
                    result.add(current);
                } else if (eq > 0 && piece.charAt(eq - 1) == ':') {
                    // name:=value is a directive.
                    current.addDirective(piece.substring(0, eq - 1).trim(), piece.substring(eq + 1).trim());
                } else {
                    // name=value is an attribute.
                    current.addAttribute(piece.substring(0, eq).trim(), piece.substring(eq + 1).trim());
                }
            }
        }
        return result;
    }

    /** One path of a header clause together with its attributes and directives. */
    public static class PathElement {
        private final String path;
        private final Map<String, String> attributes = new HashMap<String, String>();
        private final Map<String, String> directives = new HashMap<String, String>();

        public PathElement(String path) {
            this.path = path;
        }

        public String getName() {
            return this.path;
        }

        public Map<String, String> getAttributes() {
            return attributes;
        }

        public String getAttribute(String name) {
            return attributes.get(name);
        }

        public void addAttribute(String name, String value) {
            attributes.put(name, value);
        }

        public Map<String, String> getDirectives() {
            return directives;
        }

        public String getDirective(String name) {
            return directives.get(name);
        }

        public void addDirective(String name, String value) {
            directives.put(name, value);
        }
    }
}
| 9,720 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries/spifly/ArgRestrictions.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * Records restrictions on the arguments of a woven method call: for each
 * (argument index, argument class) pair an optional list of permitted values.
 * A {@code null} value list means any value is acceptable for that argument.
 */
public class ArgRestrictions {
    // LinkedHashMap preserves registration order, so getArgClasses() returns
    // the classes in the order restrictions were added (call sites add them in
    // ascending argument order). The previous HashMap made that order arbitrary.
    private final Map<Pair<Integer, String>, List<String>> restrictions =
        new LinkedHashMap<Pair<Integer, String>, List<String>>();

    /** Restricts the given argument to the given class; any value is allowed. */
    public void addRestriction(int argNumber, String className) {
        addRestriction(argNumber, className, null);
    }

    /**
     * Restricts the given argument to the given class and, when
     * {@code allowedArgValue} is non-null, adds it to the values permitted
     * for that argument.
     */
    public void addRestriction(int argNumber, String className, String allowedArgValue) {
        Pair<Integer, String> key = new Pair<Integer, String>(argNumber, className);
        List<String> allowedValues = restrictions.get(key);
        if (allowedArgValue != null) {
            if (allowedValues == null) {
                allowedValues = new ArrayList<String>();
            }
            allowedValues.add(allowedArgValue);
        }
        // Single put (the original code called put twice redundantly).
        restrictions.put(key, allowedValues);
    }

    /**
     * Returns the restricted argument classes in registration order, or
     * {@code null} when no restrictions have been registered.
     */
    public String[] getArgClasses() {
        List<String> classes = new ArrayList<String>();
        for (Pair<Integer, String> key : restrictions.keySet()) {
            classes.add(key.getRight());
        }
        if (classes.size() == 0)
            return null;
        return classes.toArray(new String[classes.size()]);
    }

    /**
     * Returns whether the actual invocation arguments satisfy the registered
     * restrictions: every actual (index, class) key must be registered and,
     * when a value list exists for it, that list must contain the actual value.
     */
    public boolean matches(Map<Pair<Integer, String>, String> args) {
        for (Pair<Integer, String> key : args.keySet()) {
            if (!restrictions.containsKey(key)) {
                return false;
            }
            List<String> values = restrictions.get(key);
            if (values != null) {
                String val = args.get(key);
                if (!values.contains(val)) {
                    return false;
                }
            }
        }
        return true;
    }
}
| 9,721 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries/spifly/SpiFlyConstants.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import org.osgi.framework.Version;
/**
 * Constants used throughout the SPI Fly implementation: header names,
 * capability namespaces, directive names and service registration properties.
 */
public interface SpiFlyConstants {
    /** Version of the ServiceLoader Mediator specification implemented. */
    String SPECIFICATION_VERSION_STRING = "1.0.0";
    Version SPECIFICATION_VERSION = new Version(SPECIFICATION_VERSION_STRING);
    // Not taken from OSGi Constants because this code needs to compile with the 4.2 OSGi classes.
    String PROVIDE_CAPABILITY = "Provide-Capability";
    String REQUIRE_CAPABILITY = "Require-Capability";
    String EXTENDER_CAPABILITY_NAMESPACE = "osgi.extender";
    String FILTER_DIRECTIVE = "filter:";
    // These are two proprietary headers which predated the ServiceLoader Mediator
    // specification and are more powerful than what is specified there
    String SPI_CONSUMER_HEADER = "SPI-Consumer";
    String SPI_PROVIDER_HEADER = "SPI-Provider";
    // ServiceLoader capability and related directive
    String SERVICELOADER_CAPABILITY_NAMESPACE = "osgi.serviceloader";
    String REGISTER_DIRECTIVE = "register:";
    // Service registration property
    String SERVICELOADER_MEDIATOR_PROPERTY = "serviceloader.mediator";
    // Leading '.' marks these registration properties as private (not visible to filters).
    String PROVIDER_IMPLCLASS_PROPERTY = ".org.apache.aries.spifly.provider.implclass";
    String PROVIDER_DISCOVERY_MODE = ".org.apache.aries.spifly.provider.discovery.mode";
    // The names of the extenders involved
    String PROCESSOR_EXTENDER_NAME = "osgi.serviceloader.processor";
    String REGISTRAR_EXTENDER_NAME = "osgi.serviceloader.registrar";
    // Pre-baked requirements for consumer and provider
    String CLIENT_REQUIREMENT = EXTENDER_CAPABILITY_NAMESPACE + "; " + FILTER_DIRECTIVE +
            "=\"(" + EXTENDER_CAPABILITY_NAMESPACE + "=" + PROCESSOR_EXTENDER_NAME + ")\"";
    String PROVIDER_REQUIREMENT = EXTENDER_CAPABILITY_NAMESPACE + "; " + FILTER_DIRECTIVE +
            "=\"(" + EXTENDER_CAPABILITY_NAMESPACE + "=" + REGISTRAR_EXTENDER_NAME + ")\"";
    // Marker header recording that a bundle's SPI-Consumer header has already been processed.
    String PROCESSED_SPI_CONSUMER_HEADER = "X-SpiFly-Processed-SPI-Consumer";
}
| 9,722 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries/spifly/Streams.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/** Small stream-copying helpers. */
public class Streams {

    private Streams() {}

    /**
     * Copies all remaining bytes from {@code is} to {@code os}. Data is
     * accumulated in an internal 8K buffer and written out in full-buffer
     * chunks, with any remainder flushed at the end. Neither stream is closed.
     */
    public static void pump(InputStream is, OutputStream os) throws IOException {
        final byte[] buffer = new byte[8192];
        int filled = 0;
        for (int count; (count = is.read(buffer, filled, buffer.length - filled)) != -1;) {
            filled += count;
            if (filled == buffer.length) {
                // Buffer is full: flush it and start filling from the beginning.
                os.write(buffer, 0, buffer.length);
                filled = 0;
            }
        }
        if (filled != 0) {
            os.write(buffer, 0, filled);
        }
    }

    /**
     * Reads the given stream fully into a byte array; the stream is always
     * closed, even when reading fails.
     */
    public static byte [] suck(InputStream is) throws IOException {
        try {
            ByteArrayOutputStream collector = new ByteArrayOutputStream();
            pump(is, collector);
            return collector.toByteArray();
        } finally {
            is.close();
        }
    }
}
| 9,723 |
0 | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries | Create_ds/aries/spi-fly/spi-fly-core/src/main/java/org/apache/aries/spifly/BundleDescriptor.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import org.osgi.framework.Filter;
import org.osgi.framework.Version;
/**
 * Identifies a set of bundles a consumer may be wired to, by exactly one of:
 * symbolic name (with optional version), bundle ID, or an OSGi filter.
 * The unused identification fields are {@code null} / {@link #BUNDLE_ID_UNSPECIFIED}.
 */
class BundleDescriptor {
    public static final int BUNDLE_ID_UNSPECIFIED = -1;

    final String symbolicName;
    final Version version;
    final long bundleID;
    final Filter filter;

    BundleDescriptor(String symbolicName) {
        this(symbolicName, null);
    }

    BundleDescriptor(String symbolicName, Version version) {
        this(symbolicName, version, BUNDLE_ID_UNSPECIFIED, null);
    }

    BundleDescriptor(long bundleID) {
        this(null, null, bundleID, null);
    }

    BundleDescriptor(Filter filter) {
        this(null, null, BUNDLE_ID_UNSPECIFIED, filter);
    }

    // Master constructor: all public constructors delegate here.
    private BundleDescriptor(String symbolicName, Version version, long bundleID, Filter filter) {
        this.symbolicName = symbolicName;
        this.version = version;
        this.bundleID = bundleID;
        this.filter = filter;
    }

    public long getBundleID() {
        return bundleID;
    }

    public Filter getFilter() {
        return filter;
    }

    public String getSymbolicName() {
        return symbolicName;
    }

    public Version getVersion() {
        return version;
    }
}
| 9,724 |
0 | Create_ds/aries/esa-ant-task/src/test/java/org/apache/aries/ant | Create_ds/aries/esa-ant-task/src/test/java/org/apache/aries/ant/taskdefs/EsaTaskTest.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.ant.taskdefs;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.types.FileSet;
import org.junit.Test;
/**
* @version $Id: $
*/
public class EsaTaskTest {

    /** Path of the subsystem manifest entry inside a generated archive. */
    private static final String SUBSYSTEM_MANIFEST = "OSGI-INF/SUBSYSTEM.MF";

    /** Without a manifest file and without generation, no manifest entry is written. */
    @Test
    public void generateArchiveNoManifest() {
        File srcDir = new File("../src/test/resources");
        File destfile = deleteIfExists(new File("target/esa-test1.esa"));
        EsaTask esaTask = newEsaTask(srcDir, destfile);
        esaTask.execute();
        assertTrue(destfile.exists());
        assertManifestPresence(destfile, false);
    }

    /** An explicitly supplied manifest file ends up in the archive. */
    @Test
    public void generateArchiveWithFileManifest() {
        File srcDir = new File("../src/test/resources");
        File destfile = deleteIfExists(new File("target/esa-test2.esa"));
        EsaTask esaTask = newEsaTask(srcDir, destfile);
        esaTask.setManifest(new File(srcDir, "SUBSYSTEM.MF"));
        esaTask.execute();
        assertTrue(destfile.exists());
        assertManifestPresence(destfile, true);
    }

    /** With generateManifest=true a manifest is synthesized and written. */
    @Test
    public void generateArchiveWithNewManifest() {
        File srcDir = new File("../src/test/resources");
        assertTrue(srcDir.exists());
        File destfile = deleteIfExists(new File("target/esa-test.esa"));
        EsaTask esaTask = newEsaTask(srcDir, destfile);
        esaTask.setName("ESA Test Task");
        esaTask.setGenerateManifest(true);
        esaTask.execute();
        assertTrue(destfile.exists());
        assertManifestPresence(destfile, true);
    }

    /** Deletes the target file if present and asserts it is gone. */
    private static File deleteIfExists(File destfile) {
        if (destfile.exists()) {
            destfile.delete();
        }
        assertFalse(destfile.exists());
        return destfile;
    }

    /** Creates an EsaTask with the setup common to all tests (project, fileset, name, version). */
    private static EsaTask newEsaTask(File srcDir, File destfile) {
        EsaTask esaTask = new EsaTask();
        esaTask.setProject(new Project());
        FileSet fileSet = new FileSet();
        fileSet.setDir(srcDir);
        fileSet.setIncludes("*.jar");
        esaTask.addFileset(fileSet);
        esaTask.setDestFile(destfile);
        esaTask.setSymbolicName("esatask-test");
        esaTask.setVersion("1.0.0");
        return esaTask;
    }

    /**
     * Asserts whether the archive contains the subsystem manifest entry.
     * Uses try-with-resources: the original tests opened the ZipFile and
     * never closed it (resource leak).
     */
    private static void assertManifestPresence(File destfile, boolean expected) {
        try (ZipFile esaArchive = new ZipFile(destfile)) {
            assertNotNull(esaArchive);
            ZipEntry subsystemManifest = esaArchive.getEntry(SUBSYSTEM_MANIFEST);
            if (expected) {
                assertNotNull(subsystemManifest);
            } else {
                assertNull(subsystemManifest);
            }
        }
        catch (IOException e) {
            fail(e.getMessage());
        }
    }
}
| 9,725 |
0 | Create_ds/aries/esa-ant-task/src/main/java/org/apache/aries/ant | Create_ds/aries/esa-ant-task/src/main/java/org/apache/aries/ant/taskdefs/EsaTask.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.ant.taskdefs;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.taskdefs.Jar;
import org.apache.tools.ant.taskdefs.Zip;
import org.apache.tools.ant.types.Resource;
import org.apache.tools.ant.types.ZipFileSet;
import org.apache.tools.ant.util.FileUtils;
import org.apache.tools.zip.ZipOutputStream;
/**
 * The Ant task that builds an Enterprise Subsystem Archive (ESA); the task is
 * modeled on the Ant {@link Jar} task. Files added through nested filesets are
 * zipped up, and the subsystem manifest (OSGI-INF/SUBSYSTEM.MF) is either
 * copied from a user-supplied file or generated from the task attributes plus
 * the bundles that were added to the archive.
 *
 * @version $Id: $
 */
public class EsaTask extends Zip {

    /* symbolic name of the subsystem */
    private String symbolicName;

    /* human-readable name of the subsystem */
    private String name;

    /* subsystem description */
    private String description;

    /* version of the subsystem; defaults to 1.0.0 when a manifest is generated */
    private String version;

    /* subsystem type; defaults to osgi.subsystem.feature */
    private String type = Constants.FEATURE_TYPE;

    /* the user-supplied subsystem manifest, if any */
    private File manifestFile;

    /* flag to indicate whether to generate the manifest */
    protected boolean generateManifest;

    /* used for dry runs */
    protected boolean skipWriting = false;

    /*
     * Accumulates the Subsystem-Content header: one
     * "<symbolic-name>;version=<version>," entry is appended per bundle as it
     * is zipped; the trailing comma is stripped when the header is written.
     */
    private StringBuilder subsystemContent = new StringBuilder(
        Constants.SUBSYSTEM_CONTENT + ":");

    public EsaTask() {
        super();
        archiveType = "esa";
        setEncoding("UTF8");
    }

    /**
     * @param symbolicName
     *            the symbolicName to set
     */
    public void setSymbolicName(String symbolicName) {
        this.symbolicName = symbolicName;
    }

    /**
     * @param name
     *            the name to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @param subsystemVersion
     *            the version to set
     */
    public void setVersion(String subsystemVersion) {
        this.version = subsystemVersion;
    }

    /**
     * @param subsystemType
     *            the type to set
     */
    public void setType(String subsystemType) {
        this.type = subsystemType;
    }

    /**
     * @param manifestFile
     *            the manifest file to copy into the archive; must exist
     */
    public void setManifest(File manifestFile) {
        if (!manifestFile.exists()) {
            throw new BuildException("Manifest:" + manifestFile +
                " does not exist", getLocation());
        }
        this.manifestFile = manifestFile;
    }

    /**
     * @param generateManifest
     *            the generateManifest to set
     */
    public void setGenerateManifest(boolean generateManifest) {
        this.generateManifest = generateManifest;
    }

    /**
     * @param skipWriting
     *            the skipWriting to set
     */
    public void setSkipWriting(boolean skipWriting) {
        this.skipWriting = skipWriting;
    }

    @Override
    protected void zipFile(
            File file, ZipOutputStream zOut, String vPath, int mode)
        throws IOException {

        super.zipFile(file, zOut, vPath, mode);

        /*
         * Subsystem-Content handling applies only to OSGi bundles; skip the
         * subsystem manifest itself and non-file entries.
         */
        if (file.isFile() &&
            !Constants.SUBSYSTEM_MANIFEST_NAME.equalsIgnoreCase(vPath)) {

            JarFile bundleFile = new JarFile(file);
            try {
                Manifest jarManifest = bundleFile.getManifest();
                if (jarManifest != null) {
                    Attributes mainAttributes = jarManifest.getMainAttributes();
                    String bundleSymbolicName =
                        mainAttributes.getValue(Constants.BUNDLE_SYMBOLICNAME);
                    String bundleVersion =
                        mainAttributes.getValue(Constants.BUNDLE_VERSION);
                    String strSubsystemContentEntry =
                        bundleSymbolicName + ";version=" + bundleVersion;
                    subsystemContent.append(strSubsystemContentEntry);
                    subsystemContent.append(",");
                }
            }
            finally {
                // Always release the file handle; the previous code leaked it
                // when the jar had no manifest or reading the manifest failed.
                bundleFile.close();
            }
        }
    }

    @Override
    protected void initZipOutputStream(ZipOutputStream zOut)
        throws IOException, BuildException {

        // A user-supplied manifest is copied in up front; a generated one is
        // written in finalizeZipOutputStream instead, after all bundles have
        // contributed their Subsystem-Content entries.
        if (manifestFile != null && !generateManifest) {
            zipDir(
                (Resource) null, zOut, Constants.OSGI_INF_PATH,
                ZipFileSet.DEFAULT_DIR_MODE, null);
            zipFile(
                manifestFile, zOut, Constants.SUBSYSTEM_MANIFEST_NAME,
                ZipFileSet.DEFAULT_FILE_MODE);
        }
    }

    @Override
    protected void finalizeZipOutputStream(ZipOutputStream zOut)
        throws IOException, BuildException {

        if (!skipWriting) {
            if (generateManifest) {
                addNewManifest(zOut);
            }
        }
    }

    /**
     * This method will add the SUBSYSTEM.MF to the esa archive.
     *
     * @param zOut
     *            the zip output stream
     */
    private void addNewManifest(ZipOutputStream zOut) {
        try {
            log("Generating SUBSYSTEM.MF", Project.MSG_VERBOSE);

            zipDir(
                (Resource) null, zOut, Constants.OSGI_INF_PATH,
                ZipFileSet.DEFAULT_DIR_MODE, null);

            ByteArrayOutputStream bout = new ByteArrayOutputStream();
            OutputStreamWriter osWriter = new OutputStreamWriter(bout, "UTF-8");
            PrintWriter printWriter = new PrintWriter(osWriter);

            // Start writing manifest content
            printWriter.write(Constants.SUBSYSTEM_MANIFESTVERSION + ": " +
                Constants.SUBSYSTEM_MANIFEST_VERSION_VALUE + "\n");
            printWriter.write(Constants.SUBSYSTEM_SYMBOLICNAME + ": " +
                symbolicName + "\n");

            if (version == null) {
                version = "1.0.0";
            }
            printWriter.write(Constants.SUBSYSTEM_VERSION + ": " + version +
                "\n");

            if (name == null) {
                name = symbolicName;
            }
            printWriter.write(Constants.SUBSYSTEM_NAME + ": " + name + "\n");
            printWriter.write(Constants.SUBSYSTEM_TYPE + ": " + type + "\n");

            if (description != null) {
                printWriter.write(Constants.SUBSYSTEM_DESCRIPTION + ": " +
                    description + "\n");
            }

            // Subsystem-Content header: only written when at least one bundle
            // contributed an entry; otherwise stripping the trailing ','
            // would strip the ':' and corrupt the bare header name.
            String subsystemContentHeader = subsystemContent.toString();
            if (subsystemContentHeader.endsWith(",")) {
                printWriter.write(subsystemContentHeader.substring(
                    0, subsystemContentHeader.length() - 1) + "\n");
            }
            printWriter.close();

            ByteArrayInputStream bais =
                new ByteArrayInputStream(bout.toByteArray());
            try {
                super.zipFile(
                    bais, zOut, Constants.SUBSYSTEM_MANIFEST_NAME,
                    System.currentTimeMillis(), null,
                    ZipFileSet.DEFAULT_FILE_MODE);
            }
            finally {
                // not really required
                FileUtils.close(bais);
            }
        }
        catch (IOException e) {
            log("Error generating manifest", Project.MSG_ERR);
        }
    }
}
| 9,726 |
0 | Create_ds/aries/esa-ant-task/src/main/java/org/apache/aries/ant | Create_ds/aries/esa-ant-task/src/main/java/org/apache/aries/ant/taskdefs/BundleSelector.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.ant.taskdefs;
import java.io.File;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.types.selectors.FileSelector;
/**
 * A simple Ant {@link FileSelector} that accepts only files that look like
 * OSGi bundles or subsystem archives: the name must end with ".jar" or ".esa"
 * and the file must contain a readable JAR manifest.
 *
 * @version $Id: $
 */
public class BundleSelector implements FileSelector {

    /**
     * @param basedir the base directory the scan started from
     * @param filename the name of the file, relative to basedir
     * @param file the file itself
     * @return true when the file is a .jar/.esa archive with a manifest
     */
    @Override
    public boolean isSelected(File basedir, String filename, File file)
        throws BuildException {

        if (!file.isFile()) {
            return false;
        }

        String lowerName = filename.toLowerCase();
        if (!lowerName.endsWith(".esa") && !lowerName.endsWith(".jar")) {
            // Not a candidate archive; don't bother opening it.
            return false;
        }

        JarFile osgiBundle = null;
        try {
            osgiBundle = new JarFile(new File(basedir, filename));
            // A bundle/subsystem archive must at least carry a manifest.
            return osgiBundle.getManifest() != null;
        }
        catch (Exception e) {
            // Unreadable or corrupt archive: treat it as not selected.
            return false;
        }
        finally {
            try {
                if (osgiBundle != null) {
                    osgiBundle.close();
                }
            }
            catch (Exception e) {
                // nothing to do when closing fails
            }
        }
    }
}
| 9,727 |
0 | Create_ds/aries/esa-ant-task/src/main/java/org/apache/aries/ant | Create_ds/aries/esa-ant-task/src/main/java/org/apache/aries/ant/taskdefs/Constants.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.ant.taskdefs;
/**
 * Manifest header names and well-known values used when building an
 * Enterprise Subsystem Archive.
 *
 * @version $Id: $
 */
public interface Constants {

    // OSGi bundle manifest headers (interface constants are implicitly
    // public static final).
    String BUNDLE_VERSION = "Bundle-Version";
    String BUNDLE_SYMBOLICNAME = "Bundle-SymbolicName";
    String FRAGMENT_HOST = "Fragment-Host";

    // Resource types.
    String BUNDLE_TYPE = "osgi.bundle";
    String FRAGMENT_TYPE = "osgi.fragment";

    /*
     * Subsystem types
     */
    String APPLICATION_TYPE = "osgi.subsystem.application";
    String COMPOSITE_TYPE = "osgi.subsystem.composite";
    String FEATURE_TYPE = "osgi.subsystem.feature";

    /*
     * Subsystem manifest headers
     */
    String SUBSYSTEM_MANIFESTVERSION = "Subsystem-ManifestVersion";
    String SUBSYSTEM_SYMBOLICNAME = "Subsystem-SymbolicName";
    String SUBSYSTEM_VERSION = "Subsystem-Version";
    String SUBSYSTEM_NAME = "Subsystem-Name";
    String SUBSYSTEM_DESCRIPTION = "Subsystem-Description";
    String SUBSYSTEM_CONTENT = "Subsystem-Content";
    String SUBSYSTEM_USEBUNDLE = "Use-Bundle";
    String SUBSYSTEM_TYPE = "Subsystem-Type";

    // Archive layout.
    String OSGI_INF_PATH = "OSGI-INF/";
    String SUBSYSTEM_MANIFEST_NAME = "OSGI-INF/SUBSYSTEM.MF";
    String SUBSYSTEM_MANIFEST_VERSION_VALUE = "1";
}
| 9,728 |
0 | Create_ds/netflix-graph/src/test/java/com/netflix | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/NFGraphTest.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.junit.Before;
import org.junit.Test;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
public class NFGraphTest {

    RandomizedGraphBuilder randomizedGraphBuilder;

    private long seed;
    private NFGraph graph;

    @Before
    public void setUp() throws IOException {
        // Randomize the graph dimensions for each run.
        Random sizeRand = new Random();
        int aNodeCount = sizeRand.nextInt(10000);
        int bNodeCount = sizeRand.nextInt(10000);

        // Remember the seed so the verification pass can replay the build.
        seed = System.currentTimeMillis();
        randomizedGraphBuilder = new RandomizedGraphBuilder(aNodeCount, bNodeCount);
        NFCompressedGraph compressed = randomizedGraphBuilder.build(new Random(seed));

        // Round-trip the graph through serialization before asserting on it.
        ByteArrayOutputStream serialized = new ByteArrayOutputStream();
        compressed.writeTo(serialized);
        this.graph = NFCompressedGraph.readFrom(
            new ByteArrayInputStream(serialized.toByteArray()));
    }

    @Test
    public void randomizedTest() {
        // Replaying the seed regenerates the expected connections.
        randomizedGraphBuilder.assertGraph(graph, new Random(seed));
    }
}
| 9,729 |
0 | Create_ds/netflix-graph/src/test/java/com/netflix | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/NFGraphMemoryPoolTest.java | package com.netflix.nfgraph;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
import com.netflix.nfgraph.util.ByteSegmentPool;
public class NFGraphMemoryPoolTest {

    /*
     * All deserialized graphs in this test share one pool, so destroying a
     * graph recycles its byte segments for the next deserialization.
     */
    private ByteSegmentPool memoryPool;

    @Before
    public void setUp() {
        memoryPool = new ByteSegmentPool(8);
    }

    @Test
    public void swapBackAndForth() throws IOException {
        // NOTE: a redundant local ByteSegmentPool that shadowed the field (and
        // was never used -- roundTripGraph reads the field) has been removed.
        RandomizedGraphBuilder graphBuilder = new RandomizedGraphBuilder(10000, 10000);
        long seed = System.currentTimeMillis();

        NFCompressedGraph graph1 = graphBuilder.build(new Random(seed));
        graph1 = roundTripGraph(graph1);
        graphBuilder.assertGraph(graph1, new Random(seed));

        NFCompressedGraph graph2 = graphBuilder.build(new Random(seed+1));
        graph2 = roundTripGraph(graph2);
        graphBuilder.assertGraph(graph2, new Random(seed+1));

        // Returns graph1's segments to the pool...
        graph1.destroy();

        // ...so graph3 is deserialized over the recycled memory.
        NFCompressedGraph graph3 = graphBuilder.build(new Random(seed+2));
        graph3 = roundTripGraph(graph3);
        graphBuilder.assertGraph(graph3, new Random(seed+2));

        try {
            /// this shouldn't work -- we have reused this memory now.
            graphBuilder.assertGraph(graph1, new Random(seed));
            Assert.fail();
        } catch(AssertionError expected) { }
    }

    /**
     * Serializes the graph and immediately deserializes it, backing the
     * result with the shared segment pool.
     */
    private NFCompressedGraph roundTripGraph(NFCompressedGraph graph) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        graph.writeTo(baos);
        ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
        return NFCompressedGraph.readFrom(bais, memoryPool);
    }
}
| 9,730 |
0 | Create_ds/netflix-graph/src/test/java/com/netflix | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/RandomizedGraphBuilder.java | package com.netflix.nfgraph;
import static com.netflix.nfgraph.OrdinalIterator.NO_MORE_ORDINALS;
import static com.netflix.nfgraph.spec.NFPropertySpec.COMPACT;
import static com.netflix.nfgraph.spec.NFPropertySpec.GLOBAL;
import static com.netflix.nfgraph.spec.NFPropertySpec.HASH;
import static com.netflix.nfgraph.spec.NFPropertySpec.MODEL_SPECIFIC;
import static com.netflix.nfgraph.spec.NFPropertySpec.MULTIPLE;
import static com.netflix.nfgraph.spec.NFPropertySpec.SINGLE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.HashSet;
import java.util.Random;
import com.netflix.nfgraph.build.NFBuildGraph;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
import com.netflix.nfgraph.spec.NFPropertySpec;
/**
 * Builds an NFCompressedGraph whose connections are driven entirely by a
 * seeded {@link Random}, and verifies a graph by replaying the same seed.
 *
 * build(new Random(seed)) and assertGraph(graph, new Random(seed)) draw from
 * the random sequence in exactly the same order, so the verifier can predict
 * every connection without storing it. NOTE(review): the two methods must
 * stay in lock-step -- any reordering of rand calls in one must be mirrored
 * in the other.
 */
public class RandomizedGraphBuilder {
// Two node types: "a" carries single-connection properties; "b" carries
// multiple-connection properties in compact and hashed encodings, each in
// global and model-specific variants.
public static final NFGraphSpec RANDOM_GRAPH_SPEC = new NFGraphSpec(
new NFNodeSpec("node-type-a",
new NFPropertySpec("a-to-one-b-global", "node-type-b", SINGLE | GLOBAL),
new NFPropertySpec("a-to-one-b-per-model", "node-type-b", SINGLE | MODEL_SPECIFIC)
),
new NFNodeSpec("node-type-b",
new NFPropertySpec("b-to-many-a-compact-global", "node-type-a", MULTIPLE | COMPACT | GLOBAL),
new NFPropertySpec("b-to-many-a-hashed-global", "node-type-a", MULTIPLE | HASH | GLOBAL),
new NFPropertySpec("b-to-many-a-compact-per-model", "node-type-a", MULTIPLE | COMPACT | MODEL_SPECIFIC),
new NFPropertySpec("b-to-many-a-hashed-per-model", "node-type-a", MULTIPLE | HASH | MODEL_SPECIFIC)
)
);
// Fixed node counts; ordinals drawn by build/assertGraph stay within these.
private final int numANodes;
private final int numBNodes;
public RandomizedGraphBuilder(int numANodes, int numBNodes) {
this.numANodes = numANodes;
this.numBNodes = numBNodes;
}
/**
 * Builds and compresses a graph; every connection decision is taken from
 * rand, so the same seed always produces the same graph.
 */
public NFCompressedGraph build(Random rand) {
NFBuildGraph graph = new NFBuildGraph(RANDOM_GRAPH_SPEC);
graph.addConnectionModel("model-1");
graph.addConnectionModel("model-2");
// Each a-node gets each single connection with probability 1/2.
for(int i=0; i < numANodes;i++) {
if(rand.nextBoolean())
graph.addConnection("node-type-a", i, "a-to-one-b-global", rand.nextInt(numBNodes));
if(rand.nextBoolean())
graph.addConnection("model-1", "node-type-a", i, "a-to-one-b-per-model", rand.nextInt(numBNodes));
if(rand.nextBoolean())
graph.addConnection("model-2", "node-type-a", i, "a-to-one-b-per-model", rand.nextInt(numBNodes));
}
// Each b-node gets, per property/model combination, a random set of
// connections with probability 1/2 (see addMultipleRandomConnections).
for(int i=0; i < numBNodes;i++) {
addMultipleRandomConnections(rand, graph, i, "global", "b-to-many-a-compact-global");
addMultipleRandomConnections(rand, graph, i, "global", "b-to-many-a-hashed-global");
addMultipleRandomConnections(rand, graph, i, "model-1", "b-to-many-a-compact-per-model");
addMultipleRandomConnections(rand, graph, i, "model-2", "b-to-many-a-compact-per-model");
addMultipleRandomConnections(rand, graph, i, "model-1", "b-to-many-a-hashed-per-model");
addMultipleRandomConnections(rand, graph, i, "model-2", "b-to-many-a-hashed-per-model");
}
return graph.compress();
}
// With probability 1/2, connects fromOrdinal to a random set of a-node
// ordinals under the given model and property.
private void addMultipleRandomConnections(Random rand, NFBuildGraph graph, int fromOrdinal, String model, String propertyName) {
if(rand.nextBoolean()) {
HashSet<Integer> connections = buildRandomConnectionSet(rand);
for(Integer connection : connections) {
graph.addConnection(model, "node-type-b", fromOrdinal, propertyName, connection.intValue());
}
}
}
/**
 * Verifies the graph by replaying the exact sequence of random draws used
 * in build(); rand must be seeded identically to the one passed to build.
 */
public void assertGraph(NFGraph graph, Random rand) {
for(int i=0;i<numANodes;i++) {
// -1 is the "no connection" sentinel for single-connection properties.
int conn = graph.getConnection("node-type-a", i, "a-to-one-b-global");
int expected = rand.nextBoolean() ? rand.nextInt(numBNodes) : -1;
assertEquals(expected, conn);
conn = graph.getConnection("model-1", "node-type-a", i, "a-to-one-b-per-model");
expected = rand.nextBoolean() ? rand.nextInt(numBNodes) : -1;
assertEquals(expected, conn);
conn = graph.getConnection("model-2", "node-type-a", i, "a-to-one-b-per-model");
expected = rand.nextBoolean() ? rand.nextInt(numBNodes) : -1;
assertEquals(expected, conn);
}
// Mirror of the b-node loop in build(), in the same order.
for(int i=0;i<numBNodes;i++) {
assertMultipleConnections(graph, rand, "global", i, "b-to-many-a-compact-global");
assertMultipleConnections(graph, rand, "global", i, "b-to-many-a-hashed-global");
assertMultipleConnections(graph, rand, "model-1", i, "b-to-many-a-compact-per-model");
assertMultipleConnections(graph, rand, "model-2", i, "b-to-many-a-compact-per-model");
assertMultipleConnections(graph, rand, "model-1", i, "b-to-many-a-hashed-per-model");
assertMultipleConnections(graph, rand, "model-2", i, "b-to-many-a-hashed-per-model");
}
}
// Rebuilds the expected connection set from rand and checks the graph's
// actual set matches it exactly (same members, same size).
private void assertMultipleConnections(NFGraph graph, Random rand, String model, int fromOrdinal, String propertyName) {
OrdinalSet set = graph.getConnectionSet(model, "node-type-b", fromOrdinal, propertyName);
if(!rand.nextBoolean()) {
assertEquals(0, set.size());
return;
}
HashSet<Integer> connections = buildRandomConnectionSet(rand);
OrdinalIterator iter = set.iterator();
int actualOrdinal = iter.nextOrdinal();
while(actualOrdinal != NO_MORE_ORDINALS) {
assertTrue(String.valueOf(actualOrdinal), connections.contains(actualOrdinal));
actualOrdinal = iter.nextOrdinal();
}
assertEquals(connections.size(), set.size());
}
// Draws up to 99 distinct a-node ordinals; rejection-samples duplicates so
// the number of rand calls depends on collisions, which is still
// deterministic for a given seed.
private HashSet<Integer> buildRandomConnectionSet(Random rand) {
int numConnections = rand.nextInt(100);
HashSet<Integer> connections = new HashSet<Integer>();
for(int j=0;j<numConnections;j++) {
int connectedTo = rand.nextInt(numANodes);
while(connections.contains(connectedTo))
connectedTo = rand.nextInt(numANodes);
connections.add(connectedTo);
}
return connections;
}
}
| 9,731 |
0 | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/serializer/NFCompressedGraphPointersSerializerTest.java | /*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
import com.netflix.nfgraph.compressed.NFCompressedGraphIntPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphLongPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Test;
public class NFCompressedGraphPointersSerializerTest {

    @Test
    public void dataLengthLessthan4GBUsesIntegerPointers() throws IOException {
        NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();
        pointers.addPointers("test", new long[] { 1, 2, 3 });
        // 2 * Integer.MAX_VALUE is just under the 4GB unsigned-int limit.
        NFCompressedGraphPointers deserialized =
            serializeRoundTrip(pointers, (long) Integer.MAX_VALUE * 2);
        Assert.assertTrue(deserialized instanceof NFCompressedGraphIntPointers);
    }

    @Test
    public void dataLengthGreaterThan4GBUsesLongPointers() throws IOException {
        NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();
        pointers.addPointers("test", new long[] { 1, 2, 3 });
        // 3 * Integer.MAX_VALUE exceeds 4GB, forcing the long representation.
        NFCompressedGraphPointers deserialized =
            serializeRoundTrip(pointers, (long) Integer.MAX_VALUE * 3);
        Assert.assertTrue(deserialized instanceof NFCompressedGraphLongPointers);
    }

    @Test
    public void pointersMightStartGreaterThan2GB() throws IOException {
        NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();

        long bigStartVal = Integer.MAX_VALUE;
        bigStartVal += 5;

        long[] ptrs = new long[] { bigStartVal, bigStartVal + 10, bigStartVal + 20, bigStartVal + 100 };
        pointers.addPointers("Test", ptrs);

        NFCompressedGraphPointers deserialized =
            serializeRoundTrip(pointers, bigStartVal + 125);

        // Values above 2GB but below 4GB still fit unsigned ints.
        for (int i = 0; i < ptrs.length; i++) {
            Assert.assertEquals(ptrs[i], deserialized.getPointer("Test", i));
        }
        Assert.assertTrue(deserialized instanceof NFCompressedGraphIntPointers);
    }

    @Test
    public void pointersMightStartGreaterThan4GB() throws IOException {
        NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();

        long bigStartVal = Integer.MAX_VALUE;
        bigStartVal *= 5;

        long[] ptrs = new long[] { bigStartVal, bigStartVal + 10, bigStartVal + 20, bigStartVal + 100 };
        pointers.addPointers("Test", ptrs);

        NFCompressedGraphPointers deserialized =
            serializeRoundTrip(pointers, bigStartVal + 125);

        // Beyond 4GB the long representation is required to round-trip.
        for (int i = 0; i < ptrs.length; i++) {
            Assert.assertEquals(ptrs[i], deserialized.getPointer("Test", i));
        }
        Assert.assertTrue(deserialized instanceof NFCompressedGraphLongPointers);
    }

    /**
     * Serializes the pointers for an archive of the given data length and
     * deserializes them back; shared by all four tests, which previously
     * duplicated this round trip inline.
     */
    private NFCompressedGraphPointers serializeRoundTrip(
            NFCompressedGraphLongPointers pointers, long dataLength) throws IOException {
        NFCompressedGraphPointersSerializer serializer =
            new NFCompressedGraphPointersSerializer(pointers, dataLength);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        serializer.serializePointers(new DataOutputStream(baos));
        DataInputStream dis =
            new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
        return new NFCompressedGraphPointersDeserializer().deserializePointers(dis);
    }
}
| 9,732 |
0 | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/util/OrdinalMapTest.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.util;
import static org.junit.Assert.assertEquals;
import org.junit.Before;
import org.junit.Test;
import com.netflix.nfgraph.util.OrdinalMap;
public class OrdinalMapTest {

    private OrdinalMap<String> map;

    @Before
    public void setUp() {
        map = new OrdinalMap<String>();
    }

    @Test
    public void assignsOrdinalsInOrder() {
        // Ordinals are handed out sequentially starting at 0.
        for (int expectedOrdinal = 0; expectedOrdinal < 10; expectedOrdinal++) {
            assertEquals(expectedOrdinal, map.add(String.valueOf(expectedOrdinal)));
        }
    }

    @Test
    public void retainsMappingFromObjectToOrdinal() {
        for (int i = 0; i < 1000; i++) {
            map.add(String.valueOf(i));
        }
        // Forward lookup: object -> ordinal.
        for (int i = 0; i < 1000; i++) {
            assertEquals(i, map.get(String.valueOf(i)));
        }
    }

    @Test
    public void retainsMappingFromOrdinalToObject() {
        for (int i = 0; i < 1000; i++) {
            map.add(String.valueOf(i));
        }
        // Reverse lookup: ordinal -> object.
        for (int i = 0; i < 1000; i++) {
            assertEquals(String.valueOf(i), map.get(i));
        }
    }
}
| 9,733 |
0 | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/spec/NFPropertySpecTest.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.spec;
import static com.netflix.nfgraph.spec.NFPropertySpec.HASH;
import static com.netflix.nfgraph.spec.NFPropertySpec.MODEL_SPECIFIC;
import static com.netflix.nfgraph.spec.NFPropertySpec.SINGLE;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
public class NFPropertySpecTest {

    @Test
    public void testInstantiateWithFlagsDefault() {
        // No flags set: the spec defaults to global, multiple, not hashed.
        NFPropertySpec spec = new NFPropertySpec(null, null, 0);
        assertTrue(spec.isGlobal());
        assertTrue(spec.isMultiple());
        assertFalse(spec.isHashed());
    }

    @Test
    public void testInstantiateWithFlags() {
        // Explicit flags invert each of the defaults.
        NFPropertySpec spec =
            new NFPropertySpec(null, null, MODEL_SPECIFIC | HASH | SINGLE);
        assertFalse(spec.isGlobal());
        assertFalse(spec.isMultiple());
        assertTrue(spec.isHashed());
    }
}
| 9,734 |
0 | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/compressed/HashSetTest.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.compressor.HashedPropertyBuilder;
import com.netflix.nfgraph.util.ByteArrayBuffer;
import com.netflix.nfgraph.util.ByteArrayReader;
import com.netflix.nfgraph.util.ByteData;
import org.junit.Test;
public class HashSetTest extends EncodedConnectionSetTest {

    @Override
    protected ByteData generateCompressedData(OrdinalSet ordinals) {
        // Encode the set with the hashed-property layout, recording the
        // encoded length for createOrdinalSet().
        ByteArrayBuffer buffer = new ByteArrayBuffer();
        new HashedPropertyBuilder(buffer).buildProperty(ordinals);
        dataLength = buffer.length();
        return buffer.getData();
    }

    @Override
    protected OrdinalSet createOrdinalSet() {
        return new HashSetOrdinalSet(new ByteArrayReader(data, 0, dataLength));
    }

    @Override
    protected int maximumTotalOrdinals() {
        return 100000;
    }

    @Test
    public void singleOrdinal127IsSizedAppropriately() {
        // Boundary case around the value 127 -- presumably the largest value
        // fitting a single encoded byte; TODO confirm against the encoding.
        ByteArrayBuffer buffer = new ByteArrayBuffer();
        new HashedPropertyBuilder(buffer).buildProperty(new SingleOrdinalSet(127));

        OrdinalSet set = new HashSetOrdinalSet(
            new ByteArrayReader(buffer.getData(), 0, buffer.length()));

        assertTrue(set.contains(127));
        assertFalse(set.contains(128));
    }
}
| 9,735 |
0 | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/compressed/NFCompressedGraphTest.java | package com.netflix.nfgraph.compressed;
import static com.netflix.nfgraph.OrdinalIterator.NO_MORE_ORDINALS;
import static com.netflix.nfgraph.spec.NFPropertySpec.GLOBAL;
import static com.netflix.nfgraph.spec.NFPropertySpec.MULTIPLE;
import static com.netflix.nfgraph.spec.NFPropertySpec.SINGLE;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.build.NFBuildGraph;
import com.netflix.nfgraph.exception.NFGraphException;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
import com.netflix.nfgraph.spec.NFPropertySpec;
public class NFCompressedGraphTest {

    private NFCompressedGraph graph;

    @Before
    public void setUp() {
        // Node type "a" has one multiple-connection and one single-connection
        // property, both pointing at node type "b".
        NFGraphSpec spec = new NFGraphSpec(
            new NFNodeSpec(
                "a",
                new NFPropertySpec("multiple", "b", GLOBAL | MULTIPLE),
                new NFPropertySpec("single", "b", GLOBAL | SINGLE)
            ),
            new NFNodeSpec("b")
        );

        NFBuildGraph buildGraph = new NFBuildGraph(spec);
        buildGraph.addConnection("a", 0, "multiple", 0);
        buildGraph.addConnection("a", 0, "multiple", 1);
        buildGraph.addConnection("a", 0, "single", 0);
        graph = buildGraph.compress();
    }

    @Test
    public void returnsValidOrdinalSetForSingleConnections() {
        OrdinalSet single = graph.getConnectionSet("a", 0, "single");
        assertEquals(1, single.size());
        assertEquals(true, single.contains(0));
        assertArrayEquals(new int[] { 0 }, single.asArray());
    }

    @Test
    public void returnsValidOrdinalIteratorForSingleConnections() {
        OrdinalIterator iterator = graph.getConnectionIterator("a", 0, "single");
        assertEquals(0, iterator.nextOrdinal());
        assertEquals(NO_MORE_ORDINALS, iterator.nextOrdinal());

        // A reset iterator replays the same sequence.
        iterator.reset();
        assertEquals(0, iterator.nextOrdinal());
        assertEquals(NO_MORE_ORDINALS, iterator.nextOrdinal());
    }

    @Test
    public void returnsFirstOrdinalForMultipleConnections() {
        assertEquals(0, graph.getConnection("a", 0, "multiple"));
    }

    @Test
    public void returnsNegativeOneForUndefinedConnections() {
        // Node 1 was never connected; -1 is the "no connection" sentinel.
        assertEquals(-1, graph.getConnection("a", 1, "multiple"));
    }

    @Test
    public void returnsEmptySetForUndefinedConnections() {
        assertEquals(0, graph.getConnectionSet("a", 1, "multiple").size());
    }

    @Test
    public void returnsEmptyIteratorForUndefinedConnections() {
        OrdinalIterator iterator = graph.getConnectionIterator("a", 1, "multiple");
        assertEquals(NO_MORE_ORDINALS, iterator.nextOrdinal());
    }

    @Test
    public void throwsNFGraphExceptionWhenQueryingForUndefinedNodeType() {
        try {
            graph.getConnectionSet("undefined", 0, "multiple");
            Assert.fail("NFGraphException should have been thrown");
        } catch (NFGraphException expected) { }
    }

    @Test
    public void throwsNFGraphExceptionWhenQueryingForUndefinedProperty() {
        try {
            graph.getConnectionIterator("a", 0, "undefined");
            Assert.fail("NFGraphException should have been thrown");
        } catch (NFGraphException expected) { }
    }
}
| 9,736 |
0 | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/compressed/EncodedConnectionSetTest.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import static com.netflix.nfgraph.OrdinalIterator.NO_MORE_ORDINALS;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.build.NFBuildGraphOrdinalSet;
import com.netflix.nfgraph.util.ByteData;
/**
 * Base class for randomized round-trip tests of an encoded {@code OrdinalSet}
 * implementation: a random subset of ordinals is encoded by the builder under
 * test, and every query method of the decoded set is verified against the
 * expected {@link java.util.Set}.
 */
public abstract class EncodedConnectionSetTest {

    /** Size of the ordinal universe for this run; always at least 1. */
    protected int totalOrdinals;
    /** The ordinals which were encoded and are therefore expected in the set. */
    protected Set<Integer> expectedOrdinals;
    /** Encoded representation produced by {@link #generateCompressedData(OrdinalSet)}. */
    protected ByteData data;
    protected long dataLength;
    protected Random rand;
    /** Seed for this run; embedded in failure messages so failures are reproducible. */
    protected long seed;

    @Before
    public void setUp() {
        createRandom();
        // Add 1 so totalOrdinals is never 0: Random.nextInt(0) throws
        // IllegalArgumentException, which previously made this fixture flaky.
        totalOrdinals = rand.nextInt(maximumTotalOrdinals()) + 1;
        OrdinalSet ordinals = generateExpectedOrdinals(totalOrdinals);
        data = generateCompressedData(ordinals);
    }

    /** Encodes the given ordinals with the builder under test and returns the bytes. */
    protected abstract ByteData generateCompressedData(OrdinalSet ordinals);

    /** Wraps {@link #data} in the {@code OrdinalSet} implementation under test. */
    protected abstract OrdinalSet createOrdinalSet();

    /** Upper bound (exclusive, before the +1 shift) on the ordinal universe size. */
    protected abstract int maximumTotalOrdinals();

    private void createRandom() {
        seed = System.currentTimeMillis();
        rand = new Random(seed);
    }

    private OrdinalSet generateExpectedOrdinals(int totalOrdinals) {
        expectedOrdinals = new HashSet<Integer>();
        // Strictly fewer ordinals than the universe size, so at least one
        // "unexpected" ordinal always exists for the negative tests below.
        int numOrdinalsInSet = rand.nextInt(totalOrdinals);
        int[] ordinals = new int[numOrdinalsInSet];

        for (int i = 0; i < ordinals.length; i++) {
            int ordinal = rand.nextInt(totalOrdinals);
            while (expectedOrdinals.contains(ordinal))
                ordinal = rand.nextInt(totalOrdinals);
            ordinals[i] = ordinal;
            expectedOrdinals.add(ordinal);
        }

        return new NFBuildGraphOrdinalSet(ordinals, ordinals.length);
    }

    @Test
    public void ordinalSetSizeIsCorrect() {
        OrdinalSet ordinalSet = createOrdinalSet();
        assertEquals("seed: " + seed, expectedOrdinals.size(), ordinalSet.size());
    }

    @Test
    public void ordinalSetContainsExpectedOrdinals() {
        OrdinalSet ordinalSet = createOrdinalSet();
        for (Integer expected : expectedOrdinals) {
            assertTrue("expected: " + expected.intValue() + " seed: " + seed, ordinalSet.contains(expected.intValue()));
        }
    }

    @Test
    public void ordinalSetContainsAll() {
        OrdinalSet ordinalSet = createOrdinalSet();
        int[] expected = new int[expectedOrdinals.size()];
        int expectedIdx = 0;
        for (Integer expect : expectedOrdinals) {
            if (rand.nextBoolean()) {
                expected[expectedIdx++] = expect.intValue();
            }
        }
        assertTrue("seed: " + seed, ordinalSet.containsAll(Arrays.copyOf(expected, expectedIdx)));
    }

    @Test
    public void ordinalSetContainsMostButNotAll() {
        OrdinalSet ordinalSet = createOrdinalSet();
        // Each loop iteration may append up to TWO entries (one expected, one
        // unexpected), and one more may be appended after the loop. Size the
        // array for that worst case; the previous size of size()+1 could
        // overflow with an ArrayIndexOutOfBoundsException.
        int[] unexpected = new int[2 * expectedOrdinals.size() + 1];
        int unexpectedIdx = 0;
        boolean addedUnexpected = false;

        for (Integer expect : expectedOrdinals) {
            if (rand.nextBoolean()) {
                unexpected[unexpectedIdx++] = expect.intValue();
            }
            if (rand.nextInt(5) == 0) {
                unexpected[unexpectedIdx++] = generateUnexpectedOrdinal();
                addedUnexpected = true;
            }
        }

        // Guarantee at least one ordinal which is NOT in the encoded set.
        if (!addedUnexpected) {
            unexpected[unexpectedIdx++] = generateUnexpectedOrdinal();
        }

        assertFalse("seed: " + seed, ordinalSet.containsAll(Arrays.copyOf(unexpected, unexpectedIdx)));
    }

    @Test
    public void ordinalSetDoesNotContainUnexpectedOrdinals() {
        OrdinalSet ordinalSet = createOrdinalSet();
        for (int i = 0; i < totalOrdinals; i++) {
            if (!expectedOrdinals.contains(i)) {
                assertFalse("seed: " + seed, ordinalSet.contains(i));
            }
        }
    }

    @Test
    public void iteratorReturnsArray() {
        OrdinalSet ordinalSet = createOrdinalSet();
        int[] arr = ordinalSet.asArray();
        for (int ordinal : arr) {
            assertTrue("seed: " + seed, expectedOrdinals.contains(Integer.valueOf(ordinal)));
        }
        assertEquals(expectedOrdinals.size(), arr.length);
    }

    @Test
    public void iteratorReturnsAllExpectedOrdinalsOnce() {
        OrdinalIterator iter = createOrdinalSet().iterator();
        Set<Integer> returnedOrdinals = new HashSet<Integer>();
        int counter = 0;

        try {
            int ordinal = iter.nextOrdinal();
            while (ordinal != NO_MORE_ORDINALS) {
                counter++;
                assertTrue("seed: " + seed, expectedOrdinals.contains(ordinal));
                returnedOrdinals.add(ordinal);
                ordinal = iter.nextOrdinal();
            }
        } catch (Throwable t) {
            t.printStackTrace();
            fail("seed: " + seed);
        }

        // counter == distinct count proves no ordinal was returned twice.
        assertEquals("seed: " + seed, expectedOrdinals.size(), returnedOrdinals.size());
        assertEquals("seed: " + seed, expectedOrdinals.size(), counter);
    }

    /** Returns an ordinal in [0, totalOrdinals) which is not in the expected set. */
    private int generateUnexpectedOrdinal() {
        int unexpectedOrdinal = rand.nextInt(totalOrdinals);
        while (expectedOrdinals.contains(unexpectedOrdinal))
            unexpectedOrdinal = rand.nextInt(totalOrdinals);
        return unexpectedOrdinal;
    }
}
| 9,737 |
0 | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/compressed/BitSetTest.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.compressor.BitSetPropertyBuilder;
import com.netflix.nfgraph.compressor.HashedPropertyBuilder;
import com.netflix.nfgraph.util.ByteArrayBuffer;
import com.netflix.nfgraph.util.ByteArrayReader;
import com.netflix.nfgraph.util.ByteData;
import com.netflix.nfgraph.util.SimpleByteArray;
import org.junit.Assert;
import org.junit.Test;
public class BitSetTest extends EncodedConnectionSetTest {

    @Override
    protected ByteData generateCompressedData(OrdinalSet ordinals) {
        ByteArrayBuffer buffer = new ByteArrayBuffer();
        new BitSetPropertyBuilder(buffer).buildProperty(ordinals, totalOrdinals);
        dataLength = buffer.length();
        return buffer.getData();
    }

    @Override
    protected OrdinalSet createOrdinalSet() {
        return new BitSetOrdinalSet(new ByteArrayReader(data, 0, dataLength));
    }

    @Override
    protected int maximumTotalOrdinals() {
        return 100000;
    }

    @Test
    public void bitSetDoesNotAttemptToReadPastRange() {
        // Reader scoped to just the middle byte: bit 0 is set there, but the
        // neighboring bytes (which also have bit 0 set) must stay invisible.
        byte[] bytes = new byte[] { 1, 1, 1 };
        ByteArrayReader scopedReader = new ByteArrayReader(new SimpleByteArray(bytes), 1, 2);

        BitSetOrdinalSet ordinals = new BitSetOrdinalSet(scopedReader);

        Assert.assertEquals(1, ordinals.size());
        Assert.assertTrue(ordinals.contains(0));
        Assert.assertFalse(ordinals.contains(8));
    }
}
| 9,738 |
0 | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/compressed/CompactSetTest.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.compressor.CompactPropertyBuilder;
import com.netflix.nfgraph.util.ByteArrayBuffer;
import com.netflix.nfgraph.util.ByteArrayReader;
import com.netflix.nfgraph.util.ByteData;
public class CompactSetTest extends EncodedConnectionSetTest {

    @Override
    protected ByteData generateCompressedData(OrdinalSet ordinals) {
        ByteArrayBuffer buffer = new ByteArrayBuffer();
        new CompactPropertyBuilder(buffer).buildProperty(ordinals);
        dataLength = buffer.length();
        return buffer.getData();
    }

    @Override
    protected OrdinalSet createOrdinalSet() {
        return new CompactOrdinalSet(new ByteArrayReader(data, 0, dataLength));
    }

    @Override
    protected int maximumTotalOrdinals() {
        return 1000;
    }
}
| 9,739 |
0 | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/compressed/NFCompressedGraphIntPointersTest.java | /*
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import org.junit.Assert;
import org.junit.Test;
public class NFCompressedGraphIntPointersTest {

    @Test
    public void test() {
        // Pointer values beyond Integer.MAX_VALUE are handed over as truncated
        // ints, yet must read back as their original long values.
        long firstPointer = (long) Integer.MAX_VALUE + 1000;
        long secondPointer = (long) Integer.MAX_VALUE * 2;

        NFCompressedGraphIntPointers pointers = new NFCompressedGraphIntPointers();
        pointers.addPointers("test", new int[] { (int) firstPointer, (int) secondPointer });

        Assert.assertEquals(firstPointer, pointers.getPointer("test", 0));
        Assert.assertEquals(secondPointer, pointers.getPointer("test", 1));
    }
}
| 9,740 |
0 | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/compressed/VIntTest.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import static org.junit.Assert.assertEquals;
import java.util.Random;
import org.junit.Before;
import org.junit.Test;
import com.netflix.nfgraph.util.ByteArrayBuffer;
import com.netflix.nfgraph.util.ByteArrayReader;
import com.netflix.nfgraph.util.ByteData;
public class VIntTest {

    /** Values written to the buffer, in write order. */
    private int[] randomValues;
    /** Buffer contents after all values were VInt-encoded. */
    private ByteData data;
    /** Seed for this run, reported on failure for reproducibility. */
    private long seed;

    @Before
    public void setUp() {
        seed = System.currentTimeMillis();
        Random random = new Random(seed);

        ByteArrayBuffer buffer = new ByteArrayBuffer();
        randomValues = new int[random.nextInt(10000)];

        for (int i = 0; i < randomValues.length; i++) {
            randomValues[i] = random.nextInt(Integer.MAX_VALUE);
            buffer.writeVInt(randomValues[i]);
        }

        data = buffer.getData();
    }

    @Test
    public void decodedValuesAreSameAsEncodedValues() {
        ByteArrayReader reader = new ByteArrayReader(data, 0);
        for (int value : randomValues) {
            assertEquals("seed: " + seed, value, reader.readVInt());
        }
    }
}
| 9,741 |
0 | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/test/java/com/netflix/nfgraph/build/NFBuildGraphOrdinalIteratorTest.java | package com.netflix.nfgraph.build;
import static com.netflix.nfgraph.OrdinalIterator.NO_MORE_ORDINALS;
import static org.junit.Assert.assertEquals;
import org.junit.Before;
import org.junit.Test;
import com.netflix.nfgraph.OrdinalIterator;
public class NFBuildGraphOrdinalIteratorTest {

    private NFBuildGraphOrdinalIterator iter;

    @Before
    public void setUp() {
        // Backing array is unsorted and contains a duplicate (2); only the
        // first four entries are in range. The assertions below pin down that
        // iteration yields each distinct ordinal once, in ascending order.
        iter = new NFBuildGraphOrdinalIterator(new int[] { 2, 1, 2, 3, 4 }, 4);
    }

    @Test
    public void iteratesOverOrdinalsInOrder() {
        assertEquals(1, iter.nextOrdinal());
        assertEquals(2, iter.nextOrdinal());
        assertEquals(3, iter.nextOrdinal());
        // Once exhausted, the iterator keeps reporting NO_MORE_ORDINALS.
        assertEquals(NO_MORE_ORDINALS, iter.nextOrdinal());
        assertEquals(NO_MORE_ORDINALS, iter.nextOrdinal());
    }

    @Test
    public void canBeReset() {
        // Drain well past the end, then rewind.
        for (int i = 0; i < 10; i++) {
            iter.nextOrdinal();
        }

        iter.reset();

        assertEquals(1, iter.nextOrdinal());
        assertEquals(2, iter.nextOrdinal());
        assertEquals(3, iter.nextOrdinal());
        assertEquals(NO_MORE_ORDINALS, iter.nextOrdinal());
    }

    @Test
    public void copyContainsSameOrdinals() {
        OrdinalIterator copy = this.iter.copy();

        assertEquals(1, copy.nextOrdinal());
        assertEquals(2, copy.nextOrdinal());
        assertEquals(3, copy.nextOrdinal());
        assertEquals(NO_MORE_ORDINALS, copy.nextOrdinal());
    }

    @Test
    public void isOrdered() {
        assertEquals(true, iter.isOrdered());
    }
}
| 9,742 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/NFGraphModelHolder.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph;
import java.util.Iterator;
import com.netflix.nfgraph.util.OrdinalMap;
/**
* <code>NFGraphModelHolder</code> maintains an ordering over the models in a given NFGraph.<p>
*
* An {@link NFGraph} may contain one or more "connection models". A "connection model" is a grouping of the set of connections
* between nodes in the graph.<p>
*
* Connections added for a connection model will be visible only for that model. Use of multiple connection models will
* add a minimum of one byte per model-specific connection set per node. As a result, this feature should be used only
* when the number of connection models is and will remain low.<p>
*
* It is unlikely that this class will need to be used externally.
*/
public class NFGraphModelHolder implements Iterable<String> {

    public static final String CONNECTION_MODEL_GLOBAL = "global";

    // Immutable reference: the map is created once in the constructor and the
    // global model is always registered first, at index 0.
    private final OrdinalMap<String> modelMap;

    public NFGraphModelHolder() {
        modelMap = new OrdinalMap<String>();
        modelMap.add(CONNECTION_MODEL_GLOBAL);
    }

    /**
     * @return the number of connection models known to this holder.
     */
    public int size() {
        return modelMap.size();
    }

    /**
     * Returns the index assigned to the given connection model.  Delegates to
     * {@code OrdinalMap.add}, so a model which has not been seen before is
     * registered by this call (presumably receiving the next free index —
     * see {@link OrdinalMap}).
     */
    public int getModelIndex(String connectionModel) {
        return modelMap.add(connectionModel);
    }

    /**
     * @return the connection model stored at the given index.
     */
    public String getModel(int modelIndex) {
        return modelMap.get(modelIndex);
    }

    @Override
    public Iterator<String> iterator() {
        return modelMap.iterator();
    }
}
| 9,743 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/OrdinalIterator.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph;
import com.netflix.nfgraph.compressed.BitSetOrdinalIterator;
import com.netflix.nfgraph.compressed.CompactOrdinalIterator;
import com.netflix.nfgraph.compressed.HashSetOrdinalIterator;
/**
* <code>OrdinalIterator</code> is the interface used to iterate over a set of connections.<p>
*
* An <code>OrdinalIterator</code> may be obtained for a set of connections directly from an {@link NFGraph} or via an {@link OrdinalSet}
* obtained from an {@link NFGraph}.
*
* @see CompactOrdinalIterator
* @see HashSetOrdinalIterator
* @see BitSetOrdinalIterator
*
*/
public interface OrdinalIterator {

    /**
     * This value will be returned from <code>nextOrdinal()</code> after the iteration is completed.
     * NOTE: because the sentinel is <code>Integer.MAX_VALUE</code>, a real ordinal equal to
     * <code>Integer.MAX_VALUE</code> would be indistinguishable from end-of-iteration.
     */
    public static final int NO_MORE_ORDINALS = Integer.MAX_VALUE;

    /**
     * @return the next ordinal in this set, or {@link #NO_MORE_ORDINALS} once the set is exhausted.
     */
    public int nextOrdinal();

    /**
     * Rewinds this <code>OrdinalIterator</code> to the beginning of the set.
     */
    public void reset();

    /**
     * Obtain a copy of this <code>OrdinalIterator</code>.
     *
     * @return a new <code>OrdinalIterator</code> over the same set, reset to the beginning of the set.
     */
    public OrdinalIterator copy();

    /**
     * @return <code>true</code> if the ordinals returned from this set are guaranteed to be in ascending order. Returns <code>false</code> otherwise.
     */
    public boolean isOrdered();

    /**
     * An iterator over the empty set: it always returns <code>OrdinalIterator.NO_MORE_ORDINALS</code>.
     * Stateless, so <code>copy()</code> can safely return the shared instance.
     */
    public static final OrdinalIterator EMPTY_ITERATOR = new OrdinalIterator() {
        @Override public int nextOrdinal() { return NO_MORE_ORDINALS; }
        @Override public void reset() { }
        @Override public OrdinalIterator copy() { return this; }
        @Override public boolean isOrdered() { return true; }
    };
}
| 9,744 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/OrdinalSet.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph;
import static com.netflix.nfgraph.OrdinalIterator.EMPTY_ITERATOR;
import static com.netflix.nfgraph.OrdinalIterator.NO_MORE_ORDINALS;
import com.netflix.nfgraph.build.NFBuildGraphOrdinalSet;
import com.netflix.nfgraph.compressed.BitSetOrdinalSet;
import com.netflix.nfgraph.compressed.CompactOrdinalSet;
import com.netflix.nfgraph.compressed.HashSetOrdinalSet;
/**
* <code>OrdinalSet</code> is the interface used to represent a set of connections.<p>
*
* An <code>OrdinalSet</code> is obtained directly from an {@link NFGraph}.
*
* @see NFGraph
*
*/
public abstract class OrdinalSet {

    /**
     * Returns <code>true</code> when the specified value is contained in this set. Depending on the implementation,
     * this operation will have one of two performance characteristics:<p>
     *
     * <code>O(1)</code> for {@link HashSetOrdinalSet} and {@link BitSetOrdinalSet}<br>
     * <code>O(n)</code> for {@link CompactOrdinalSet} and {@link NFBuildGraphOrdinalSet}
     */
    public abstract boolean contains(int value);

    /**
     * Returns <code>true</code> when all specified values are contained in this set. Depending on the implementation,
     * this operation will have one of two performance characteristics:<p>
     *
     * <code>O(m)</code> for {@link HashSetOrdinalSet} and {@link BitSetOrdinalSet}, where <code>m</code> is the number of specified elements.<br>
     * <code>O(n)</code> for {@link CompactOrdinalSet}, where <code>n</code> is the number of elements in the set.<br>
     * <code>O(n * m)</code> for {@link NFBuildGraphOrdinalSet}.
     */
    public boolean containsAll(int... values) {
        for (int value : values) {
            if (!contains(value)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns an array containing all elements in the set.
     */
    public int[] asArray() {
        int[] result = new int[size()];
        int count = 0;

        OrdinalIterator iter = iterator();
        for (int ordinal = iter.nextOrdinal(); ordinal != NO_MORE_ORDINALS; ordinal = iter.nextOrdinal()) {
            result[count++] = ordinal;
        }

        return result;
    }

    /**
     * @return an {@link OrdinalIterator} over this set.
     */
    public abstract OrdinalIterator iterator();

    /**
     * @return the number of ordinals in this set.
     */
    public abstract int size();

    private static final int[] EMPTY_ORDINAL_ARRAY = new int[0];

    /**
     * An empty <code>OrdinalSet</code>.
     */
    public static final OrdinalSet EMPTY_SET = new OrdinalSet() {
        @Override
        public boolean contains(int value) {
            return false;
        }

        @Override
        public int[] asArray() {
            return EMPTY_ORDINAL_ARRAY;
        }

        @Override
        public OrdinalIterator iterator() {
            return EMPTY_ITERATOR;
        }

        @Override
        public int size() {
            return 0;
        }
    };
}
| 9,745 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/NFGraph.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph;
import com.netflix.nfgraph.build.NFBuildGraph;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.util.OrdinalMap;
/**
* <code>NFGraph</code> represents a directed graph and is the base class for the two flavors of NetflixGraph
* ({@link NFBuildGraph} and {@link NFCompressedGraph}). It defines the operations for retrieving connections
* in the graph, given some node and property.<p>
*
* In the NetflixGraph library, each node in your graph is expected to be uniquely represented as a "type" and "ordinal".
* Each "type" will be referred to by some String. An "ordinal", in this sense, is a number that uniquely defines the node
* given its type. If a type of node has "n" instances, then each instance should be representable by some unique value
* from 0 through (n-1). If nodes in the graph are represented as Objects externally to the NetflixGraph library, then
* developers may find it helpful to use an {@link OrdinalMap} for each type to create and maintain a mapping between objects
* and their ordinals. The {@link OrdinalMap} has been optimized with this use case in mind. <p>
*
* Use of the NFGraph is expected to generally follow some lifecycle:<p>
* <ol>
* <li>Define an {@link NFGraphSpec}, which serves as the schema for the graph data.</li>
* <li>Instantiate an {@link NFBuildGraph}, then populate it with connections.</li>
* <li>Compress the {@link NFBuildGraph}, which will return a representation of the data as an {@link NFCompressedGraph}.</li>
* <li>Serialize the {@link NFCompressedGraph} to a stream. Netflix, for example, has a use case which streams this graph to Amazon Web Service's S3.</li>
* <li>Deserialize the stream where the compact in-memory representation of the graph data is necessary.</li>
* </ol><p>
*
* In some cases, the location where the compact in-memory representation is necessary is the same as the location where this
* representation will be built. In these cases, steps (4) and (5) above will be omitted.<p>
*
* If there will be a producer of this graph and one or more consumers, then your producer code will resemble:<p>
*
* <pre>
* {@code
* NFGraphSpec spec = new NFGraphSpec( ... );
*
* NFBuildGraph buildGraph = new NFBuildGraph(spec);
*
* for( ... each connection between nodes ... ) {
* graph.addConnection( ... );
* }
*
* NFCompressedGraph compressedGraph = buildGraph.compress();
*
* OutputStream os = ... stream to where you want the serialized data ...;
*
* compressedGraph.writeTo(os);
* }
* </pre>
*
* And your consumer code will resemble:<p>
*
* <pre>
* {@code
* InputStream is = ... stream from where the serialized data was written ...;
*
* NFGraph graph = NFCompressedGraph.readFrom(is);
* }
* </pre>
*
* @see NFGraphSpec
* @see NFBuildGraph
* @see NFCompressedGraph
*
* @author dkoszewnik
*/
public abstract class NFGraph {

    protected final NFGraphSpec graphSpec;
    protected final NFGraphModelHolder modelHolder;

    protected NFGraph(NFGraphSpec graphSpec) {
        this(graphSpec, new NFGraphModelHolder());
    }

    protected NFGraph(NFGraphSpec graphSpec, NFGraphModelHolder modelHolder) {
        this.graphSpec = graphSpec;
        this.modelHolder = modelHolder;
    }

    /**
     * Retrieve a single connected ordinal, given the type and ordinal of the originating node, and the property by which this node is connected.
     *
     * @return the connected ordinal, or -1 if there is no such ordinal
     */
    public int getConnection(String nodeType, int ordinal, String propertyName) {
        // Model index 0 is always the global connection model
        // (see NFGraphModelHolder's constructor).
        return getConnection(0, nodeType, ordinal, propertyName);
    }

    /**
     * Retrieve a single connected ordinal in a given connection model, given the type and ordinal of the originating node, and the property by which this node is connected.
     *
     * @return the connected ordinal, or -1 if there is no such ordinal
     */
    public int getConnection(String connectionModel, String nodeType, int ordinal, String propertyName) {
        return getConnection(modelHolder.getModelIndex(connectionModel), nodeType, ordinal, propertyName);
    }

    /**
     * Retrieve an {@link OrdinalIterator} over all connected ordinals, given the type and ordinal of the originating node, and the property by which this node is connected.
     *
     * @return an {@link OrdinalIterator} over all connected ordinals
     */
    public OrdinalIterator getConnectionIterator(String nodeType, int ordinal, String propertyName) {
        return getConnectionIterator(0, nodeType, ordinal, propertyName);
    }

    /**
     * Retrieve an {@link OrdinalIterator} over all connected ordinals in a given connection model, given the type and ordinal of the originating node, and the property by which this node is connected.
     *
     * @return an {@link OrdinalIterator} over all connected ordinals
     */
    public OrdinalIterator getConnectionIterator(String connectionModel, String nodeType, int ordinal, String propertyName) {
        return getConnectionIterator(modelHolder.getModelIndex(connectionModel), nodeType, ordinal, propertyName);
    }

    /**
     * Retrieve an {@link OrdinalSet} over all connected ordinals, given the type and ordinal of the originating node, and the property by which this node is connected.
     *
     * @return an {@link OrdinalSet} over all connected ordinals
     */
    public OrdinalSet getConnectionSet(String nodeType, int ordinal, String propertyName) {
        return getConnectionSet(0, nodeType, ordinal, propertyName);
    }

    /**
     * Retrieve an {@link OrdinalSet} over all connected ordinals in a given connection model, given the type and ordinal of the originating node, and the property by which this node is connected.
     *
     * @return an {@link OrdinalSet} over all connected ordinals
     */
    public OrdinalSet getConnectionSet(String connectionModel, String nodeType, int ordinal, String propertyName) {
        return getConnectionSet(modelHolder.getModelIndex(connectionModel), nodeType, ordinal, propertyName);
    }

    protected abstract int getConnection(int connectionModelIndex, String nodeType, int ordinal, String propertyName);

    protected abstract OrdinalSet getConnectionSet(int connectionModelIndex, String nodeType, int ordinal, String propertyName);

    protected abstract OrdinalIterator getConnectionIterator(int connectionModelIndex, String nodeType, int ordinal, String propertyName);
}
| 9,746 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/serializer/NFCompressedGraphPointersDeserializer.java | /*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
import com.netflix.nfgraph.compressed.NFCompressedGraphIntPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphLongPointers;
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import com.netflix.nfgraph.util.ByteArrayReader;
import com.netflix.nfgraph.util.SimpleByteArray;
import java.io.DataInputStream;
import java.io.IOException;
/**
 * Deserializes the per-node-type pointer arrays of a compressed graph.
 * Pointers are delta-encoded: each variable-length value read is the offset
 * from the previous pointer, with a raw value of -1 marking a node with no data.
 */
public class NFCompressedGraphPointersDeserializer {

    NFCompressedGraphPointers deserializePointers(DataInputStream dis) throws IOException {
        int numTypes = dis.readInt();

        /// Backwards compatibility: in order to keep produced artifacts compatible,
        /// the serializer flags the sign bit of the serialized number of node types
        /// when more than 32 bits is required to represent the pointers.
        /// (The previous code masked the sign bit off twice; once is enough.)
        if ((numTypes & Integer.MIN_VALUE) != 0) {
            return deserializeLongPointers(dis, numTypes & Integer.MAX_VALUE);
        }

        return deserializeIntPointers(dis, numTypes);
    }

    /** Reads one 64-bit pointer array per node type, keyed by the type's name. */
    private NFCompressedGraphLongPointers deserializeLongPointers(DataInputStream dis, int numTypes) throws IOException {
        NFCompressedGraphLongPointers pointers = new NFCompressedGraphLongPointers();

        for (int i = 0; i < numTypes; i++) {
            String nodeType = dis.readUTF();
            pointers.addPointers(nodeType, deserializeLongPointerArray(dis));
        }

        return pointers;
    }

    /**
     * Reads a delta-encoded array of long pointers.
     *
     * @return one pointer per node; -1 for nodes without data
     */
    private long[] deserializeLongPointerArray(DataInputStream dis) throws IOException {
        int numNodes = dis.readInt();
        int numBytes = dis.readInt();

        byte[] data = new byte[numBytes];
        long[] pointers = new long[numNodes];
        dis.readFully(data);

        ByteArrayReader reader = new ByteArrayReader(new SimpleByteArray(data), 0);
        long currentPointer = 0;

        for (int i = 0; i < numNodes; i++) {
            long delta = reader.readVLong();
            if (delta == -1) {
                pointers[i] = -1;   // sentinel: no data for this node
            } else {
                currentPointer += delta;
                pointers[i] = currentPointer;
            }
        }

        return pointers;
    }

    /** Reads one 32-bit pointer array per node type, keyed by the type's name. */
    private NFCompressedGraphIntPointers deserializeIntPointers(DataInputStream dis, int numTypes) throws IOException {
        NFCompressedGraphIntPointers pointers = new NFCompressedGraphIntPointers();

        for (int i = 0; i < numTypes; i++) {
            String nodeType = dis.readUTF();
            pointers.addPointers(nodeType, deserializeIntPointerArray(dis));
        }

        return pointers;
    }

    /**
     * Reads a delta-encoded array of int pointers.  The running pointer is
     * accumulated in a long and narrowed per element, matching the int
     * representation used by {@code NFCompressedGraphIntPointers}.
     *
     * @return one pointer per node; -1 for nodes without data
     */
    private int[] deserializeIntPointerArray(DataInputStream dis) throws IOException {
        int numNodes = dis.readInt();
        int numBytes = dis.readInt();

        byte[] data = new byte[numBytes];
        int[] pointers = new int[numNodes];
        dis.readFully(data);

        ByteArrayReader reader = new ByteArrayReader(new SimpleByteArray(data), 0);
        long currentPointer = 0;

        for (int i = 0; i < numNodes; i++) {
            int delta = reader.readVInt();
            if (delta == -1) {
                pointers[i] = -1;   // sentinel: no data for this node
            } else {
                currentPointer += delta;
                pointers[i] = (int) currentPointer;
            }
        }

        return pointers;
    }
}
| 9,747 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/serializer/NFCompressedGraphDeserializer.java | /*
* Copyright 2013-2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
import com.netflix.nfgraph.NFGraphModelHolder;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
import com.netflix.nfgraph.spec.NFPropertySpec;
import com.netflix.nfgraph.util.ByteData;
import com.netflix.nfgraph.util.ByteSegmentPool;
import com.netflix.nfgraph.util.SegmentedByteArray;
import com.netflix.nfgraph.util.SimpleByteArray;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
/**
* This class is used by {@link NFCompressedGraph#readFrom(InputStream)}.<p>
*
* It is unlikely that this class will need to be used externally.
*/
public class NFCompressedGraphDeserializer {
private final NFCompressedGraphPointersDeserializer pointersDeserializer = new NFCompressedGraphPointersDeserializer();
public NFCompressedGraph deserialize(InputStream is) throws IOException {
return deserialize(is, null);
}
public NFCompressedGraph deserialize(InputStream is, ByteSegmentPool byteSegmentPool) throws IOException {
DataInputStream dis = new DataInputStream(is);
NFGraphSpec spec = deserializeSpec(dis);
NFGraphModelHolder models = deserializeModels(dis);
NFCompressedGraphPointers pointers = pointersDeserializer.deserializePointers(dis);
long dataLength = deserializeDataLength(dis);
ByteData data = deserializeData(dis, dataLength, byteSegmentPool);
return new NFCompressedGraph(spec, models, data, dataLength, pointers);
}
private NFGraphSpec deserializeSpec(DataInputStream dis) throws IOException {
int numNodes = dis.readInt();
NFNodeSpec nodeSpecs[] = new NFNodeSpec[numNodes];
for(int i=0;i<numNodes;i++) {
String nodeTypeName = dis.readUTF();
int numProperties = dis.readInt();
NFPropertySpec propertySpecs[] = new NFPropertySpec[numProperties];
for(int j=0;j<numProperties;j++) {
String propertyName = dis.readUTF();
String toNodeType = dis.readUTF();
boolean isGlobal = dis.readBoolean();
boolean isMultiple = dis.readBoolean();
boolean isHashed = dis.readBoolean();
propertySpecs[j] = new NFPropertySpec(propertyName, toNodeType, isGlobal, isMultiple, isHashed);
}
nodeSpecs[i] = new NFNodeSpec(nodeTypeName, propertySpecs);
}
return new NFGraphSpec(nodeSpecs);
}
private NFGraphModelHolder deserializeModels(DataInputStream dis) throws IOException {
int numModels = dis.readInt();
NFGraphModelHolder modelHolder = new NFGraphModelHolder();
for(int i=0;i<numModels;i++) {
modelHolder.getModelIndex(dis.readUTF());
}
return modelHolder;
}
/// Backwards compatibility: If the data length is greater than Integer.MAX_VALUE, then
/// -1 is serialized as an int before a long containing the actual length.
private long deserializeDataLength(DataInputStream dis) throws IOException {
int dataLength = dis.readInt();
if(dataLength == -1) {
return dis.readLong();
}
return dataLength;
}
private ByteData deserializeData(DataInputStream dis, long dataLength, ByteSegmentPool memoryPool) throws IOException {
if(dataLength >= 0x20000000 || memoryPool != null) {
SegmentedByteArray data = memoryPool == null ? new SegmentedByteArray(14) : new SegmentedByteArray(memoryPool);
data.readFrom(dis, dataLength);
return data;
} else {
byte data[] = new byte[(int)dataLength];
dis.readFully(data);
return new SimpleByteArray(data);
}
}
} | 9,748 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/serializer/NFCompressedGraphSerializer.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
import com.netflix.nfgraph.NFGraphModelHolder;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
import com.netflix.nfgraph.spec.NFPropertySpec;
import com.netflix.nfgraph.util.ByteData;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
/**
* This class is used by {@link NFCompressedGraph#writeTo(OutputStream)}.<p>
*
* It is unlikely that this class will need to be used externally.
*/
public class NFCompressedGraphSerializer {
private final NFGraphSpec spec;
private final NFGraphModelHolder modelHolder;
private final NFCompressedGraphPointersSerializer pointersSerializer;
private final ByteData data;
private final long dataLength;
public NFCompressedGraphSerializer(NFGraphSpec spec, NFGraphModelHolder modelHolder, NFCompressedGraphPointers pointers, ByteData data, long dataLength) {
this.spec = spec;
this.modelHolder = modelHolder;
this.pointersSerializer = new NFCompressedGraphPointersSerializer(pointers, dataLength);
this.data = data;
this.dataLength = dataLength;
}
public void serializeTo(OutputStream os) throws IOException {
DataOutputStream dos = new DataOutputStream(os);
serializeSpec(dos);
serializeModels(dos);
pointersSerializer.serializePointers(dos);
serializeData(dos);
dos.flush();
}
private void serializeSpec(DataOutputStream dos) throws IOException {
dos.writeInt(spec.size());
for(NFNodeSpec nodeSpec : spec) {
dos.writeUTF(nodeSpec.getNodeTypeName());
dos.writeInt(nodeSpec.getPropertySpecs().length);
for(NFPropertySpec propertySpec : nodeSpec.getPropertySpecs()) {
dos.writeUTF(propertySpec.getName());
dos.writeUTF(propertySpec.getToNodeType());
dos.writeBoolean(propertySpec.isGlobal());
dos.writeBoolean(propertySpec.isMultiple());
dos.writeBoolean(propertySpec.isHashed());
}
}
}
private void serializeModels(DataOutputStream dos) throws IOException {
dos.writeInt(modelHolder.size());
for(String model : modelHolder) {
dos.writeUTF(model);
}
}
private void serializeData(DataOutputStream dos) throws IOException {
/// In order to maintain backwards compatibility of produced artifacts,
/// if more than Integer.MAX_VALUE bytes are required in the data,
/// first serialize a negative 1 integer, then serialize the number
/// of required bits as a long.
if(dataLength > Integer.MAX_VALUE) {
dos.writeInt(-1);
dos.writeLong(dataLength);
} else {
dos.writeInt((int)dataLength);
}
data.writeTo(dos, dataLength);
}
} | 9,749 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/serializer/NFCompressedGraphPointersSerializer.java | /*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.serializer;
import com.netflix.nfgraph.compressed.NFCompressedGraphPointers;
import com.netflix.nfgraph.util.ByteArrayBuffer;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Map;
/**
 * Serializes an {@link NFCompressedGraphPointers} header ahead of the graph data.<p>
 *
 * It is unlikely that this class will need to be used externally.
 */
public class NFCompressedGraphPointersSerializer {

    private final NFCompressedGraphPointers pointers;
    private final long dataLength;   // total graph data length; decides int vs long pointer format

    NFCompressedGraphPointersSerializer(NFCompressedGraphPointers pointers, long dataLength) {
        this.pointers = pointers;
        this.dataLength = dataLength;
    }

    void serializePointers(DataOutputStream dos) throws IOException {
        int numNodeTypes = pointers.asMap().size();
        if(dataLength > 0xFFFFFFFFL)
            numNodeTypes |= Integer.MIN_VALUE;

        /// In order to maintain backwards compatibility of produced artifacts,
        /// if more than 32 bits is required to represent the pointers, then flag
        /// the sign bit in the serialized number of node types.
        dos.writeInt(numNodeTypes);

        for(Map.Entry<String, long[]>entry : pointers.asMap().entrySet()) {
            dos.writeUTF(entry.getKey());
            serializePointerArray(dos, entry.getValue());
        }
    }

    // Pointers are delta-encoded: each entry stores the variable-byte encoded
    // difference from the previous non-null pointer; -1 (no data for that
    // node) is written as the reserved VInt marker byte.
    private void serializePointerArray(DataOutputStream dos, long pointers[]) throws IOException {
        ByteArrayBuffer buf = new ByteArrayBuffer();
        long currentPointer = 0;

        for(int i=0;i<pointers.length;i++) {
            if(pointers[i] == -1) {
                buf.writeVInt(-1);
            } else {
                long delta = pointers[i] - currentPointer;
                // Deltas that need more than 32 bits use the VLong encoding.
                // NOTE(review): deltas in [0x80000000, 0xFFFFFFFE] are cast to a
                // negative int here -- presumably the long-pointer flag above is
                // always set before such deltas can occur; confirm against the
                // deserializer's int path.
                if(delta >= 0xFFFFFFFFL) {
                    buf.writeVLong(delta);
                } else {
                    buf.writeVInt((int)delta);
                }
                currentPointer = pointers[i];
            }
        }

        // Layout per node type: node count, encoded byte count, encoded bytes.
        dos.writeInt(pointers.length);
        dos.writeInt((int)buf.length());
        buf.copyTo(dos);
    }
}
| 9,750 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/util/ByteArrayReader.java | /*
* Copyright 2013-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.util;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
/**
* Used by the {@link NFCompressedGraph}, and various {@link OrdinalSet} and {@link OrdinalIterator} implementations to read the encoded graph data.<p>
*
* It is unlikely that this class will be required externally.
*/
/**
 * Used by the {@link NFCompressedGraph}, and various {@link OrdinalSet} and {@link OrdinalIterator} implementations to read the encoded graph data.<p>
 *
 * It is unlikely that this class will be required externally.
 */
public class ByteArrayReader {

    private final ByteData data;

    private long pointer;    // current absolute read offset
    private long startByte;  // inclusive start of the readable window
    private long endByte;    // exclusive end of the readable window; always set by a constructor

    public ByteArrayReader(ByteData data, long pointer) {
        this.data = data;
        this.pointer = pointer;
        this.startByte = pointer;
        this.endByte = data.length();
    }

    public ByteArrayReader(ByteData data, long startByte, long endByte) {
        this.data = data;
        this.startByte = startByte;
        this.endByte = endByte;
        this.pointer = startByte;
    }

    /**
     * @return the byte value at the given offset, relative to the start byte.
     */
    public byte getByte(long offset) {
        return data.get(startByte + offset);
    }

    /**
     * Set the current offset of this reader.
     */
    public void setPointer(long pointer) {
        this.pointer = pointer;
    }

    /**
     * Increment the current offset of this reader by numBytes.
     */
    public void skip(long numBytes) {
        pointer += numBytes;
    }

    /**
     * @return a variable-byte integer at the current offset, or -1 when the
     * offset is at or past the end of the window or the reserved marker byte
     * (0x80) is read. The offset is incremented by the size of the returned integer.
     */
    public int readVInt() {
        if(pointer >= endByte)
            return -1;

        byte b = readByte();
        if(b == (byte) 0x80)
            return -1;   // reserved single-byte encoding of -1

        int value = b & 0x7F;
        while ((b & 0x80) != 0) {   // high bit set means a continuation byte follows
            b = readByte();
            value <<= 7;
            value |= (b & 0x7F);
        }
        return value;
    }

    /**
     * @return a variable-byte long at the current offset, or -1 when the
     * offset is at or past the end of the window or the reserved marker byte
     * (0x80) is read. The offset is incremented by the size of the returned long.
     */
    public long readVLong() {
        if(pointer >= endByte)
            return -1;

        byte b = readByte();
        if(b == (byte) 0x80)
            return -1;   // reserved single-byte encoding of -1

        long value = b & 0x7F;
        while ((b & 0x80) != 0) {   // high bit set means a continuation byte follows
            b = readByte();
            value <<= 7;
            value |= (b & 0x7F);
        }
        return value;
    }

    /**
     * @return the byte at the current offset. The offset is incremented by one.
     */
    public byte readByte() {
        return data.get(pointer++);
    }

    /**
     * Sets the start byte of this reader to the current offset, then sets the end byte to the current offset + <code>remainingBytes</code>
     */
    public void setRemainingBytes(int remainingBytes) {
        this.startByte = pointer;
        this.endByte = pointer + remainingBytes;
    }

    /**
     * Sets the current offset of this reader to the start byte.
     */
    public void reset() {
        this.pointer = startByte;
    }

    /**
     * @return the length of this reader's window, in bytes.
     */
    public long length() {
        return endByte - startByte;
    }

    /**
     * @return a copy of this reader. The copy will have the same underlying byte array, start byte, and end byte, but the current offset will be equal to the start byte.
     */
    public ByteArrayReader copy() {
        return new ByteArrayReader(data, startByte, endByte);
    }
}
| 9,751 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/util/Mixer.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.util;
/**
 * Static integer-mixing utility used to spread hash codes before table indexing.
 */
public class Mixer {

    /** Static utility class; not instantiable. */
    private Mixer() {
    }

    /**
     * Thomas Wang's commonly used 32 bit mix function.
     *
     * @param key the value to mix
     * @return a well-distributed, non-negative hash of {@code key}
     */
    public static int hashInt(int key) {
        key = ~key + (key << 15);
        key = key ^ (key >>> 12);
        key = key + (key << 2);
        key = key ^ (key >>> 4);
        key = key * 2057;
        key = key ^ (key >>> 16);
        // Clear the sign bit so the result can be used directly as a table index.
        return key & Integer.MAX_VALUE;
    }
}
| 9,752 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/util/OrdinalMap.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.util;
import java.util.Arrays;
import java.util.Iterator;
/**
* An <code>OrdinalMap</code> will generate and maintain a mapping between objects added and an integer value between
* 0 and n, where n is the number of objects in the map.<p>
*
* The values mapped to the objects will be the order in which the objects are inserted.<p>
*
* The <code>OrdinalMap</code> is memory-efficient and can retrieve an object given an ordinal, or an ordinal given an object, both in <code>O(1)</code> time.<p>
*
* If, for example, some application refers to graph nodes as Strings, the OrdinalMap can be used as follows:<p>
*
* <pre>
* {@code
*
* OrdinalMap<String> ordinalMap = new OrdinalMap<String>();
*
* int ord0 = ordinalMap.add("node0"); // returns 0
* int ord1 = ordinalMap.add("node1"); // returns 1
* int ord2 = ordinalMap.add("node2"); // returns 2
* int ord3 = ordinalMap.add("node1"); // returns 1
*
* ordinalMap.get("node2"); // returns 2
* ordinalMap.get(ord2); // returns "node2"
*
* }
* </pre>
*/
public class OrdinalMap<T> implements Iterable<T> {

    // Open-addressed hash table with linear probing.  Each slot holds an
    // ordinal into objectsByOrdinal, or -1 for an empty slot.
    private int hashedOrdinalArray[];
    // Objects indexed by their ordinal; only the first `size` entries are populated.
    private T objectsByOrdinal[];

    private int size;

    public OrdinalMap() {
        this(10);
    }

    @SuppressWarnings("unchecked")
    public OrdinalMap(int expectedSize) {
        // Size the probe table to the next power of two above expectedSize * 4/3,
        // and cap the ordinal array at 3/4 of the table size, giving a maximum
        // load factor of 75% before growth.
        int mapArraySize = 1 << (32 - Integer.numberOfLeadingZeros(expectedSize * 4 / 3));
        int ordinalArraySize = mapArraySize * 3 / 4;

        hashedOrdinalArray = newHashedOrdinalArray(mapArraySize);
        objectsByOrdinal = (T[]) new Object[ordinalArraySize];
    }

    /**
     * Add an object into this <code>OrdinalMap</code>. If the same object (or an {@link Object#equals(Object)} object) is
     * already in the map, then no changes will be made.
     *
     * @return the ordinal of <code>obj</code>
     */
    public int add(T obj) {
        int ordinal = get(obj);
        if(ordinal != -1)
            return ordinal;

        if(size == objectsByOrdinal.length)
            growCapacity();

        objectsByOrdinal[size] = obj;
        hashOrdinalIntoArray(size, hashedOrdinalArray);

        return size++;
    }

    /**
     * @return the ordinal of an object previously added to the map. If the object has not been added to the map, returns -1 instead.
     */
    public int get(T obj) {
        int hash = Mixer.hashInt(obj.hashCode());

        // Linear probe from the hash bucket until the object or an empty slot
        // (-1) is found.
        int bucket = hash % hashedOrdinalArray.length;
        int ordinal = hashedOrdinalArray[bucket];

        while(ordinal != -1) {
            if(objectsByOrdinal[ordinal].equals(obj))
                return ordinal;

            bucket = (bucket + 1) % hashedOrdinalArray.length;
            ordinal = hashedOrdinalArray[bucket];
        }

        return -1;
    }

    /**
     * @return the object for a given ordinal. If the ordinal does not yet exist, returns null.
     */
    public T get(int ordinal) {
        if(ordinal >= size)
            return null;
        return objectsByOrdinal[ordinal];
    }

    /**
     * @return the number of objects in this map.
     */
    public int size() {
        return size;
    }

    // Doubles both arrays.  Only invoked when objectsByOrdinal is completely
    // full, so every entry rehashed below is non-null.
    private void growCapacity() {
        int newHashedOrdinalArray[] = newHashedOrdinalArray(hashedOrdinalArray.length * 2);

        for(int i=0;i<objectsByOrdinal.length;i++) {
            hashOrdinalIntoArray(i, newHashedOrdinalArray);
        }

        objectsByOrdinal = Arrays.copyOf(objectsByOrdinal, objectsByOrdinal.length * 2);
        hashedOrdinalArray = newHashedOrdinalArray;
    }

    // Inserts the given ordinal into the first empty probe slot for its hash.
    private void hashOrdinalIntoArray(int ordinal, int hashedOrdinalArray[]) {
        int hash = Mixer.hashInt(objectsByOrdinal[ordinal].hashCode());

        int bucket = hash % hashedOrdinalArray.length;
        while(hashedOrdinalArray[bucket] != -1) {
            bucket = (bucket + 1) % hashedOrdinalArray.length;
        }

        hashedOrdinalArray[bucket] = ordinal;
    }

    // Allocates a probe table with every slot marked empty (-1).
    private int[] newHashedOrdinalArray(int length) {
        int arr[] = new int[length];
        Arrays.fill(arr, -1);
        return arr;
    }

    /**
     * @return an {@link Iterator} over the objects in this mapping.
     */
    @Override
    public Iterator<T> iterator() {
        return new ArrayIterator<T>(objectsByOrdinal, size);
    }
}
| 9,753 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/util/ByteData.java | /*
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.util;
import java.io.IOException;
import java.io.OutputStream;
/**
 * A random-access sequence of bytes addressed by a {@code long} index,
 * allowing implementations to hold more than {@code Integer.MAX_VALUE} bytes.
 */
public interface ByteData {

    /** Sets the byte at index {@code idx} to {@code b}. */
    void set(long idx, byte b);

    /** @return the byte stored at index {@code idx} */
    byte get(long idx);

    /** @return the number of addressable bytes */
    long length();

    /** Writes the first {@code length} bytes of this data to {@code os}. */
    void writeTo(OutputStream os, long length) throws IOException;
}
| 9,754 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/util/SimpleByteArray.java | /*
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.util;
import java.io.IOException;
import java.io.OutputStream;
/**
 * A {@link ByteData} implementation backed by a single heap byte array,
 * limited to {@code Integer.MAX_VALUE} bytes.
 */
public class SimpleByteArray implements ByteData {

    private final byte[] bytes;

    /** Creates a zero-filled array of the given length. */
    public SimpleByteArray(int length) {
        bytes = new byte[length];
    }

    /** Wraps the given array directly (no defensive copy). */
    public SimpleByteArray(byte[] data) {
        bytes = data;
    }

    @Override
    public void set(long idx, byte b) {
        bytes[(int)idx] = b;
    }

    @Override
    public byte get(long idx) {
        return bytes[(int)idx];
    }

    @Override
    public long length() {
        return bytes.length;
    }

    @Override
    public void writeTo(OutputStream os, long length) throws IOException {
        os.write(bytes, 0, (int)length);
    }
}
| 9,755 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/util/SegmentedByteArray.java | /*
* Copyright 2014-2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.util;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
/**
 * A {@link ByteData} implementation which stores its bytes in equal-sized
 * segments drawn from a {@link ByteSegmentPool}, supporting more than 2GB of
 * data and reuse of segment memory across instances.
 */
public class SegmentedByteArray implements ByteData {

    private byte[][] segments;                 // grows on demand; tail entries may be null
    private final ByteSegmentPool memoryPool;  // supplies (and reclaims) the individual segments
    private final int log2OfSegmentSize;
    private final int bitmask;                 // segmentSize - 1; masks an index to its in-segment offset
    private long length;                       // bytes covered by currently-allocated segments

    public SegmentedByteArray(int log2OfSegmentSize) {
        this(new ByteSegmentPool(log2OfSegmentSize));
    }

    public SegmentedByteArray(ByteSegmentPool memoryPool) {
        this.segments = new byte[2][];
        this.memoryPool = memoryPool;
        this.log2OfSegmentSize = memoryPool.getLog2OfSegmentSize();
        this.bitmask = (1 << log2OfSegmentSize) - 1;
        this.length = 0;
    }

    /**
     * Set the byte at the given index to the specified value
     */
    public void set(long index, byte value) {
        int segmentIndex = (int)(index >> log2OfSegmentSize);
        ensureCapacity(segmentIndex);
        segments[segmentIndex][(int)(index & bitmask)] = value;
    }

    /**
     * Get the value of the byte at the specified index.
     */
    public byte get(long index) {
        return segments[(int)(index >>> log2OfSegmentSize)][(int)(index & bitmask)];
    }

    /**
     * For a SegmentedByteArray, this is a faster copy implementation.
     *
     * @param src the source array
     * @param srcPos starting position in the source
     * @param destPos starting position in this array
     * @param length number of bytes to copy
     */
    public void copy(SegmentedByteArray src, long srcPos, long destPos, long length) {
        int segmentLength = 1 << log2OfSegmentSize;
        int currentSegment = (int)(destPos >>> log2OfSegmentSize);
        int segmentStartPos = (int)(destPos & bitmask);
        int remainingBytesInSegment = segmentLength - segmentStartPos;

        // Fill one destination segment at a time; the array-based copy below
        // handles the source's own segment boundaries.
        while(length > 0) {
            int bytesToCopyFromSegment = (int)Math.min(remainingBytesInSegment, length);
            ensureCapacity(currentSegment);
            int copiedBytes = src.copy(srcPos, segments[currentSegment], segmentStartPos, bytesToCopyFromSegment);
            srcPos += copiedBytes;
            length -= copiedBytes;
            segmentStartPos = 0;
            remainingBytesInSegment = segmentLength;
            currentSegment++;
        }
    }

    /**
     * copies exactly data.length bytes from this SegmentedByteArray into the provided byte array
     *
     * @return the number of bytes copied
     */
    public int copy(long srcPos, byte[] data, int destPos, int length) {
        int segmentSize = 1 << log2OfSegmentSize;
        int remainingBytesInSegment = (int)(segmentSize - (srcPos & bitmask));
        int dataPosition = destPos;

        while(length > 0) {
            byte[] segment = segments[(int)(srcPos >>> log2OfSegmentSize)];
            int bytesToCopyFromSegment = Math.min(remainingBytesInSegment, length);
            System.arraycopy(segment, (int)(srcPos & bitmask), data, dataPosition, bytesToCopyFromSegment);

            dataPosition += bytesToCopyFromSegment;
            srcPos += bytesToCopyFromSegment;
            remainingBytesInSegment = segmentSize - (int)(srcPos & bitmask);
            length -= bytesToCopyFromSegment;
        }

        return dataPosition - destPos;
    }

    /**
     * Fills this array with {@code length} bytes read from the stream,
     * starting at index 0.
     */
    public void readFrom(InputStream is, long length) throws IOException {
        int segmentSize = 1 << log2OfSegmentSize;
        int segment = 0;

        while(length > 0) {
            ensureCapacity(segment);
            long bytesToCopy = Math.min(segmentSize, length);
            long bytesCopied = 0;
            while(bytesCopied < bytesToCopy) {
                // NOTE(review): InputStream.read() returns -1 at EOF, which would
                // make bytesCopied go backwards and loop forever on a truncated
                // stream -- presumably callers guarantee `length` readable bytes;
                // confirm.
                bytesCopied += is.read(segments[segment], (int)bytesCopied, (int)(bytesToCopy - bytesCopied));
            }
            segment++;
            length -= bytesCopied;
        }
    }

    @Override
    public void writeTo(OutputStream os, long length) throws IOException {
        writeTo(os, 0, length);
    }

    /**
     * Write a portion of this data to an OutputStream.
     */
    public void writeTo(OutputStream os, long startPosition, long len) throws IOException {
        int segmentSize = 1 << log2OfSegmentSize;
        int remainingBytesInSegment = segmentSize - (int)(startPosition & bitmask);
        long remainingBytesInCopy = len;

        while(remainingBytesInCopy > 0) {
            long bytesToCopyFromSegment = Math.min(remainingBytesInSegment, remainingBytesInCopy);
            os.write(segments[(int)(startPosition >>> log2OfSegmentSize)], (int)(startPosition & bitmask), (int)bytesToCopyFromSegment);
            startPosition += bytesToCopyFromSegment;
            remainingBytesInSegment = segmentSize - (int)(startPosition & bitmask);
            remainingBytesInCopy -= bytesToCopyFromSegment;
        }
    }

    /**
     * Ensures that the segment at segmentIndex exists
     *
     * @param segmentIndex the segment which must be backed by allocated memory
     */
    private void ensureCapacity(int segmentIndex) {
        // Grow the segment table (1.5x) until the index fits.
        while(segmentIndex >= segments.length) {
            segments = Arrays.copyOf(segments, segments.length * 3 / 2);
        }

        // Allocate every not-yet-populated segment up to and including
        // segmentIndex; `length` tracks the allocated extent.
        long numSegmentsPopulated = length >> log2OfSegmentSize;
        for(long i=numSegmentsPopulated; i <= segmentIndex; i++) {
            segments[(int)i] = memoryPool.getSegment();
            length += 1 << log2OfSegmentSize;
        }
    }

    @Override
    public long length() {
        return length;
    }

    /**
     * Returns all allocated segments to the memory pool.
     * Note that this is NOT thread safe.
     */
    public void destroy() {
        for(byte[] segment : segments) {
            memoryPool.returnSegment(segment);
        }
    }
}
| 9,756 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/util/ArrayIterator.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.util;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
* A simple implementation of {@link Iterator} over an array of objects.
*
* It is unlikely that this class will be required externally.
*/
/**
 * A simple implementation of {@link Iterator} over an array of objects.
 *
 * It is unlikely that this class will be required externally.
 */
public class ArrayIterator<T> implements Iterator<T> {

    private final T[] arr;
    private final int size;   // number of leading entries to iterate over
    private int counter = 0;  // index of the next element to return

    public ArrayIterator(T arr[]) {
        this(arr, arr.length);
    }

    public ArrayIterator(T arr[], int size) {
        this.arr = arr;
        this.size = size;
    }

    @Override
    public boolean hasNext() {
        return counter < size;
    }

    /**
     * @return the next element
     * @throws NoSuchElementException if the iteration is exhausted, per the
     *         {@link Iterator} contract
     */
    @Override
    public T next() {
        if (!hasNext())
            throw new NoSuchElementException("index " + counter + ", size " + size);
        return arr[counter++];
    }

    @Override
    public void remove() {
        throw new UnsupportedOperationException("Cannot remove elements from this array.");
    }
}
| 9,757 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/util/ByteSegmentPool.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.util;
import java.util.Arrays;
import java.util.LinkedList;
/**
* This is a memory pool which can be used to allocate and reuse memory for deserialized NFCompressedGraph data.
*
* Note that this is NOT thread safe, and it is up to implementations to ensure that only a single update thread
* is accessing this memory pool at any given time.
*/
/**
 * This is a memory pool which can be used to allocate and reuse memory for deserialized NFCompressedGraph data.
 *
 * Note that this is NOT thread safe, and it is up to implementations to ensure that only a single update thread
 * is accessing this memory pool at any given time.
 */
public class ByteSegmentPool {

    private final LinkedList<byte[]> pooledSegments;
    private final int log2OfSegmentSize;   // every segment is exactly 2^log2OfSegmentSize bytes

    public ByteSegmentPool(int log2OfSegmentSize) {
        this.pooledSegments = new LinkedList<>();
        this.log2OfSegmentSize = log2OfSegmentSize;
    }

    public int getLog2OfSegmentSize() {
        return log2OfSegmentSize;
    }

    /**
     * @return a zero-filled segment of 2^log2OfSegmentSize bytes, reusing a
     *         previously returned segment when one is available.
     */
    public byte[] getSegment() {
        if(pooledSegments.isEmpty())
            return new byte[1 << log2OfSegmentSize];

        // Reused segments may contain stale data; zero them before handing out.
        byte[] segment = pooledSegments.removeFirst();
        Arrays.fill(segment, (byte)0);
        return segment;
    }

    /**
     * Makes the given segment available for reuse; null segments are ignored.
     */
    public void returnSegment(byte[] segment) {
        if(segment != null)
            pooledSegments.addLast(segment);
    }
}
| 9,758 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/util/ByteArrayBuffer.java | /*
* Copyright 2013-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.util;
import com.netflix.nfgraph.compressor.NFCompressedGraphBuilder;
import java.io.IOException;
import java.io.OutputStream;
/**
* A <code>ByteArrayBuffer</code> is used by the {@link NFCompressedGraphBuilder} to write data to a byte array.<p>
*
* It is unlikely that this class will need to be used externally.
*/
public class ByteArrayBuffer {
private final SegmentedByteArray data;  // backing storage for the written bytes
private long pointer;                   // next write offset; also the current length

/**
 * Creates an empty buffer backed by a pooled {@link SegmentedByteArray}
 * with 2^14-byte segments.
 */
public ByteArrayBuffer() {
    this.data = new SegmentedByteArray(new ByteSegmentPool(14));
    this.pointer = 0;
}

/**
 * @deprecated Use zero-argument constructor instead.
 */
@Deprecated
public ByteArrayBuffer(int initialSize) {
    // initialSize is ignored; the segmented backing array grows on demand.
    this();
}
/**
 * Copies the contents of the specified buffer into this buffer at the current position.
 */
public void write(ByteArrayBuffer buf) {
    // Segment-aware bulk copy; advances the write pointer past the appended bytes.
    data.copy(buf.data, 0, pointer, buf.length());
    pointer += buf.length();
}
/**
 * Writes a variable-byte encoded integer to the byte array.
 *
 * Seven value bits are stored per byte, with the high bit flagging a
 * continuation.  The lone byte 0x80 is reserved as the encoding of -1;
 * all other negative values are written as a full five-byte sequence.
 */
public void writeVInt(int value) {
    if(value == -1) {
        writeByte((byte)0x80);
        return;
    } else if(value < 0) {
        // Negative values always occupy five bytes (all 32 bits significant).
        writeByte((byte)(0x80 | ((value >>> 28))));
        writeByte((byte)(0x80 | ((value >>> 21) & 0x7F)));
        writeByte((byte)(0x80 | ((value >>> 14) & 0x7F)));
        writeByte((byte)(0x80 | ((value >>> 7) & 0x7F)));
        writeByte((byte)(value & 0x7F));
    } else {
        // Emit only as many leading bytes as the magnitude requires.
        if(value > 0x0FFFFFFF) writeByte((byte)(0x80 | ((value >>> 28))));
        if(value > 0x1FFFFF) writeByte((byte)(0x80 | ((value >>> 21) & 0x7F)));
        if(value > 0x3FFF) writeByte((byte)(0x80 | ((value >>> 14) & 0x7F)));
        if(value > 0x7F) writeByte((byte)(0x80 | ((value >>> 7) & 0x7F)));
        writeByte((byte)(value & 0x7F));
    }
}
/**
* Writes a variable-byte encoded integer to the byte array.
*/
public void writeVLong(long value) {
if(value < 0) {
writeByte((byte)0x80);
return;
} else {
if(value > 0xFFFFFFFFFFFFFFL) writeByte((byte)(0x80 | ((value >>> 56) & 0x7FL)));
if(value > 0x1FFFFFFFFFFFFL) writeByte((byte)(0x80 | ((value >>> 49) & 0x7FL)));
if(value > 0x3FFFFFFFFFFL) writeByte((byte)(0x80 | ((value >>> 42) & 0x7FL)));
if(value > 0x7FFFFFFFFL) writeByte((byte)(0x80 | ((value >>> 35) & 0x7FL)));
if(value > 0xFFFFFFFL) writeByte((byte)(0x80 | ((value >>> 28) & 0x7FL)));
if(value > 0x1FFFFFL) writeByte((byte)(0x80 | ((value >>> 21) & 0x7FL)));
if(value > 0x3FFFL) writeByte((byte)(0x80 | ((value >>> 14) & 0x7FL)));
if(value > 0x7FL) writeByte((byte)(0x80 | ((value >>> 7) & 0x7FL)));
writeByte((byte)(value & 0x7F));
}
}
/**
* The current length of the written data, in bytes.
*/
public long length() {
return pointer;
}
/**
* Sets the length of the written data to 0.
*/
public void reset() {
pointer = 0;
}
/**
* @return The underlying SegmentedByteArray containing the written data.
*/
public SegmentedByteArray getData() {
return data;
}
/**
* Writes a byte of data.
*/
public void writeByte(byte b) {
data.set(pointer++, b);
}
/**
* Writes each byte of data, in order.
*/
public void write(byte[] data) {
for(int i=0;i<data.length;i++) {
writeByte(data[i]);
}
}
/**
* Copies the written data to the given <code>OutputStream</code>
*/
public void copyTo(OutputStream os) throws IOException {
data.writeTo(os, 0, pointer);
}
}
| 9,759 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressor/BitSetPropertyBuilder.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressor;
import static com.netflix.nfgraph.OrdinalIterator.NO_MORE_ORDINALS;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.compressed.BitSetOrdinalSet;
import com.netflix.nfgraph.util.ByteArrayBuffer;
/**
* This class is used by {@link NFCompressedGraphBuilder} to write sets of ordinals represented with bit sets.<p>
*
* It is unlikely that this class will need to be used externally.
*
* @see BitSetOrdinalSet
*/
public class BitSetPropertyBuilder {

    /** Destination buffer shared with the graph builder. */
    private final ByteArrayBuffer buf;

    public BitSetPropertyBuilder(ByteArrayBuffer buf) {
        this.buf = buf;
    }

    /**
     * Appends a bit-set encoding of the given ordinals, sized to hold
     * {@code numBits} bits (one bit per possible ordinal).
     */
    public void buildProperty(OrdinalSet ordinals, int numBits) {
        buf.write(buildBitSetData(numBits, ordinals.iterator()));
    }

    // One byte per 8 bits, rounded up; bit (ordinal & 7) of byte (ordinal >> 3)
    // is set for each member ordinal.
    private byte[] buildBitSetData(int numBits, OrdinalIterator iter) {
        byte[] bits = new byte[((numBits - 1) / 8) + 1];
        for (int ordinal = iter.nextOrdinal(); ordinal != NO_MORE_ORDINALS; ordinal = iter.nextOrdinal()) {
            bits[ordinal >> 3] |= (byte) (1 << (ordinal & 0x07));
        }
        return bits;
    }
}
| 9,760 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressor/CompactPropertyBuilder.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressor;
import java.util.Arrays;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.compressed.CompactOrdinalSet;
import com.netflix.nfgraph.util.ByteArrayBuffer;
/**
* This class is used by {@link NFCompressedGraphBuilder} to write sets of ordinals represented as variable-byte deltas.<p>
*
* It is unlikely that this class will need to be used externally.
*
* @see CompactOrdinalSet
*/
public class CompactPropertyBuilder {

    /** Destination buffer shared with the graph builder. */
    private final ByteArrayBuffer buf;

    public CompactPropertyBuilder(ByteArrayBuffer buf) {
        this.buf = buf;
    }

    /**
     * Appends the set in ascending order as variable-byte encoded deltas
     * between consecutive ordinals (the first delta is from 0).
     */
    public void buildProperty(OrdinalSet ordinalSet) {
        int[] ordinals = ordinalSet.asArray();
        Arrays.sort(ordinals);
        int previous = 0;
        for (int ordinal : ordinals) {
            buf.writeVInt(ordinal - previous);
            previous = ordinal;
        }
    }
}
| 9,761 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressor/NFCompressedGraphBuilder.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressor;
import com.netflix.nfgraph.NFGraphModelHolder;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.build.NFBuildGraph;
import com.netflix.nfgraph.build.NFBuildGraphNode;
import com.netflix.nfgraph.build.NFBuildGraphNodeCache;
import com.netflix.nfgraph.build.NFBuildGraphNodeList;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
import com.netflix.nfgraph.compressed.NFCompressedGraphLongPointers;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
import com.netflix.nfgraph.spec.NFPropertySpec;
import com.netflix.nfgraph.util.ByteArrayBuffer;
/**
* <code>NFCompressedGraphBuilder</code> is used by {@link NFBuildGraph#compress()} to create an {@link NFCompressedGraph}.<p>
*
* It is unlikely that this class will need to be used externally.
*/
public class NFCompressedGraphBuilder {
// Schema describing the node types and properties to serialize.
private final NFGraphSpec graphSpec;
// Supplies the built nodes per type and the node count per type.
private final NFBuildGraphNodeCache buildGraphNodeCache;
// Ordered connection models; model-specific properties are serialized once per model.
private final NFGraphModelHolder modelHolder;
// Destination buffer for the entire serialized graph.
private final ByteArrayBuffer graphBuffer;
// Scratch buffer accumulating all models' sets for one model-specific property.
private final ByteArrayBuffer modelBuffer;
// Scratch buffer holding the staged encoding of a single property set.
private final ByteArrayBuffer fieldBuffer;
private final CompactPropertyBuilder compactPropertyBuilder;
private final HashedPropertyBuilder hashedPropertyBuilder;
private final BitSetPropertyBuilder bitSetPropertyBuilder;
// Records, per node type, the byte offset at which each ordinal's data begins.
private final NFCompressedGraphLongPointers compressedGraphPointers;
public NFCompressedGraphBuilder(NFGraphSpec graphSpec, NFBuildGraphNodeCache buildGraphNodeCache, NFGraphModelHolder modelHolder) {
this.graphSpec = graphSpec;
this.buildGraphNodeCache = buildGraphNodeCache;
this.modelHolder = modelHolder;
this.graphBuffer = new ByteArrayBuffer();
this.modelBuffer = new ByteArrayBuffer();
this.fieldBuffer = new ByteArrayBuffer();
// All three property builders stage their output in the shared fieldBuffer;
// each encoding is copied out and the buffer reset before the next property.
this.compactPropertyBuilder = new CompactPropertyBuilder(fieldBuffer);
this.hashedPropertyBuilder = new HashedPropertyBuilder(fieldBuffer);
this.bitSetPropertyBuilder = new BitSetPropertyBuilder(fieldBuffer);
this.compressedGraphPointers = new NFCompressedGraphLongPointers();
}
/**
* Serializes every node of every node type into the graph buffer and returns
* the resulting compressed graph together with its per-ordinal pointers.
*/
public NFCompressedGraph buildGraph() {
for(String nodeType : graphSpec.getNodeTypes()) {
NFBuildGraphNodeList nodeOrdinals = buildGraphNodeCache.getNodes(nodeType);
addNodeType(nodeType, nodeOrdinals);
}
return new NFCompressedGraph(graphSpec, modelHolder, graphBuffer.getData(), graphBuffer.length(), compressedGraphPointers);
}
// Serializes every node of one type, recording the start offset of each
// ordinal's data. Ordinals with no node are recorded as -1 (no data written).
private void addNodeType(String nodeType, NFBuildGraphNodeList nodes) {
NFNodeSpec nodeSpec = graphSpec.getNodeSpec(nodeType);
long ordinalPointers[] = new long[nodes.size()];
for(int i=0;i<nodes.size();i++) {
NFBuildGraphNode node = nodes.get(i);
if(node != null) {
ordinalPointers[i] = graphBuffer.length();
serializeNode(node, nodeSpec);
} else {
ordinalPointers[i] = -1;
}
}
compressedGraphPointers.addPointers(nodeType, ordinalPointers);
}
// Serializes a node's properties in spec declaration order; readers rely on
// this ordering to locate each property.
private void serializeNode(NFBuildGraphNode node, NFNodeSpec nodeSpec) {
for(NFPropertySpec propertySpec : nodeSpec.getPropertySpecs()) {
serializeProperty(node, propertySpec);
}
}
// Model-specific properties: each model's set is staged in modelBuffer, then
// the whole run is copied to the graph buffer with a length prefix.
// Global properties are written directly to the graph buffer.
private void serializeProperty(NFBuildGraphNode node, NFPropertySpec propertySpec) {
if(propertySpec.isConnectionModelSpecific()) {
for(int i=0;i<modelHolder.size();i++) {
serializeProperty(node, propertySpec, i, modelBuffer);
}
copyBuffer(modelBuffer, graphBuffer);
} else {
serializeProperty(node, propertySpec, 0, graphBuffer);
}
}
// Single-valued properties are one vInt; a missing connection (-1) is encoded
// as the sentinel byte 0x80.
private void serializeProperty(NFBuildGraphNode node, NFPropertySpec propertySpec, int connectionModelIndex, ByteArrayBuffer toBuffer) {
if(propertySpec.isMultiple()) {
serializeMultipleProperty(node, propertySpec, connectionModelIndex, toBuffer);
} else {
int connection = node.getConnection(connectionModelIndex, propertySpec);
if(connection == -1) {
toBuffer.writeByte((byte)0x80);
} else {
toBuffer.writeVInt(connection);
}
}
}
// Encodes a set using the spec's preferred representation (hashed or compact),
// falling back to a bit set whenever that would be at least as small.
// Headers written before the staged bytes:
//   hashed  -> one byte: 32 - numberOfLeadingZeros(byte count)
//              (hashed tables are sized to a power of two, see HashedPropertyBuilder)
//   compact -> vInt byte count
//   bit set -> sentinel byte 0x80
private void serializeMultipleProperty(NFBuildGraphNode node, NFPropertySpec propertySpec, int connectionModelIndex, ByteArrayBuffer toBuffer) {
OrdinalSet connections = node.getConnectionSet(connectionModelIndex, propertySpec);
int numBitsInBitSet = buildGraphNodeCache.numNodes(propertySpec.getToNodeType());
int bitSetSize = ((numBitsInBitSet - 1) / 8) + 1;
if(connections.size() < bitSetSize) {
if(propertySpec.isHashed()) {
hashedPropertyBuilder.buildProperty(connections);
if(fieldBuffer.length() < bitSetSize) {
int log2BytesUsed = 32 - Integer.numberOfLeadingZeros((int)fieldBuffer.length());
toBuffer.writeByte((byte)log2BytesUsed);
toBuffer.write(fieldBuffer);
fieldBuffer.reset();
return;
}
} else {
compactPropertyBuilder.buildProperty(connections);
if(fieldBuffer.length() < bitSetSize) {
toBuffer.writeVInt((int)fieldBuffer.length());
toBuffer.write(fieldBuffer);
fieldBuffer.reset();
return;
}
}
// Preferred encoding ended up no smaller than a bit set: discard it and fall through.
fieldBuffer.reset();
}
bitSetPropertyBuilder.buildProperty(connections, numBitsInBitSet);
toBuffer.writeByte((byte)0x80);
toBuffer.write(fieldBuffer);
fieldBuffer.reset();
}
// Writes the staged buffer's length as a vInt followed by its contents, then
// clears it. NOTE: the int cast assumes one property run stays under 2GB.
private void copyBuffer(ByteArrayBuffer from, ByteArrayBuffer to) {
to.writeVInt((int)from.length());
to.write(from);
from.reset();
}
}
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressor/HashedPropertyBuilder.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressor;
import static com.netflix.nfgraph.OrdinalIterator.NO_MORE_ORDINALS;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.compressed.HashSetOrdinalSet;
import com.netflix.nfgraph.util.ByteArrayBuffer;
import com.netflix.nfgraph.util.Mixer;
/**
* This class is used by {@link NFCompressedGraphBuilder} to write sets of ordinals represented as variable-byte hashed integer arrays.<p>
*
* It is unlikely that this class will need to be used externally.
*
* @see HashSetOrdinalSet
*/
public class HashedPropertyBuilder {
// Destination buffer shared with the graph builder.
private ByteArrayBuffer buf;
public HashedPropertyBuilder(ByteArrayBuffer buf) {
this.buf = buf;
}
// Serializes the set as an open-addressed byte hash table of variable-byte
// encoded (ordinal + 1) keys. An empty set writes nothing at all.
public void buildProperty(OrdinalSet ordinals) {
if(ordinals.size() == 0)
return;
byte data[] = buildHashedPropertyData(ordinals);
buf.write(data);
}
// Allocates a power-of-two sized table (load factor kept under ~75%) and
// inserts every ordinal.
private byte[] buildHashedPropertyData(OrdinalSet ordinals) {
byte data[] = new byte[calculateByteArraySize(ordinals)];
OrdinalIterator iter = ordinals.iterator();
int ordinal = iter.nextOrdinal();
while(ordinal != NO_MORE_ORDINALS) {
put(ordinal, data);
ordinal = iter.nextOrdinal();
}
return data;
}
// Inserts one value. Values are stored as (ordinal + 1) so a zero byte always
// means "empty slot". On collision, probes linearly for the next empty byte.
private void put(int value, byte data[]) {
value += 1;
// data.length is a power of two, so the mask is a cheap modulo.
int bucket = Mixer.hashInt(value) & (data.length - 1);
if (data[bucket] != 0) {
bucket = nextEmptyByte(data, bucket);
}
writeKey(value, bucket, data);
}
// Writes the var-byte key at offset, first shifting neighboring keys to make
// room for its full length.
private void writeKey(int value, int offset, byte data[]) {
int numBytes = calculateVIntSize(value);
ensureSpaceIsAvailable(numBytes, offset, data);
writeVInt(value, offset, data, numBytes);
}
// Var-byte layout: the first (most significant) byte has the high bit CLEAR
// and every following byte has it SET -- the reverse of the builder-side
// continuation convention -- so a clear high bit marks the start of a key.
// Bytes wrap around the end of the array.
private void writeVInt(int value, int offset, byte data[], int numBytes) {
int b = (value >>> (7 * (numBytes - 1))) & 0x7F;
data[offset] = (byte)b;
offset = nextOffset(data.length, offset);
for (int i = numBytes - 2; i >= 0; i--) {
b = (value >>> (7 * i)) & 0x7F;
data[offset] = (byte)(b | 0x80);
offset = nextOffset(data.length, offset);
}
}
// Circular increment within the table.
private int nextOffset(int length, int offset) {
offset++;
if (offset == length)
offset = 0;
return offset;
}
// Circular decrement within the table.
private int previousOffset(int length, int offset) {
offset--;
if (offset == -1)
offset = length - 1;
return offset;
}
// Scans forward from offset until requiredSpace empty bytes have been seen,
// then slides the occupied bytes found along the way toward the end of that
// window so a contiguous run of requiredSpace free bytes begins at offset.
private void ensureSpaceIsAvailable(int requiredSpace, int offset, byte data[]) {
int copySpaces = 0;
int foundSpace = 1;
int currentOffset = offset;
while (foundSpace < requiredSpace) {
currentOffset = nextOffset(data.length, currentOffset);
if (data[currentOffset] == 0) {
foundSpace++;
} else {
copySpaces++;
}
}
int moveToOffset = currentOffset;
currentOffset = previousOffset(data.length, currentOffset);
// Walk backwards, moving each occupied byte to the rear of the scanned window.
while (copySpaces > 0) {
if (data[currentOffset] != 0) {
data[moveToOffset] = data[currentOffset];
copySpaces--;
moveToOffset = previousOffset(data.length, moveToOffset);
}
currentOffset = previousOffset(data.length, currentOffset);
}
}
// Linear probe for the next zero (empty) byte, wrapping at the end.
private int nextEmptyByte(byte data[], int offset) {
while (data[offset] != 0) {
offset = nextOffset(data.length, offset);
}
return offset;
}
// Table size: total encoded key bytes, padded for the load factor, rounded up
// to a power of two (required by the bucket mask in put()).
private int calculateByteArraySize(OrdinalSet ordinals) {
int numPopulatedBytes = calculateNumPopulatedBytes(ordinals.iterator());
return calculateByteArraySizeAfterLoadFactor(numPopulatedBytes);
}
// Sum of the var-byte encoded sizes of every (ordinal + 1) key.
private int calculateNumPopulatedBytes(OrdinalIterator ordinalIterator) {
int totalSize = 0;
int ordinal = ordinalIterator.nextOrdinal();
while(ordinal != NO_MORE_ORDINALS) {
totalSize += calculateVIntSize(ordinal + 1);
ordinal = ordinalIterator.nextOrdinal();
}
return totalSize;
}
// Number of 7-bit groups needed to encode value.
private int calculateVIntSize(int value) {
int numBitsSet = numBitsUsed(value);
return ((numBitsSet - 1) / 7) + 1;
}
// Smallest power of two strictly greater than (numPopulatedBytes * 4 / 3).
private int calculateByteArraySizeAfterLoadFactor(int numPopulatedBytes) {
int desiredSizeAfterLoadFactor = (numPopulatedBytes * 4) / 3;
int nextPowerOfTwo = 1 << numBitsUsed(desiredSizeAfterLoadFactor);
return nextPowerOfTwo;
}
// Index of the highest set bit plus one (0 when value == 0).
private int numBitsUsed(int value) {
return 32 - Integer.numberOfLeadingZeros(value);
}
}
| 9,763 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/spec/NFPropertySpec.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.spec;
import com.netflix.nfgraph.build.NFBuildGraph;
import com.netflix.nfgraph.compressed.BitSetOrdinalSet;
import com.netflix.nfgraph.compressed.CompactOrdinalSet;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
/**
* This class defines a specification for a single property.<p>
*
* The recommended interface for creating a property is to instantiate with the flag method below.<p>
*
* By default, an <code>NFPropertySpec</code> is {@link #GLOBAL}, {@link #MULTIPLE}, and {@link #COMPACT}.<p>
*
* <pre>
* {@code
* import static com.netflix.nfgraph.spec.NFPropertySpec.*;
*
* ...
*
* NFPropertySpec spec1 = new NFPropertySpec( "property1", "foreignNodeType1", MULTIPLE | HASH );
* NFPropertySpec spec2 = new NFPropertySpec( "property2", "foreignNodeType2", MULTIPLE | COMPACT | MODEL_SPECIFIC);
* NFPropertySpec spec3 = new NFPropertySpec( "property3", "foreignNodeType3", SINGLE );
* }
* </pre>
*
*/
public class NFPropertySpec {

    /**
     * A property spec instantiated with this flag will not be separable into connection models.
     */
    public static final int GLOBAL = 0x00;

    /**
     * A property spec instantiated with this flag will be separable into connection models.
     */
    public static final int MODEL_SPECIFIC = 0x01;

    /**
     * A property spec instantiated with this flag will be allowed multiple connections.
     */
    public static final int MULTIPLE = 0x00;

    /**
     * A property spec instantiated with this flag will be allowed only a single connection.
     */
    public static final int SINGLE = 0x02;

    /**
     * A MULTIPLE property instantiated with this flag is stored as a hashed set of
     * variable-byte encoded ordinals in a compressed graph.
     */
    public static final int HASH = 0x04;

    /**
     * A MULTIPLE property instantiated with this flag is stored as compact
     * variable-byte ordinal deltas in a compressed graph.
     */
    public static final int COMPACT = 0x00;

    private final boolean isGlobal;
    private final boolean isMultiple;
    private final boolean isHashed;
    private final String name;
    private final String toNodeType;
    // Assigned later by the node spec; position among the node type's
    // single-connection or multiple-connection properties.
    private int propertyIndex;

    /**
     * The recommended constructor.
     *
     * @param name the name of the property
     * @param toNodeType the node type to which this property connects
     * @param flags a bitwise-or of the flag constants defined on this class, e.g.
     *        {@code GLOBAL | MULTIPLE | COMPACT}
     */
    public NFPropertySpec(String name, String toNodeType, int flags) {
        // Decode the flag bits once, then delegate to the explicit constructor.
        this(name,
             toNodeType,
             (flags & MODEL_SPECIFIC) == 0,
             (flags & SINGLE) == 0,
             (flags & HASH) != 0);
    }

    public NFPropertySpec(String name, String toNodeType, boolean isGlobal, boolean isMultiple, boolean isHashed) {
        this.name = name;
        this.toNodeType = toNodeType;
        this.isGlobal = isGlobal;
        this.isMultiple = isMultiple;
        this.isHashed = isHashed;
    }

    public boolean isConnectionModelSpecific() {
        return !isGlobal;
    }

    public boolean isGlobal() {
        return isGlobal;
    }

    public boolean isMultiple() {
        return isMultiple;
    }

    public boolean isSingle() {
        return !isMultiple;
    }

    public boolean isHashed() {
        return isHashed;
    }

    public boolean isCompact() {
        return !isHashed;
    }

    public String getName() {
        return name;
    }

    public String getToNodeType() {
        return toNodeType;
    }

    // Called by the node spec while numbering its properties.
    void setPropertyIndex(int propertyIndex) {
        this.propertyIndex = propertyIndex;
    }

    /**
     * Used by the build graph.
     *
     * It is unlikely that this method will be required externally.
     */
    public int getPropertyIndex() {
        return this.propertyIndex;
    }
}
| 9,764 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/spec/NFNodeSpec.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.spec;
import java.util.Iterator;
import com.netflix.nfgraph.exception.NFGraphException;
import com.netflix.nfgraph.util.ArrayIterator;
/**
* An <code>NFNodeSpec</code> specifies the schema for a node type.
*
* It is defined by a node name and a number of {@link NFPropertySpec}.
*/
public class NFNodeSpec implements Iterable<NFPropertySpec> {

    private final String nodeTypeName;
    private final NFPropertySpec[] propertySpecs;
    /** Count of single-connection properties declared for this node type. */
    private final int numSingleProperties;
    /** Count of multiple-connection properties declared for this node type. */
    private final int numMultipleProperties;

    /**
     * The constructor for an <code>NFNodeSpec</code>.
     *
     * Each property spec is assigned an index, counted separately across the
     * single-connection and multiple-connection properties.
     *
     * @param nodeTypeName the name of the node type
     * @param propertySpecs a complete listing of the properties available for this node type
     */
    public NFNodeSpec(String nodeTypeName, NFPropertySpec... propertySpecs) {
        this.nodeTypeName = nodeTypeName;
        this.propertySpecs = propertySpecs;
        int singleCount = 0;
        int multipleCount = 0;
        for (NFPropertySpec spec : propertySpecs) {
            if (spec.isSingle()) {
                spec.setPropertyIndex(singleCount++);
            } else {
                spec.setPropertyIndex(multipleCount++);
            }
        }
        this.numSingleProperties = singleCount;
        this.numMultipleProperties = multipleCount;
    }

    public String getNodeTypeName() {
        return nodeTypeName;
    }

    public NFPropertySpec[] getPropertySpecs() {
        return propertySpecs;
    }

    /**
     * @return the property spec with the given name
     * @throws NFGraphException if no such property is defined for this node type
     */
    public NFPropertySpec getPropertySpec(String propertyName) {
        for (int i = 0; i < propertySpecs.length; i++) {
            if (propertySpecs[i].getName().equals(propertyName)) {
                return propertySpecs[i];
            }
        }
        throw new NFGraphException("Property " + propertyName + " is undefined for node type " + nodeTypeName);
    }

    public int getNumSingleProperties() {
        return numSingleProperties;
    }

    public int getNumMultipleProperties() {
        return numMultipleProperties;
    }

    @Override
    public Iterator<NFPropertySpec> iterator() {
        return new ArrayIterator<NFPropertySpec>(propertySpecs);
    }
}
| 9,765 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/spec/NFGraphSpec.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.spec;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import com.netflix.nfgraph.exception.NFGraphException;
/**
* An <code>NFGraphSpec</code> defines the schema for a graph. It contains a mapping of node type
* name to {@link NFNodeSpec}s.<p>
*
* The example code below will create two node types "a" and "b". An "a" node can be connected to "b" nodes
* via the properties "a-to-one-b" and/or "a-to-many-b". A "b" node can be connected to "a" nodes via the property
* "b-to-many-a".<p>
*
* <pre>
* {@code
*
* NFGraphSpec spec = new NFGraphSpec(
* new NFNodeSpec(
* "a",
* new NFPropertySpec("a-to-one-b", "b", NFPropertySpec.SINGLE),
* new NFPropertySpec("a-to-many-b", "b", NFPropertySpec.MULTIPLE | NFPropertySpec.COMPACT)
* ),
* new NFNodeSpec(
* "b",
* new NFPropertySpec("b-to-many-a", "a", NFPropertySpec.MULTIPLE | NFPropertySpec.HASH)
* )
* );
*
* }
* </pre>
*
* @see NFNodeSpec
* @see NFPropertySpec
*/
public class NFGraphSpec implements Iterable<NFNodeSpec> {

    /** Maps a node type name to its spec. */
    private final Map<String, NFNodeSpec> nodeSpecs;

    /**
     * Instantiate a graph specification with no node specs.
     */
    public NFGraphSpec() {
        this.nodeSpecs = new HashMap<String, NFNodeSpec>();
    }

    /**
     * Instantiate a graph specification containing each of the given node specs.
     */
    public NFGraphSpec(NFNodeSpec... nodeTypes) {
        this();
        for (int i = 0; i < nodeTypes.length; i++) {
            addNodeSpec(nodeTypes[i]);
        }
    }

    /**
     * @return the node spec registered for the specified node type
     * @throws NFGraphException if the node type was never registered
     */
    public NFNodeSpec getNodeSpec(String nodeType) {
        NFNodeSpec spec = nodeSpecs.get(nodeType);
        if (spec != null) {
            return spec;
        }
        throw new NFGraphException("Node spec " + nodeType + " is undefined");
    }

    /**
     * Add a node type to this graph specification, replacing any spec
     * previously registered under the same name.
     */
    public void addNodeSpec(NFNodeSpec nodeSpec) {
        nodeSpecs.put(nodeSpec.getNodeTypeName(), nodeSpec);
    }

    /**
     * @return the number of node types defined by this graph specification
     */
    public int size() {
        return nodeSpecs.size();
    }

    /**
     * @return a fresh mutable list of the registered node type names
     */
    public List<String> getNodeTypes() {
        return new ArrayList<String>(nodeSpecs.keySet());
    }

    /**
     * @return an iterator over the node specs contained in this graph specification
     */
    @Override
    public Iterator<NFNodeSpec> iterator() {
        return nodeSpecs.values().iterator();
    }
}
| 9,766 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressed/BitSetOrdinalSet.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.compressor.NFCompressedGraphBuilder;
import com.netflix.nfgraph.spec.NFPropertySpec;
import com.netflix.nfgraph.util.ByteArrayReader;
/**
* An implementation of {@link OrdinalSet}, returned for connections represented as a bit set in an {@link NFCompressedGraph}.<p>
*
* A bit set representation contains a single bit per ordinal in the type to which the connections point. If the bit at the
* position for a given ordinal is set, then there is a connection to that ordinal in this set.<p>
*
* Because determining membership in a set requires only checking whether the bit at a given position is set, <code>contains()</code>
* is an <code>O(1)</code> operation.<p>
*
* This representation will automatically be chosen for a set by the {@link NFCompressedGraphBuilder} when it requires fewer bytes than
* the configured representation (either {@link NFPropertySpec#COMPACT} or {@link NFPropertySpec#HASH}).
*
* @see <a href="https://github.com/Netflix/netflix-graph/wiki/Compact-representations">Compact Representations</a>
*/
public class BitSetOrdinalSet extends OrdinalSet {

    /** Bit set data: bit (value & 7) of byte (value >>> 3) marks membership of ordinal value. */
    private final ByteArrayReader reader;

    public BitSetOrdinalSet(ByteArrayReader reader) {
        this.reader = reader;
    }

    /**
     * O(1) membership test: checks the single bit at the ordinal's position.
     * Ordinals beyond the end of the bit set are reported as absent.
     */
    @Override
    public boolean contains(int value) {
        int offset = value >>> 3;
        int mask = 1 << (value & 0x07);
        if (offset >= reader.length())
            return false;
        return (reader.getByte(offset) & mask) != 0;
    }

    @Override
    public OrdinalIterator iterator() {
        return new BitSetOrdinalIterator(reader);
    }

    /**
     * Counts the set bits across the whole bit set; O(n) in its byte length.
     */
    @Override
    public int size() {
        int cardinalitySum = 0;
        for (int i = 0; i < reader.length(); i++) {
            // Integer.bitCount is a JVM intrinsic (POPCNT on supporting CPUs);
            // replaces the previous hand-rolled 256-entry lookup table.
            cardinalitySum += Integer.bitCount(reader.getByte(i) & 0xFF);
        }
        return cardinalitySum;
    }
}
| 9,767 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressed/CompactOrdinalIterator.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.util.ByteArrayReader;
/**
* An implementation of {@link OrdinalIterator} returned for ordinals represented as variable-byte deltas in an {@link NFCompressedGraph}.
*
* @see CompactOrdinalSet
*/
public class CompactOrdinalIterator implements OrdinalIterator {
private final ByteArrayReader arrayReader;
private int currentOrdinal = 0;
CompactOrdinalIterator(ByteArrayReader arrayReader) {
this.arrayReader = arrayReader;
}
@Override
public int nextOrdinal() {
int delta = arrayReader.readVInt();
if(delta == -1)
return NO_MORE_ORDINALS;
currentOrdinal += delta;
return currentOrdinal;
}
@Override
public void reset() {
arrayReader.reset();
currentOrdinal = 0;
}
@Override
public OrdinalIterator copy() {
return new CompactOrdinalIterator(arrayReader.copy());
}
@Override
public boolean isOrdered() {
return true;
}
} | 9,768 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressed/BitSetOrdinalIterator.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.util.ByteArrayReader;
/**
* An implementation of {@link OrdinalIterator} returned for ordinals represented as bit sets in an {@link NFCompressedGraph}.
*
* @see BitSetOrdinalSet
*/
public class BitSetOrdinalIterator implements OrdinalIterator {
// Bit set data: bit (i & 7) of byte (i >>> 3) is set when ordinal i is a member.
private final ByteArrayReader reader;
// Absolute bit position of the next bit to examine.
public int offset;
public BitSetOrdinalIterator(ByteArrayReader reader) {
this.reader = reader;
}
/**
* {@inheritDoc}
*
* Scans forward from the current bit position and returns the position of the
* next set bit, or NO_MORE_ORDINALS once the bit set is exhausted.
*/
@Override
public int nextOrdinal() {
if(offset >>> 3 == reader.length())
return NO_MORE_ORDINALS;
// Fast-forward over wholly empty bytes before testing individual bits.
skipToNextPopulatedByte();
while(moreBytesToRead()) {
if(testCurrentBit()) {
return offset++;
}
offset++;
}
return NO_MORE_ORDINALS;
}
/**
* {@inheritDoc}
*/
@Override
public void reset() {
offset = 0;
}
/**
* {@inheritDoc}
*
* The copy shares this iterator's reader (safe: iteration state lives solely
* in the offset field) and starts from the beginning of the bit set.
*/
@Override
public OrdinalIterator copy() {
return new BitSetOrdinalIterator(reader);
}
/**
* @return <code>true</code>
*/
@Override
public boolean isOrdered() {
return true;
}
// If no set bit remains at or above the current position within the current
// byte, advance to the next byte boundary, then skip whole zero bytes.
private void skipToNextPopulatedByte() {
if(moreBytesToRead()
&& (currentByte() >>> (offset & 0x07)) == 0) {
offset += 0x08;
offset &= ~0x07;
while(moreBytesToRead() && currentByte() == 0)
offset += 0x08;
}
}
// Whether the current bit position still falls within the bit set's bytes.
private boolean moreBytesToRead() {
return (offset >>> 3) < reader.length();
}
// Tests the bit at the current absolute bit position.
private boolean testCurrentBit() {
int b = currentByte();
return (b & (1 << (offset & 0x07))) != 0;
}
// The byte containing the current bit position.
private byte currentByte() {
return reader.getByte(offset >>> 3);
}
}
| 9,769 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphLongPointers.java | /*
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import java.util.HashMap;
import java.util.Map;
import com.netflix.nfgraph.exception.NFGraphException;
/**
* This class holds all of the offsets into the {@link NFCompressedGraph}'s byte array.<p>
*
* This class maintains a mapping of type name to int array. For a given type, the offset in the {@link NFCompressedGraph}'s byte array
* where the connections for a given node are encoded is equal to the value of the int array for the node's type at the index for the node's ordinal.<p>
*
* It is unlikely that this class will need to be used externally.
*/
/**
 * This class holds all of the offsets into the {@link NFCompressedGraph}'s byte array.<p>
 *
 * This class maintains a mapping of type name to long array.  For a given type, the offset in the {@link NFCompressedGraph}'s byte array
 * where the connections for a given node are encoded is equal to the value of the long array for the node's type at the index for the node's ordinal.<p>
 *
 * It is unlikely that this class will need to be used externally.
 */
public class NFCompressedGraphLongPointers implements NFCompressedGraphPointers {

    private final Map<String, long[]> pointersByOrdinal;

    public NFCompressedGraphLongPointers() {
        this.pointersByOrdinal = new HashMap<String, long[]>();
    }

    /**
     * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified
     *         by the given type and ordinal, or -1 if the ordinal is out of range.
     * @throws NFGraphException if the given node type is not defined.
     */
    public long getPointer(String nodeType, int ordinal) {
        long[] pointers = pointersForType(nodeType);
        if(ordinal < pointers.length)
            return pointers[ordinal];
        return -1;
    }

    /** Registers the pointer array for the given node type, replacing any previous one. */
    public void addPointers(String nodeType, long pointers[]) {
        pointersByOrdinal.put(nodeType, pointers);
    }

    /**
     * @return the number of pointers (one per node ordinal) held for the given type.
     * @throws NFGraphException if the given node type is not defined
     *         (previously this surfaced as a bare NullPointerException).
     */
    public int numPointers(String nodeType) {
        return pointersForType(nodeType).length;
    }

    /** @return the live backing map of node type to pointer array (not a copy). */
    public Map<String, long[]> asMap() {
        return pointersByOrdinal;
    }

    /** Looks up the pointer array for a node type, failing descriptively when undefined. */
    private long[] pointersForType(String nodeType) {
        long[] pointers = pointersByOrdinal.get(nodeType);
        if(pointers == null)
            throw new NFGraphException("Undefined node type: " + nodeType);
        return pointers;
    }
}
| 9,770 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressed/CompactOrdinalSet.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import java.util.Arrays;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.spec.NFPropertySpec;
import com.netflix.nfgraph.util.ByteArrayReader;
/**
* An implementation of {@link OrdinalSet}, returned for connections represented as variable-byte deltas in an {@link NFCompressedGraph}.<p>
*
* A variable-byte delta representation contains between one and five bytes per connection.
* The ordinals in the set are sorted ascending, then encoded as the difference between each ordinal and the last ordinal.<p>
*
* For example, the values [ 7, 11, 13, 21 ] will be encoded as [ 7, 4, 2, 8 ].<p>
*
* This is done because smaller values can be represented in fewer bytes.<p>
*
* Because each value can only be derived using the previous value, <code>contains()</code> is an <code>O(n)</code> operation.<p>
*
* This representation for a connection set can be configured for an {@link NFPropertySpec} using {@link NFPropertySpec#COMPACT}.
*
* @see <a href="http://techblog.netflix.com/2013/01/netflixgraph-metadata-library_18.html">The Netflix Tech Blog</a>
* @see <a href="http://en.wikipedia.org/wiki/Variable-length_quantity">Variable-length quantity</a>
* @see <a href="https://github.com/Netflix/netflix-graph/wiki/Compact-representations">Compact Representations</a>
*/
/**
 * An {@link OrdinalSet} over variable-byte delta-encoded ordinals in an {@link NFCompressedGraph}.
 * <p>
 * Ordinals are stored sorted ascending as deltas from the previous value, so each value can
 * only be recovered by decoding everything before it; membership tests are therefore O(n).
 * The cardinality is computed lazily on first use and cached.
 */
public class CompactOrdinalSet extends OrdinalSet {

    private final ByteArrayReader reader;

    // Lazily counted cardinality; MIN_VALUE is the "not yet computed" sentinel.
    private int cachedSize = Integer.MIN_VALUE;

    public CompactOrdinalSet(ByteArrayReader reader) {
        this.reader = reader;
    }

    /** Sequential scan; relies on the encoded ordinals being sorted ascending. */
    @Override
    public boolean contains(int value) {
        OrdinalIterator it = iterator();
        int current;
        do {
            current = it.nextOrdinal();
        } while (current < value);
        return current == value;
    }

    /**
     * Single merge-style pass over the sorted set and the (sorted in place) query values.
     * Note: the caller's array is sorted as a side effect.
     */
    @Override
    public boolean containsAll(int... values) {
        Arrays.sort(values);
        OrdinalIterator it = iterator();
        int current = it.nextOrdinal();
        int matched = 0;
        while (matched < values.length) {
            if (current > values[matched])
                break;                       // sought value cannot appear later in an ascending set
            if (current == values[matched])
                matched++;                   // same ordinal satisfies duplicate query values
            else
                current = it.nextOrdinal();
        }
        return matched == values.length;
    }

    /** @return a fresh iterator over an independent copy of the reader. */
    @Override
    public OrdinalIterator iterator() {
        return new CompactOrdinalIterator(reader.copy());
    }

    /** @return the number of encoded ordinals, counted once and cached. */
    @Override
    public int size() {
        if (cachedSize == Integer.MIN_VALUE)
            cachedSize = countEncodedValues(reader.copy());
        return cachedSize;
    }

    // Counts variable-byte values until the -1 terminator is decoded.
    private int countEncodedValues(ByteArrayReader scanner) {
        int count = 0;
        while (scanner.readVInt() >= 0)
            count++;
        return count;
    }
}
| 9,771 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressed/SingleOrdinalIterator.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import com.netflix.nfgraph.OrdinalIterator;
/**
* An implementation of {@link OrdinalIterator} which "iterates" over a single ordinal.
*/
/**
 * An {@link OrdinalIterator} over exactly one ordinal: the value is produced on the
 * first call to {@link #nextOrdinal()} and every later call reports exhaustion.
 */
public class SingleOrdinalIterator implements OrdinalIterator {

    private final int ordinal;
    private boolean consumed;

    public SingleOrdinalIterator(int ordinal) {
        this.ordinal = ordinal;
    }

    /** @return the ordinal on the first call; NO_MORE_ORDINALS afterwards */
    @Override
    public int nextOrdinal() {
        if (!consumed) {
            consumed = true;
            return ordinal;
        }
        return NO_MORE_ORDINALS;
    }

    /** Makes the single ordinal available again. */
    @Override
    public void reset() {
        consumed = false;
    }

    /** @return a fresh, un-consumed iterator over the same ordinal */
    @Override
    public OrdinalIterator copy() {
        return new SingleOrdinalIterator(ordinal);
    }

    /** A single element is trivially in order. */
    @Override
    public boolean isOrdered() {
        return true;
    }
}
| 9,772 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressed/HashSetOrdinalSet.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.spec.NFPropertySpec;
import com.netflix.nfgraph.util.ByteArrayReader;
import com.netflix.nfgraph.util.Mixer;
/**
* An implementation of {@link OrdinalSet}, returned for connections represented as variable-byte hashed integer arrays in an {@link NFCompressedGraph}.<p>
*
* A variable-byte hashed integer array representation contains between one and five bytes per connection. The ordinal for each
* connection is hashed into a byte array, then represented as a variant on the variable-byte integers used in the {@link CompactOrdinalSet}.<p>
*
* The byte array can be thought of as a open-addressed hash table, with each byte representing a single bucket. Because
* values may be represented in more than one byte, single values may spill over into multiple buckets. The beginning of the
* value is indicated by an unset sign bit, and will be located at or after the bucket to which it is hashed. If the value's
* first bit is not located at the hashed position, it will be located in a position after the bucket with no empty buckets in between.<p>
*
* This implementation provides <code>O(1)</code> time for <code>contains()</code>, but is not as memory-efficient as a {@link CompactOrdinalSet}.<p>
*
* This representation for a connection set can be configured for an {@link NFPropertySpec} using {@link NFPropertySpec#HASH}.
*
* @see <a href="https://github.com/Netflix/netflix-graph/wiki/Compact-representations">Compact Representations</a>
*
*/
public class HashSetOrdinalSet extends OrdinalSet {
// Open-addressed hash table of variable-byte encoded values; each byte is one bucket.
private final ByteArrayReader reader;
// Lazily computed cardinality; Integer.MIN_VALUE means "not yet counted".
private int size = Integer.MIN_VALUE;
public HashSetOrdinalSet(ByteArrayReader reader) {
this.reader = reader;
}
@Override
public OrdinalIterator iterator() {
return new HashSetOrdinalIterator(reader.copy());
}
/**
 * O(1) membership test via linear probing.  Stored values are offset by one so
 * that 0 can serve as the "empty bucket" marker.
 */
@Override
public boolean contains(int value) {
value += 1;
// The table length is a power of two, so masking the hash by (length - 1) picks a bucket.
int offset = (Mixer.hashInt(value) & ((int)reader.length() - 1));
offset = seekBeginByte(offset);
while(reader.getByte(offset) != 0) {
// An entry's first byte has the sign bit clear; continuation bytes carry the sign
// bit set plus 7 value bits each.
int readValue = reader.getByte(offset);
offset = nextOffset(offset);
while((reader.getByte(offset) & 0x80) != 0) {
readValue <<= 7;
readValue |= reader.getByte(offset) & 0x7F;
offset = nextOffset(offset);
}
if(readValue == value)
return true;
}
return false;
}
@Override
public int size() {
if(size == Integer.MIN_VALUE)
size = countHashEntries();
return size;
}
// Advances past continuation bytes so probing starts at the first byte of an entry.
private int seekBeginByte(int offset) {
while((reader.getByte(offset) & 0x80) != 0)
offset = nextOffset(offset);
return offset;
}
// Moves to the next bucket, wrapping around to the start of the table.
private int nextOffset(int offset) {
offset++;
if(offset >= reader.length()) {
offset = 0;
}
return offset;
}
// Counts entries by counting "begin" bytes: non-zero bytes without the continuation bit.
private int countHashEntries() {
int counter = 0;
for(int i=0;i<reader.length();i++) {
byte b = reader.getByte(i);
if(b != 0 && (b & 0x80) == 0)
counter++;
}
return counter;
}
}
| 9,773 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressed/HashSetOrdinalIterator.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.util.ByteArrayReader;
/**
* An implementation of {@link OrdinalIterator} returned for ordinals represented as variable-byte hashed integer arrays in an {@link NFCompressedGraph}.
*
* @see HashSetOrdinalSet
*/
public class HashSetOrdinalIterator implements OrdinalIterator {
// Open-addressed hash table of variable-byte encoded values; each byte is one bucket.
private final ByteArrayReader reader;
// Offset of the first entry found scanning from position 0; iteration is complete
// once the circular scan wraps back around to this offset.
private final int beginOffset;
private int offset = 0;
private boolean firstValue;
// NOTE(review): seekBeginByte() loops until it finds a populated bucket, so this
// assumes the table contains at least one entry — empty sets appear to be
// represented elsewhere (e.g. as EMPTY_SET); confirm against the callers.
public HashSetOrdinalIterator(ByteArrayReader reader) {
this.reader = reader;
seekBeginByte();
this.beginOffset = offset;
firstValue = true;
}
@Override
public int nextOrdinal() {
seekBeginByte();
// Arriving back at the first entry means the scan has wrapped: iteration is
// finished, unless this is the very first call.
if(offset == beginOffset) {
if(!firstValue)
return NO_MORE_ORDINALS;
firstValue = false;
}
// Decode one entry: a begin byte (sign bit clear) followed by continuation
// bytes (sign bit set), contributing 7 value bits each.
int value = reader.getByte(offset);
nextOffset();
while((reader.getByte(offset) & 0x80) != 0) {
value <<= 7;
value |= reader.getByte(offset) & 0x7F;
nextOffset();
}
// Stored values are offset by one so 0 can mark an empty bucket.
return value - 1;
}
@Override
public void reset() {
offset = beginOffset;
firstValue = true;
}
/**
 * {@inheritDoc}
 * <p>
 * Note: the returned iterator shares the underlying reader and starts from the
 * beginning; it does not inherit this iterator's current position.
 */
@Override
public OrdinalIterator copy() {
return new HashSetOrdinalIterator(reader);
}
// Hash-table order is unrelated to ordinal order.
@Override
public boolean isOrdered() {
return false;
}
// Moves to the next bucket, wrapping around to the start of the table.
private void nextOffset() {
offset++;
if(offset >= reader.length()) {
offset = 0;
}
}
// Skips empty buckets and continuation bytes until positioned at the begin byte of an entry.
private void seekBeginByte() {
while((reader.getByte(offset) & 0x80) != 0 || reader.getByte(offset) == 0)
nextOffset();
}
}
| 9,774 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraph.java | /*
* Copyright 2013-2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import static com.netflix.nfgraph.OrdinalIterator.EMPTY_ITERATOR;
import static com.netflix.nfgraph.OrdinalSet.EMPTY_SET;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import com.netflix.nfgraph.NFGraph;
import com.netflix.nfgraph.NFGraphModelHolder;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.exception.NFGraphException;
import com.netflix.nfgraph.serializer.NFCompressedGraphDeserializer;
import com.netflix.nfgraph.serializer.NFCompressedGraphSerializer;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
import com.netflix.nfgraph.spec.NFPropertySpec;
import com.netflix.nfgraph.util.ByteArrayReader;
import com.netflix.nfgraph.util.ByteData;
import com.netflix.nfgraph.util.ByteSegmentPool;
import com.netflix.nfgraph.util.SegmentedByteArray;
/**
* A read-only, memory-efficient implementation of an {@link NFGraph}. The connections for all nodes in the graph
* are encoded into a single byte array. The encoding for each set will be specified as either a {@link CompactOrdinalSet} or
* {@link HashSetOrdinalSet}. If it is more efficient, the actual encoding will be a {@link BitSetOrdinalSet}.<p>
*
* The offsets into the byte array where connections for each node are encoded are held in the {@link NFCompressedGraphPointers}.
*/
public class NFCompressedGraph extends NFGraph {
// Offsets into <data> where each node's connections begin, keyed by node type + ordinal.
private final NFCompressedGraphPointers pointers;
// The single byte sequence holding the encoded connections for every node in the graph.
private final ByteData data;
private final long dataLength;
public NFCompressedGraph(NFGraphSpec spec, NFGraphModelHolder modelHolder, ByteData data, long dataLength, NFCompressedGraphPointers pointers) {
super(spec, modelHolder);
this.data = data;
this.dataLength = dataLength;
this.pointers = pointers;
}
/**
 * @return a single connected ordinal for the given node/property (the first one, when the
 *         property holds multiple connections), or -1 when there is none.
 */
@Override
protected int getConnection(int connectionModelIndex, String nodeType, int ordinal, String propertyName) {
ByteArrayReader reader = reader(nodeType, ordinal);
if(reader != null) {
NFPropertySpec propertySpec = pointReaderAtProperty(reader, nodeType, propertyName, connectionModelIndex);
if(propertySpec != null) {
if(propertySpec.isSingle())
return reader.readVInt();
int firstOrdinal = iterator(nodeType, reader, propertySpec).nextOrdinal();
if(firstOrdinal != OrdinalIterator.NO_MORE_ORDINALS)
return firstOrdinal;
}
}
return -1;
}
/** @return the set of connected ordinals, or EMPTY_SET when the node has none encoded. */
@Override
protected OrdinalSet getConnectionSet(int connectionModelIndex, String nodeType, int ordinal, String propertyName) {
ByteArrayReader reader = reader(nodeType, ordinal);
if(reader != null) {
NFPropertySpec propertySpec = pointReaderAtProperty(reader, nodeType, propertyName, connectionModelIndex);
if (propertySpec != null) {
return set(nodeType, reader, propertySpec);
}
}
return EMPTY_SET;
}
/** @return an iterator over the connected ordinals, or EMPTY_ITERATOR when none are encoded. */
@Override
protected OrdinalIterator getConnectionIterator(int connectionModelIndex, String nodeType, int ordinal, String propertyName) {
ByteArrayReader reader = reader(nodeType, ordinal);
if(reader != null) {
NFPropertySpec propertySpec = pointReaderAtProperty(reader, nodeType, propertyName, connectionModelIndex);
if (propertySpec != null) {
return iterator(nodeType, reader, propertySpec);
}
}
return EMPTY_ITERATOR;
}
NFCompressedGraphPointers getPointers() {
return pointers;
}
// Materializes an OrdinalSet for the property the reader is positioned at.  The first
// vint is a header: for single properties it is the ordinal itself; otherwise
//   -1 -> bit set sized to cover every ordinal of the to-node type,
//    0 -> empty set,
//   n (hashed) -> hash table of 2^(n-1) bytes,
//   n (compact) -> n bytes of variable-byte deltas.
private OrdinalSet set(String nodeType, ByteArrayReader reader, NFPropertySpec propertySpec) {
if(propertySpec.isSingle())
return new SingleOrdinalSet(reader.readVInt());
int size = reader.readVInt();
if(size == -1) {
int numBits = pointers.numPointers(propertySpec.getToNodeType());
int numBytes = ((numBits - 1) / 8) + 1;
reader.setRemainingBytes(numBytes);
return new BitSetOrdinalSet(reader);
}
if(size == 0)
return EMPTY_SET;
if(propertySpec.isHashed()) {
reader.setRemainingBytes(1 << (size - 1));
return new HashSetOrdinalSet(reader);
}
reader.setRemainingBytes(size);
return new CompactOrdinalSet(reader);
}
// Mirrors set(...) but produces the matching OrdinalIterator implementation; the
// header vint is interpreted identically.
private OrdinalIterator iterator(String nodeType, ByteArrayReader reader, NFPropertySpec propertySpec) {
if(propertySpec.isSingle())
return new SingleOrdinalIterator(reader.readVInt());
int size = reader.readVInt();
if(size == -1) {
int numBits = pointers.numPointers(propertySpec.getToNodeType());
int numBytes = ((numBits - 1) / 8) + 1;
reader.setRemainingBytes(numBytes);
return new BitSetOrdinalIterator(reader);
}
if(size == 0)
return EMPTY_ITERATOR;
if(propertySpec.isHashed()) {
reader.setRemainingBytes(1 << (size - 1));
return new HashSetOrdinalIterator(reader);
}
reader.setRemainingBytes(size);
return new CompactOrdinalIterator(reader);
}
// Positions a fresh reader at the start of the node's encoded data, or returns null
// when nothing is encoded for that node.
private ByteArrayReader reader(String nodeType, int ordinal) {
long pointer = pointers.getPointer(nodeType, ordinal);
if(pointer == -1)
return null;
return new ByteArrayReader(data, pointer);
}
// Advances the reader over preceding properties until it sits at the start of the
// requested property's data; for connection-model-specific properties it also skips
// to the requested model.  Properties are laid out in NFNodeSpec declaration order.
private NFPropertySpec pointReaderAtProperty(ByteArrayReader reader, String nodeType, String propertyName, int connectionModelIndex) {
NFNodeSpec nodeSpec = graphSpec.getNodeSpec(nodeType);
for (NFPropertySpec propertySpec : nodeSpec.getPropertySpecs()) {
if (propertySpec.getName().equals(propertyName)) {
if(propertySpec.isConnectionModelSpecific())
positionForModel(reader, connectionModelIndex, propertySpec);
return propertySpec;
} else {
skipProperty(reader, propertySpec);
}
}
throw new NFGraphException("Property " + propertyName + " is undefined for node type " + nodeType);
}
// A model-specific property is encoded as a vint byte-length followed by one
// single-property encoding per model; skip the first connectionModelIndex of them.
private void positionForModel(ByteArrayReader reader, int connectionModelIndex, NFPropertySpec propertySpec) {
reader.setRemainingBytes(reader.readVInt());
for(int i=0;i<connectionModelIndex;i++) {
skipSingleProperty(reader, propertySpec);
}
}
private void skipProperty(ByteArrayReader reader, NFPropertySpec propertySpec) {
if(propertySpec.isConnectionModelSpecific()) {
// Model-specific properties are prefixed with their total byte length.
int size = reader.readVInt();
reader.skip(size);
} else {
skipSingleProperty(reader, propertySpec);
}
}
// Skips one encoded property, interpreting the header vint exactly as set(...) does.
private void skipSingleProperty(ByteArrayReader reader, NFPropertySpec propertySpec) {
if(propertySpec.isSingle()) {
reader.readVInt();
return;
}
int size = reader.readVInt();
if(size == 0)
return;
if(size == -1) {
int numBits = pointers.numPointers(propertySpec.getToNodeType());
int numBytes = ((numBits - 1) / 8) + 1;
reader.skip(numBytes);
return;
}
if(propertySpec.isHashed()) {
reader.skip(1 << (size - 1));
return;
}
reader.skip(size);
}
/** Serializes this graph (spec, models, pointers and data) to the given stream. */
public void writeTo(OutputStream os) throws IOException {
NFCompressedGraphSerializer serializer = new NFCompressedGraphSerializer(graphSpec, modelHolder, pointers, data, dataLength);
serializer.serializeTo(os);
}
/** Deserializes a graph previously written with {@link #writeTo(OutputStream)}. */
public static NFCompressedGraph readFrom(InputStream is) throws IOException {
return readFrom(is, null);
}
/**
 * When using a {@link ByteSegmentPool}, this method will borrow arrays used to construct the NFCompressedGraph from that pool.
 * <p>
 * Note that because the {@link ByteSegmentPool} is NOT thread-safe, this this call is also NOT thread-safe.
 * It is up to implementations to ensure that only a single update thread
 * is accessing this memory pool at any given time.
 */
public static NFCompressedGraph readFrom(InputStream is, ByteSegmentPool memoryPool) throws IOException {
NFCompressedGraphDeserializer deserializer = new NFCompressedGraphDeserializer();
return deserializer.deserialize(is, memoryPool);
}
/**
 * When using a {@link ByteSegmentPool}, this method will return all borrowed arrays back to that pool.
 * <p>
 * Note that because the {@link ByteSegmentPool} is NOT thread-safe, this this call is also NOT thread-safe.
 * It is up to implementations to ensure that only a single update thread
 * is accessing this memory pool at any given time.
 */
public void destroy() {
if(data instanceof SegmentedByteArray)
((SegmentedByteArray) data).destroy();
}
}
| 9,775 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphPointers.java | /*
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import java.util.Map;
public interface NFCompressedGraphPointers {
/**
 * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified by the given type and ordinal.
 */
public long getPointer(String nodeType, int ordinal);
/**
 * @return the number of pointers (one per node ordinal) held for the given node type.
 */
public int numPointers(String nodeType);
/**
 * @return all pointers as long values, keyed by node type.
 */
public Map<String, long[]> asMap();
}
| 9,776 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressed/SingleOrdinalSet.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
/**
* An implementation of {@link OrdinalSet} containing a single ordinal.
*/
/**
 * An {@link OrdinalSet} whose contents are exactly one ordinal.
 */
public class SingleOrdinalSet extends OrdinalSet {

    private final int ordinal;

    public SingleOrdinalSet(int ordinal) {
        this.ordinal = ordinal;
    }

    /** @return whether the queried value is the single ordinal held by this set */
    @Override
    public boolean contains(int value) {
        return value == ordinal;
    }

    /** @return a new one-element array containing the ordinal */
    @Override
    public int[] asArray() {
        int[] result = { ordinal };
        return result;
    }

    /** @return an iterator producing the single ordinal once */
    @Override
    public OrdinalIterator iterator() {
        return new SingleOrdinalIterator(ordinal);
    }

    /** @return 1, always */
    @Override
    public int size() {
        return 1;
    }
}
| 9,777 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/compressed/NFCompressedGraphIntPointers.java | /*
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.compressed;
import java.util.HashMap;
import java.util.Map;
import com.netflix.nfgraph.exception.NFGraphException;
/**
* This class holds all of the offsets into the {@link NFCompressedGraph}'s byte array.<p>
*
* This class maintains a mapping of type name to int array. For a given type, the offset in the {@link NFCompressedGraph}'s byte array
* where the connections for a given node are encoded is equal to the value of the int array for the node's type at the index for the node's ordinal.<p>
*
* It is unlikely that this class will need to be used externally.
*/
/**
 * This class holds all of the offsets into the {@link NFCompressedGraph}'s byte array.<p>
 *
 * This class maintains a mapping of type name to int array.  For a given type, the offset in the {@link NFCompressedGraph}'s byte array
 * where the connections for a given node are encoded is equal to the value of the int array for the node's type at the index for the node's ordinal.<p>
 *
 * Pointers are stored as unsigned 32-bit values, with -1 reserved as the "no connections" sentinel.<p>
 *
 * It is unlikely that this class will need to be used externally.
 */
public class NFCompressedGraphIntPointers implements NFCompressedGraphPointers {

    private final Map<String, int[]> pointersByOrdinal;

    public NFCompressedGraphIntPointers() {
        this.pointersByOrdinal = new HashMap<String, int[]>();
    }

    /**
     * @return the offset into the {@link NFCompressedGraph}'s byte array for the node identified
     *         by the given type and ordinal, or -1 when the node has no encoded connections.
     * @throws NFGraphException if the given node type is not defined.
     */
    public long getPointer(String nodeType, int ordinal) {
        int[] pointers = pointersForType(nodeType);
        if(ordinal < pointers.length) {
            if(pointers[ordinal] == -1)
                return -1;
            // stored values are unsigned 32-bit; mask to avoid sign extension
            return 0xFFFFFFFFL & pointers[ordinal];
        }
        return -1;
    }

    /** Registers the pointer array for the given node type, replacing any previous one. */
    public void addPointers(String nodeType, int pointers[]) {
        pointersByOrdinal.put(nodeType, pointers);
    }

    /**
     * @return the number of pointers (one per node ordinal) held for the given type.
     * @throws NFGraphException if the given node type is not defined
     *         (previously this surfaced as a bare NullPointerException).
     */
    public int numPointers(String nodeType) {
        return pointersForType(nodeType).length;
    }

    @Override
    public Map<String, long[]> asMap() {
        Map<String, long[]> map = new HashMap<String, long[]>();
        for(Map.Entry<String, int[]> entry : pointersByOrdinal.entrySet()) {
            map.put(entry.getKey(), toLongArray(entry.getValue()));
        }
        return map;
    }

    /**
     * Widens the stored pointers to longs using the same interpretation as
     * {@link #getPointer(String, int)}: -1 remains the "no connections" sentinel and all
     * other values are treated as unsigned.  (A plain cast would sign-extend pointers
     * at or above 2^31 and disagree with getPointer().)
     */
    private long[] toLongArray(int[] arr) {
        long[] l = new long[arr.length];
        for(int i=0;i<arr.length;i++) {
            l[i] = arr[i] == -1 ? -1L : (0xFFFFFFFFL & arr[i]);
        }
        return l;
    }

    /** Looks up the pointer array for a node type, failing descriptively when undefined. */
    private int[] pointersForType(String nodeType) {
        int[] pointers = pointersByOrdinal.get(nodeType);
        if(pointers == null)
            throw new NFGraphException("Undefined node type: " + nodeType);
        return pointers;
    }
}
| 9,778 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/build/NFBuildGraphOrdinalSet.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.build;
import java.util.Arrays;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
/**
* And implementation of {@link OrdinalSet} returned for connections in an {@link NFBuildGraph}.
*/
/**
 * The {@link OrdinalSet} implementation handed out for connections in an {@link NFBuildGraph}.
 * <p>
 * Backed by a plain int array of which only the first {@code size} entries are meaningful.
 */
public class NFBuildGraphOrdinalSet extends OrdinalSet {

    private final int ordinals[];
    private final int size;

    public NFBuildGraphOrdinalSet(int ordinals[], int size) {
        this.ordinals = ordinals;
        this.size = size;
    }

    /** Linear scan over the populated portion of the backing array. */
    @Override
    public boolean contains(int value) {
        for (int idx = 0; idx < size; idx++) {
            if (ordinals[idx] == value)
                return true;
        }
        return false;
    }

    /** @return a defensive copy of the populated portion of the backing array */
    @Override
    public int[] asArray() {
        return Arrays.copyOfRange(ordinals, 0, size);
    }

    /** {@inheritDoc} */
    @Override
    public OrdinalIterator iterator() {
        return new NFBuildGraphOrdinalIterator(ordinals, size);
    }

    /** {@inheritDoc} */
    @Override
    public int size() {
        return size;
    }
}
| 9,779 |
0 | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph | Create_ds/netflix-graph/src/main/java/com/netflix/nfgraph/build/NFBuildGraph.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.build;
import static com.netflix.nfgraph.NFGraphModelHolder.CONNECTION_MODEL_GLOBAL;
import com.netflix.nfgraph.NFGraph;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.compressed.NFCompressedGraph;
import com.netflix.nfgraph.compressor.NFCompressedGraphBuilder;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
import com.netflix.nfgraph.spec.NFPropertySpec;
/**
* An <code>NFBuildGraph</code> is used to create a new graph. This representation of the graph data is not especially memory-efficient,
* and is intended to exist only for a short time while the {@link NFGraph} is being populated.<p>
*
* Once the graph is completely populated, it is expected that this <code>NFBuildGraph</code> will be exchanged for a memory-efficient,
* read-only {@link NFCompressedGraph} via the <code>compress()</code> method.<p>
*
* See {@link NFGraph} for an example of code which creates and populates an <code>NFBuildGraph</code>
*
*
*/
public class NFBuildGraph extends NFGraph {
private final NFBuildGraphNodeCache nodeCache;
/**
 * Creates an empty, mutable build graph conforming to the given {@link NFGraphSpec}.
 */
public NFBuildGraph(NFGraphSpec graphSpec) {
super(graphSpec);
this.nodeCache = new NFBuildGraphNodeCache(graphSpec, modelHolder);
}
// Returns a single connected ordinal for the given node/property in the given connection model.
@Override
protected int getConnection(int connectionModelIndex, String nodeType, int ordinal, String propertyName) {
NFBuildGraphNode node = nodeCache.getNode(nodeType, ordinal);
NFPropertySpec propertySpec = getPropertySpec(nodeType, propertyName);
return node.getConnection(connectionModelIndex, propertySpec);
}
// Returns an iterator over the ordinals connected to the given node via the given property.
@Override
protected OrdinalIterator getConnectionIterator(int connectionModelIndex, String nodeType, int ordinal, String propertyName) {
NFBuildGraphNode node = nodeCache.getNode(nodeType, ordinal);
NFPropertySpec propertySpec = getPropertySpec(nodeType, propertyName);
return node.getConnectionIterator(connectionModelIndex, propertySpec);
}
// Returns the set of ordinals connected to the given node via the given property.
@Override
protected OrdinalSet getConnectionSet(int connectionModelIndex, String nodeType, int ordinal, String propertyName) {
NFBuildGraphNode node = nodeCache.getNode(nodeType, ordinal);
NFPropertySpec propertySpec = getPropertySpec(nodeType, propertyName);
return node.getConnectionSet(connectionModelIndex, propertySpec);
}
/**
 * Add a connection to this graph.  The connection will be from the node identified by the given <code>nodeType</code> and <code>fromOrdinal</code>.
 * The connection will be via the specified <code>viaProperty</code> in the {@link NFNodeSpec} for the given <code>nodeType</code>.
 * The connection will be to the node identified by the given <code>toOrdinal</code>.  The type of the to node is implied by the <code>viaProperty</code>.
 * <p>
 * Equivalent to calling the model-specific overload with {@code CONNECTION_MODEL_GLOBAL}.
 */
public void addConnection(String nodeType, int fromOrdinal, String viaProperty, int toOrdinal) {
addConnection(CONNECTION_MODEL_GLOBAL, nodeType, fromOrdinal, viaProperty, toOrdinal);
}
/**
* Add a connection to this graph. The connection will be in the given connection model. The connection will be from the node identified by the given
* <code>nodeType</code> and <code>fromOrdinal</code>. The connection will be via the specified <code>viaProperty</code> in the {@link NFNodeSpec} for
* the given <code>nodeType</code>. The connection will be to the node identified by the given <code>toOrdinal</code>. The type of the to node is implied
* by the <code>viaProperty</code>.
*/
public void addConnection(String connectionModel, String nodeType, int fromOrdinal, String viaPropertyName, int toOrdinal) {
NFBuildGraphNode fromNode = nodeCache.getNode(nodeType, fromOrdinal);
NFPropertySpec propertySpec = getPropertySpec(nodeType, viaPropertyName);
int connectionModelIndex = modelHolder.getModelIndex(connectionModel);
NFBuildGraphNode toNode = nodeCache.getNode(propertySpec.getToNodeType(), toOrdinal);
addConnection(fromNode, propertySpec, connectionModelIndex, toNode);
}
/**
* Returns the list of {@link com.netflix.nfgraph.build.NFBuildGraphNode}s associated with the specified
* <code>nodeType</code>.
*/
public NFBuildGraphNodeList getNodes(String nodeType) {
return nodeCache.getNodes(nodeType);
}
/**
* Creates an {@link com.netflix.nfgraph.build.NFBuildGraphNode} for <code>nodeSpec</code> and <code>ordinal</code>
* and adds it to <code>nodes</code>. If such a node exists in <code>nodes</code>, then that node is returned.
*/
public NFBuildGraphNode getOrCreateNode(NFBuildGraphNodeList nodes, NFNodeSpec nodeSpec, int ordinal) {
return nodeCache.getNode(nodes, nodeSpec, ordinal);
}
/**
* Add a connection to this graph. This method is exposed for efficiency purposes. It is more efficient than
* {@code addConnection(String connectionModel, String nodeType, int fromOrdinal, String viaPropertyName, int toOrdinal)}
* as it avoids multiple lookups for <code>fromNode</code>, <code>propertySpec</code>, <code>connectionModelIndex</code>
* and <code>toNode</code>.
*/
public void addConnection(NFBuildGraphNode fromNode, NFPropertySpec propertySpec, int connectionModelIndex, NFBuildGraphNode toNode) {
fromNode.addConnection(connectionModelIndex, propertySpec, toNode.getOrdinal());
toNode.incrementNumIncomingConnections();
}
/**
* Add a connection model, identified by the parameter <code>connectionModel</code> to this graph.<p>
*
* Building the graph may be much more efficient if each connection model is added to the graph with this method
* prior to adding any connections.<p>
*
* This operation is not necessary, but may make building the graph more efficient.
*
* Returns the "model index" used to identify the connection model internally. Passing this to
* the various {@code addConnection()} may offer a performance boost while building the graph.
*/
public int addConnectionModel(String connectionModel) {
return modelHolder.getModelIndex(connectionModel);
}
/**
* Returns the {@link NFPropertySpec} associated with the supplied node type and property name.
*/
public NFPropertySpec getPropertySpec(String nodeType, String propertyName) {
NFNodeSpec nodeSpec = graphSpec.getNodeSpec(nodeType);
NFPropertySpec propertySpec = nodeSpec.getPropertySpec(propertyName);
return propertySpec;
}
/**
* Return a {@link NFCompressedGraph} containing all connections which have been added to this <code>NFBuildGraph</code>.
*/
public NFCompressedGraph compress() {
NFCompressedGraphBuilder builder = new NFCompressedGraphBuilder(graphSpec, nodeCache, modelHolder);
return builder.buildGraph();
}
} | 9,780 |
package com.netflix.nfgraph.build;
import java.util.ArrayList;
/**
* Encapsulates an ordered list of {@link com.netflix.nfgraph.build.NFBuildGraphNode}s.
* @author ishastri
*/
public class NFBuildGraphNodeList {

    // Backing storage, indexed by node ordinal. NFBuildGraphNodeCache grows this list
    // with null placeholders, so entries may be null for ordinals not yet materialized.
    // Made final: the reference is assigned once in the constructor and never rebound.
    private final ArrayList<NFBuildGraphNode> list;

    NFBuildGraphNodeList() {
        list = new ArrayList<>();
    }

    /** Returns the node stored at {@code ordinal} (may be null for a placeholder slot). */
    public NFBuildGraphNode get(int ordinal) {
        return list.get(ordinal);
    }

    /** Appends {@code node} (possibly null) to the end of the list. */
    boolean add(NFBuildGraphNode node) {
        return list.add(node);
    }

    /** Returns the number of slots, including null placeholders. */
    public int size() {
        return list.size();
    }

    /** Replaces the slot at {@code ordinal}, returning the previous occupant. */
    NFBuildGraphNode set(int ordinal, NFBuildGraphNode node) {
        return list.set(ordinal, node);
    }
}
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.build;
import java.util.HashMap;
import java.util.Map;
import com.netflix.nfgraph.NFGraphModelHolder;
import com.netflix.nfgraph.spec.NFGraphSpec;
import com.netflix.nfgraph.spec.NFNodeSpec;
public class NFBuildGraphNodeCache {

    private final NFGraphSpec graphSpec;
    private final NFGraphModelHolder buildGraphModelHolder;
    // Node lists keyed by node type; each list is indexed by node ordinal.
    private final Map<String, NFBuildGraphNodeList> nodesByOrdinal;

    NFBuildGraphNodeCache(NFGraphSpec graphSpec, NFGraphModelHolder modelHolder) {
        this.nodesByOrdinal = new HashMap<>();  // diamond operator; file already uses generics
        this.graphSpec = graphSpec;
        this.buildGraphModelHolder = modelHolder;
    }

    /**
     * Returns the node for {@code (nodeType, ordinal)}, creating it if necessary.
     */
    NFBuildGraphNode getNode(String nodeType, int ordinal) {
        NFBuildGraphNodeList nodes = getNodes(nodeType);
        NFNodeSpec nodeSpec = graphSpec.getNodeSpec(nodeType);
        return getNode(nodes, nodeSpec, ordinal);
    }

    /**
     * Returns the node for {@code (nodeSpec, ordinal)} from the supplied list, growing
     * the list with null placeholders and lazily instantiating the node as needed.
     */
    NFBuildGraphNode getNode(NFBuildGraphNodeList nodes, NFNodeSpec nodeSpec, int ordinal) {
        while (ordinal >= nodes.size()) {
            nodes.add(null);
        }
        NFBuildGraphNode node = nodes.get(ordinal);
        if (node == null) {
            // New nodes start with storage sized to the connection models known so far;
            // NFBuildGraphNode grows that storage on demand for later models.
            node = new NFBuildGraphNode(nodeSpec, ordinal, buildGraphModelHolder.size());
            nodes.set(ordinal, node);
        }
        return node;
    }

    /** Returns the number of node slots allocated for the given node type. */
    public int numNodes(String nodeType) { return getNodes(nodeType).size(); }

    /**
     * Returns the node list for the given node type, creating an empty list on first use.
     */
    public NFBuildGraphNodeList getNodes(String nodeType) {
        // computeIfAbsent replaces the manual get/null-check/put sequence.
        return nodesByOrdinal.computeIfAbsent(nodeType, type -> new NFBuildGraphNodeList());
    }
}
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.build;
import java.util.Arrays;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.spec.NFNodeSpec;
import com.netflix.nfgraph.spec.NFPropertySpec;
public class NFBuildGraphNode {

    private final NFNodeSpec nodeSpec;
    private final int ordinal;
    // Per-connection-model storage; grown on demand, entries may be null until used.
    private NFBuildGraphNodeConnections[] connectionModelSpecificConnections;
    private int numIncomingConnections;

    NFBuildGraphNode(NFNodeSpec nodeSpec, int ordinal, int numKnownConnectionModels) {
        this.nodeSpec = nodeSpec;
        this.connectionModelSpecificConnections = new NFBuildGraphNodeConnections[numKnownConnectionModels];
        this.ordinal = ordinal;
        this.numIncomingConnections = 0;
    }

    public int getOrdinal() {
        return ordinal;
    }

    /** Returns a single connected ordinal for the property, or -1 if none exists. */
    public int getConnection(int connectionModelIndex, NFPropertySpec spec) {
        NFBuildGraphNodeConnections connections = getConnections(connectionModelIndex);
        return (connections == null) ? -1 : connections.getConnection(spec);
    }

    /** Returns the set of connected ordinals for the property (possibly empty). */
    public OrdinalSet getConnectionSet(int connectionModelIndex, NFPropertySpec spec) {
        NFBuildGraphNodeConnections connections = getConnections(connectionModelIndex);
        return (connections == null) ? OrdinalSet.EMPTY_SET : connections.getConnectionSet(spec);
    }

    /** Returns an iterator over the connected ordinals for the property (possibly empty). */
    public OrdinalIterator getConnectionIterator(int connectionModelIndex, NFPropertySpec spec) {
        NFBuildGraphNodeConnections connections = getConnections(connectionModelIndex);
        return (connections == null) ? OrdinalIterator.EMPTY_ITERATOR : connections.getConnectionIterator(spec);
    }

    void addConnection(int connectionModelIndex, NFPropertySpec spec, int ordinal) {
        getOrCreateConnections(connectionModelIndex).addConnection(spec, ordinal);
    }

    void incrementNumIncomingConnections() {
        numIncomingConnections++;
    }

    int numIncomingConnections() {
        return numIncomingConnections;
    }

    /** Read-only lookup: never grows the storage array. */
    private NFBuildGraphNodeConnections getConnections(int connectionModelIndex) {
        NFBuildGraphNodeConnections[] all = connectionModelSpecificConnections;
        return (connectionModelIndex < all.length) ? all[connectionModelIndex] : null;
    }

    /** Grows the storage array if needed and lazily instantiates the connections slot. */
    private NFBuildGraphNodeConnections getOrCreateConnections(int connectionModelIndex) {
        if (connectionModelIndex >= connectionModelSpecificConnections.length) {
            connectionModelSpecificConnections = Arrays.copyOf(connectionModelSpecificConnections, connectionModelIndex + 1);
        }
        NFBuildGraphNodeConnections connections = connectionModelSpecificConnections[connectionModelIndex];
        if (connections == null) {
            connections = new NFBuildGraphNodeConnections(nodeSpec);
            connectionModelSpecificConnections[connectionModelIndex] = connections;
        }
        return connections;
    }
}
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.build;
import java.util.Arrays;
import com.netflix.nfgraph.OrdinalIterator;
/**
* An implementation of {@link OrdinalIterator} returned for connections in an {@link NFBuildGraph}.
*/
public class NFBuildGraphOrdinalIterator implements OrdinalIterator {

    private final int ordinals[];
    private int currentPositionInList;
    // Sentinel below any real ordinal so the de-duplication loop in nextOrdinal()
    // never suppresses the first element.
    private int previousOrdinal = Integer.MIN_VALUE;

    /** Takes a defensive sorted copy of the first {@code size} entries of {@code ordinals}. */
    NFBuildGraphOrdinalIterator(int ordinals[], int size) {
        this.ordinals = Arrays.copyOfRange(ordinals, 0, size);
        Arrays.sort(this.ordinals);
    }

    /** Used by copy(): shares the (already sorted, immutable-by-convention) array. */
    private NFBuildGraphOrdinalIterator(int ordinals[]) {
        this.ordinals = ordinals;
    }

    /**
     * Returns the next distinct ordinal in ascending order, or NO_MORE_ORDINALS when
     * exhausted. Consecutive duplicates in the backing array are skipped.
     */
    @Override
    public int nextOrdinal() {
        if (previousOrdinal == NO_MORE_ORDINALS)
            return NO_MORE_ORDINALS;
        int nextOrdinal = nextOrdinalInList();
        while (nextOrdinal == previousOrdinal) {
            nextOrdinal = nextOrdinalInList();
        }
        previousOrdinal = nextOrdinal;
        return nextOrdinal;
    }

    @Override
    public void reset() {
        // BUG FIX: previously reset previousOrdinal to 0, so after a reset() an ordinal
        // of 0 at the head of the list was skipped (nextOrdinal() treated it as a
        // duplicate of the "previous" ordinal). Restore the construction-time sentinel.
        this.previousOrdinal = Integer.MIN_VALUE;
        this.currentPositionInList = 0;
    }

    @Override
    public OrdinalIterator copy() {
        return new NFBuildGraphOrdinalIterator(ordinals);
    }

    @Override
    public boolean isOrdered() {
        return true;
    }

    /** Returns the next raw (possibly duplicate) ordinal, or NO_MORE_ORDINALS at the end. */
    private int nextOrdinalInList() {
        if (currentPositionInList == ordinals.length)
            return NO_MORE_ORDINALS;
        return ordinals[currentPositionInList++];
    }
}
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.nfgraph.build;
import java.util.Arrays;
import com.netflix.nfgraph.OrdinalIterator;
import com.netflix.nfgraph.OrdinalSet;
import com.netflix.nfgraph.compressed.SingleOrdinalIterator;
import com.netflix.nfgraph.compressed.SingleOrdinalSet;
import com.netflix.nfgraph.spec.NFNodeSpec;
import com.netflix.nfgraph.spec.NFPropertySpec;
/**
* Represents the connections for a node in an {@link NFBuildGraph} for a single connection model.
*
* It is unlikely that this class will need to be used externally.
*/
/**
 * Represents the connections for a node in an {@link NFBuildGraph} for a single
 * connection model. It is unlikely that this class will need to be used externally.
 */
class NFBuildGraphNodeConnections {
    private static final int EMPTY_ORDINAL_ARRAY[] = new int[0];

    // One slot per single-valued property; -1 means "no connection".
    private final int singleValues[];
    // One growable ordinal array per multi-valued property.
    private final int multipleValues[][];
    // Number of ordinals actually stored in each multipleValues array (arrays over-allocate).
    private final int multipleValueSizes[];

    NFBuildGraphNodeConnections(NFNodeSpec nodeSpec) {
        singleValues = new int[nodeSpec.getNumSingleProperties()];
        multipleValues = new int[nodeSpec.getNumMultipleProperties()][];
        multipleValueSizes = new int[nodeSpec.getNumMultipleProperties()];
        Arrays.fill(singleValues, -1);
        Arrays.fill(multipleValues, EMPTY_ORDINAL_ARRAY);
    }

    /**
     * Returns a single connected ordinal for the property, or -1 if none exists.
     * For a multi-valued property this is the first ordinal that was added.
     */
    int getConnection(NFPropertySpec spec) {
        if(spec.isSingle())
            return singleValues[spec.getPropertyIndex()];
        // BUG FIX: previously returned multipleValues[...].length — the backing array's
        // capacity — instead of a connected ordinal. Return the first stored ordinal,
        // guarded by the tracked element count rather than the array length.
        if(multipleValueSizes[spec.getPropertyIndex()] > 0)
            return multipleValues[spec.getPropertyIndex()][0];
        return -1;
    }

    /** Returns all connected ordinals for the property as an OrdinalSet. */
    OrdinalSet getConnectionSet(NFPropertySpec spec) {
        if(spec.isMultiple()) {
            return new NFBuildGraphOrdinalSet(multipleValues[spec.getPropertyIndex()], multipleValueSizes[spec.getPropertyIndex()]);
        }
        return new SingleOrdinalSet(singleValues[spec.getPropertyIndex()]);
    }

    /** Returns an iterator over the connected ordinals for the property. */
    OrdinalIterator getConnectionIterator(NFPropertySpec spec) {
        if(spec.isMultiple()) {
            return new NFBuildGraphOrdinalIterator(multipleValues[spec.getPropertyIndex()], multipleValueSizes[spec.getPropertyIndex()]);
        }
        return new SingleOrdinalIterator(singleValues[spec.getPropertyIndex()]);
    }

    /** Records a connection; single-valued properties are overwritten, multi-valued appended. */
    void addConnection(NFPropertySpec spec, int ordinal) {
        if (spec.isMultiple()) {
            addMultipleProperty(spec, ordinal);
        } else {
            singleValues[spec.getPropertyIndex()] = ordinal;
        }
    }

    /** Appends to a multi-valued property, growing the backing array by 1.5x as needed. */
    void addMultipleProperty(NFPropertySpec spec, int ordinal) {
        int values[] = multipleValues[spec.getPropertyIndex()];
        int propSize = multipleValueSizes[spec.getPropertyIndex()];
        if(values.length == 0) {
            values = new int[2];
            multipleValues[spec.getPropertyIndex()] = values;
        } else if(values.length == propSize) {
            values = Arrays.copyOf(values, values.length * 3 / 2);
            multipleValues[spec.getPropertyIndex()] = values;
        }
        values[propSize] = ordinal;
        multipleValueSizes[spec.getPropertyIndex()]++;
    }
}
package com.netflix.nfgraph.exception;
import com.netflix.nfgraph.NFGraph;
/**
 * Thrown when an {@link NFGraph} API call references an invalid connection model,
 * node type, or property type.
 */
public class NFGraphException extends RuntimeException {

    private static final long serialVersionUID = -9177454492889434892L;

    /** Creates an exception carrying a description of the invalid reference. */
    public NFGraphException(String message) {
        super(message);
    }
}
/*
* Copyright (c) 2015. Airbnb.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.airbnb.metrics;
import com.yammer.metrics.core.MetricName;
import org.junit.Test;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
*
*/
public class ExcludeMetricPredicateTest {

    // Contract under test: matches() returns false for metrics whose MBean name matches
    // the exclude regex (i.e. they are filtered out) and true for everything else.

    @Test
    public void exclude() {
        ExcludeMetricPredicate excluder = new ExcludeMetricPredicate("my\\.package\\.MyClass.*");
        // MetricName arguments are (group, type, name, scope).
        MetricName excluded = new MetricName("my.package", "MyClass", "some_name", "some_scope");
        MetricName kept = new MetricName("another.package", "MyClass", "some_name", "some_scope");
        assertFalse(excluder.matches(excluded, null));
        assertTrue(excluder.matches(kept, null));
    }

    @Test
    public void exclude2() {
        // A realistic multi-alternative exclusion pattern for noisy Kafka metrics.
        ExcludeMetricPredicate excluder = new ExcludeMetricPredicate(
                "(kafka\\.consumer\\.FetchRequestAndResponseMetrics.*)|(.*ReplicaFetcherThread.*)|(kafka\\.server\\.FetcherLagMetrics\\..*)|(kafka\\.log\\.Log\\..*)|(kafka\\.cluster\\.Partition\\..*)");
        assertFalse(excluder.matches(new MetricName("kafka.consumer", "FetchRequestAndResponseMetrics", "some_name", "some_scope"), null));
        assertFalse(excluder.matches(new MetricName("kafka.server", "FetcherStats", "ReplicaFetcherThread", "some_scope"), null));
        assertTrue(excluder.matches(new MetricName("kafka.server", "ReplicaManager", "IsrExpandsPerSec", "some_scope"), null));
    }
}
/*
* Copyright (c) 2015. Airbnb.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.airbnb.metrics;
import java.util.EnumSet;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import com.timgroup.statsd.StatsDClient;
import com.yammer.metrics.core.Clock;
import com.yammer.metrics.core.Counter;
import com.yammer.metrics.core.Gauge;
import com.yammer.metrics.core.Histogram;
import com.yammer.metrics.core.Meter;
import com.yammer.metrics.core.Metered;
import com.yammer.metrics.core.Metric;
import com.yammer.metrics.core.MetricName;
import com.yammer.metrics.core.MetricProcessor;
import com.yammer.metrics.core.MetricsRegistry;
import com.yammer.metrics.core.Sampling;
import com.yammer.metrics.core.Summarizable;
import com.yammer.metrics.core.Timer;
import com.yammer.metrics.reporting.AbstractPollingReporter;
import com.yammer.metrics.stats.Snapshot;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.mockito.stubbing.Stubber;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for StatsDReporter: each test registers a mocked yammer metric, runs the
 * reporter once, and verifies the gauge values sent to the mocked StatsDClient.
 */
public class StatsDReporterTest {

    // All metrics are registered under Object.class + "metric", which formats to this name.
    private static final String METRIC_BASE_NAME = "java.lang.Object.metric";

    @Mock
    private Clock clock;
    @Mock
    private StatsDClient statsD;
    private AbstractPollingReporter reporter;
    private TestMetricsRegistry registry;

    // Registry subclass that exposes the protected getOrAdd() so tests can insert mocks.
    protected static class TestMetricsRegistry extends MetricsRegistry {
        public <T extends Metric> T add(MetricName name, T metric) {
            return getOrAdd(name, metric);
        }
    }

    @Before
    public void init() throws Exception {
        MockitoAnnotations.initMocks(this);
        // Fixed clock values keep reporter output deterministic.
        when(clock.tick()).thenReturn(1234L);
        when(clock.time()).thenReturn(5678L);
        registry = new TestMetricsRegistry();
        // Report every Dimension so all gauge suffixes are exercised.
        reporter = new StatsDReporter(registry,
                statsD,
                EnumSet.allOf(Dimension.class)
        );
    }

    @Test
    public void isTaggedTest() {
        // An MBean name containing "name=" / "type=" pairs should be detected as tagged.
        registry.add(new MetricName("kafka.common", "AppInfo", "Version", null, "kafka.common:type=AppInfo,name=Version"),
                new Gauge<String>() {
                    public String value() {
                        return "0.8.2";
                    }
                });
        assertTrue(((StatsDReporter) reporter).isTagged(registry.allMetrics()));
    }

    // Registers the metric produced by `action`, runs one reporter poll, then shuts down.
    protected <T extends Metric> void addMetricAndRunReporter(Callable<T> action) throws Exception {
        // Invoke the callable to trigger (ie, mark()/inc()/etc) and return the metric
        final T metric = action.call();
        try {
            // Add the metric to the registry, run the reporter and flush the result
            registry.add(new MetricName(Object.class, "metric"), metric);
            reporter.run();
        } finally {
            reporter.shutdown();
        }
    }

    // verifySend overloads: assert that a gauge with the expected name/value reached statsD.
    private void verifySend(String metricNameSuffix, double metricValue) {
        verify(statsD).gauge(METRIC_BASE_NAME + "." + metricNameSuffix,
                metricValue);
    }

    private void verifySend(double metricValue) {
        verify(statsD).gauge(METRIC_BASE_NAME, metricValue);
    }

    private void verifySend(long metricValue) {
        verify(statsD).gauge(METRIC_BASE_NAME, metricValue);
    }

    private void verifySend(String metricNameSuffix, String metricValue) {
        verify(statsD).gauge(METRIC_BASE_NAME + "." + metricNameSuffix,
                Double.valueOf(metricValue));
    }

    // Expected values below mirror the stubbing in setupSummarizableMock/setupMeteredMock/
    // setupSamplingMock (percentiles come from the 0.000..1.000 snapshot).
    public void verifyTimer() {
        verifySend("count", "1");
        verifySend("meanRate", "2.00");
        verifySend("1MinuteRate", "1.00");
        verifySend("5MinuteRate", "5.00");
        verifySend("15MinuteRate", "15.00");
        verifySend("min", "1.00");
        verifySend("max", "3.00");
        verifySend("mean", "2.00");
        verifySend("stddev", "1.50");
        verifySend("median", "0.50");
        verifySend("p75", "0.7505");
        verifySend("p95", "0.9509");
        verifySend("p98", "0.98096");
        verifySend("p99", "0.99098");
        verifySend("p999", "0.999998");
    }

    public void verifyMeter() {
        verifySend("count", 1);
        verifySend("meanRate", 2.00);
        verifySend("1MinuteRate", 1.00);
        verifySend("5MinuteRate", 5.00);
        verifySend("15MinuteRate", 15.00);
    }

    public void verifyHistogram() {
        verifySend("min", 1.00);
        verifySend("max", 3.00);
        verifySend("mean", 2.00);
        verifySend("stddev", 1.50);
        verifySend("median", 0.50);
        verifySend("p75", "0.7505");
        verifySend("p95", "0.9509");
        verifySend("p98", "0.98096");
        verifySend("p99", "0.99098");
        verifySend("p999", "0.999998");
    }

    public void verifyCounter(long count) {
        verifySend(count);
    }

    @Test
    public final void counter() throws Exception {
        final long count = new Random().nextInt(Integer.MAX_VALUE);
        addMetricAndRunReporter(
                new Callable<Counter>() {
                    @Override
                    public Counter call() throws Exception {
                        return createCounter(count);
                    }
                });
        verifyCounter(count);
    }

    @Test
    public final void histogram() throws Exception {
        addMetricAndRunReporter(
                new Callable<Histogram>() {
                    @Override
                    public Histogram call() throws Exception {
                        return createHistogram();
                    }
                });
        verifyHistogram();
    }

    @Test
    public final void meter() throws Exception {
        addMetricAndRunReporter(
                new Callable<Meter>() {
                    @Override
                    public Meter call() throws Exception {
                        return createMeter();
                    }
                });
        verifyMeter();
    }

    @Test
    public final void timer() throws Exception {
        addMetricAndRunReporter(
                new Callable<Timer>() {
                    @Override
                    public Timer call() throws Exception {
                        return createTimer();
                    }
                });
        verifyTimer();
    }

    @Test
    public final void longGauge() throws Exception {
        final long value = 0xdeadbeef;
        addMetricAndRunReporter(
                new Callable<Gauge<Object>>() {
                    @Override
                    public Gauge<Object> call() throws Exception {
                        return createGauge(value);
                    }
                });
        verifySend(value);
    }

    @Test
    public void stringGauge() throws Exception {
        // Non-numeric gauge values cannot be sent as statsd gauges; nothing should be emitted.
        final String value = "The Metric";
        addMetricAndRunReporter(
                new Callable<Gauge<Object>>() {
                    @Override
                    public Gauge<Object> call() throws Exception {
                        return createGauge(value);
                    }
                });
        verify(statsD, never()).gauge(Matchers.anyString(), Matchers.anyDouble());
    }

    // Mock factories: each returns a Mockito mock whose processWith() dispatches to the
    // correct MetricProcessor callback, as the real yammer metric classes would.
    static Counter createCounter(long count) throws Exception {
        final Counter mock = mock(Counter.class);
        when(mock.count()).thenReturn(count);
        return configureMatcher(mock, doAnswer(new MetricsProcessorAction() {
            @Override
            void delegateToProcessor(MetricProcessor<Object> processor, MetricName name, Object context) throws Exception {
                processor.processCounter(name, mock, context);
            }
        }));
    }

    static Histogram createHistogram() throws Exception {
        final Histogram mock = mock(Histogram.class);
        setupSummarizableMock(mock);
        setupSamplingMock(mock);
        return configureMatcher(mock, doAnswer(new MetricsProcessorAction() {
            @Override
            void delegateToProcessor(MetricProcessor<Object> processor, MetricName name, Object context) throws Exception {
                processor.processHistogram(name, mock, context);
            }
        }));
    }

    static Gauge<Object> createGauge(Object value) throws Exception {
        @SuppressWarnings("unchecked")
        final Gauge<Object> mock = mock(Gauge.class);
        when(mock.value()).thenReturn(value);
        return configureMatcher(mock, doAnswer(new MetricsProcessorAction() {
            @Override
            void delegateToProcessor(MetricProcessor<Object> processor, MetricName name, Object context) throws Exception {
                processor.processGauge(name, mock, context);
            }
        }));
    }

    static Timer createTimer() throws Exception {
        final Timer mock = mock(Timer.class);
        when(mock.durationUnit()).thenReturn(TimeUnit.MILLISECONDS);
        setupSummarizableMock(mock);
        setupMeteredMock(mock);
        setupSamplingMock(mock);
        return configureMatcher(mock, doAnswer(new MetricsProcessorAction() {
            @Override
            void delegateToProcessor(MetricProcessor<Object> processor, MetricName name, Object context) throws Exception {
                processor.processTimer(name, mock, context);
            }
        }));
    }

    static Meter createMeter() throws Exception {
        final Meter mock = mock(Meter.class);
        setupMeteredMock(mock);
        return configureMatcher(mock, doAnswer(new MetricsProcessorAction() {
            @Override
            void delegateToProcessor(MetricProcessor<Object> processor, MetricName name, Object context) throws Exception {
                processor.processMeter(name, mock, context);
            }
        }));
    }

    // Wires the given Answer stub to the mock's processWith() visitor entry point.
    @SuppressWarnings("unchecked")
    static <T extends Metric> T configureMatcher(T mock, Stubber stub) throws Exception {
        stub.when(mock).processWith(any(MetricProcessor.class), any(MetricName.class), any());
        return mock;
    }

    // Adapter that unpacks processWith() arguments and forwards to the typed callback.
    static abstract class MetricsProcessorAction implements Answer<Object> {
        @Override
        public Object answer(InvocationOnMock invocation) throws Throwable {
            @SuppressWarnings("unchecked")
            final MetricProcessor<Object> processor = (MetricProcessor<Object>) invocation.getArguments()[0];
            final MetricName name = (MetricName) invocation.getArguments()[1];
            final Object context = invocation.getArguments()[2];
            delegateToProcessor(processor, name, context);
            return null;
        }

        abstract void delegateToProcessor(MetricProcessor<Object> processor, MetricName name, Object context) throws Exception;
    }

    static void setupSummarizableMock(Summarizable summarizable) {
        when(summarizable.min()).thenReturn(1d);
        when(summarizable.max()).thenReturn(3d);
        when(summarizable.mean()).thenReturn(2d);
        when(summarizable.stdDev()).thenReturn(1.5d);
    }

    static void setupMeteredMock(Metered metered) {
        when(metered.count()).thenReturn(1L);
        when(metered.oneMinuteRate()).thenReturn(1d);
        when(metered.fiveMinuteRate()).thenReturn(5d);
        when(metered.fifteenMinuteRate()).thenReturn(15d);
        when(metered.meanRate()).thenReturn(2d);
        when(metered.eventType()).thenReturn("eventType");
        when(metered.rateUnit()).thenReturn(TimeUnit.SECONDS);
    }

    static void setupSamplingMock(Sampling sampling) { //be careful how snapshot defines statistics
        // 1001 evenly spaced values in [0.000, 1.000] make the expected percentiles exact.
        final double[] values = new double[1001];
        for (int i = 0; i < values.length; i++) {
            values[i] = i / 1000.0;
        }
        when(sampling.getSnapshot()).thenReturn(new Snapshot(values));
    }
}
/*
* Copyright (c) 2015. Airbnb.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.airbnb.metrics;
import com.yammer.metrics.core.MetricName;
import org.junit.Test;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
/**
*
*/
public class ParserTest {
@Test
public void testParseTagInMBeanNameWithSuffix() throws Exception {
MetricName name = new MetricName("kafka.producer",
"ProducerRequestMetrics", "ProducerRequestSize",
"clientId.group7", "kafka.producer:type=ProducerRequestMetrics,name=ProducerRequestSize,clientId=group7");
Parser p = new ParserForTagInMBeanName();
p.parse(name);
assertEquals(p.getName(), "kafka.producer.ProducerRequestMetrics.ProducerRequestSize_all");
assertArrayEquals(p.getTags(), new String[]{"clientId:group7"});
}
@Test
public void testParseTagInMBeanNameWithSuffixWithoutClientId() throws Exception {
MetricName name = new MetricName("kafka.producer",
"ProducerRequestMetrics", "ProducerRequestSize",
null, "kafka.producer:type=ProducerRequestMetrics,name=ProducerRequestSize");
Parser p = new ParserForTagInMBeanName();
p.parse(name);
assertEquals(p.getName(), "kafka.producer.ProducerRequestMetrics.ProducerRequestSize_all");
assertArrayEquals(p.getTags(), new String[]{"clientId:unknown"});
}
@Test
public void testParseTagInMBeanNameWithoutSuffix() throws Exception {
MetricName name = new MetricName("kafka.producer",
"ProducerRequestMetrics", "ProducerRequestSize",
"clientId.group7.brokerPort.9092.brokerHost.10_1_152_206",
"kafka.producer:type=ProducerRequestMetrics,name=ProducerRequestSize,clientId=group7,brokerPort=9092,brokerHost=10.1.152.206");
Parser p = new ParserForTagInMBeanName();
p.parse(name);
assertEquals(p.getName(), "kafka.producer.ProducerRequestMetrics.ProducerRequestSize");
assertArrayEquals(p.getTags(), new String[]{"clientId:group7", "brokerPort:9092", "brokerHost:10.1.152.206"});
}
@Test
public void testParseTagInMBeanNameWithoutClientId() throws Exception {
MetricName name = new MetricName("kafka.producer",
"ProducerRequestMetrics", "ProducerRequestSize",
"brokerPort.9092.brokerHost.10_1_152_206", "kafka.producer:type=ProducerRequestMetrics,name=ProducerRequestSize,brokerPort=9092,brokerHost=10.1.152.206");
Parser p = new ParserForTagInMBeanName();
p.parse(name);
assertEquals(p.getName(), "kafka.producer.ProducerRequestMetrics.ProducerRequestSize");
assertArrayEquals(p.getTags(), new String[]{"clientId:unknown", "brokerPort:9092", "brokerHost:10.1.152.206"});
}
@Test
public void testParseTagInMBeanNameWithoutSuffixForConsumer() throws Exception {
MetricName name = new MetricName("kafka.consumer",
"ZookeeperConsumerConnector", "ZooKeeperCommitsPerSec",
"clientId.group7",
"kafka.consumer:type=ZookeeperConsumerConnector,name=ZooKeeperCommitsPerSec,clientId=group7");
Parser p = new ParserForTagInMBeanName();
p.parse(name);
assertEquals(p.getName(), "kafka.consumer.ZookeeperConsumerConnector.ZooKeeperCommitsPerSec");
assertArrayEquals(p.getTags(), new String[]{"clientId:group7"});
}
@Test
public void testParseTagInMBeanNameNoTag() throws Exception {
    // Non-producer metric with no scope: no tags are produced at all.
    MetricName name = new MetricName("kafka.server",
        "ReplicaManager", "LeaderCount",
        null, "kafka.server:type=ReplicaManager,name=LeaderCount");
    Parser p = new ParserForTagInMBeanName();
    p.parse(name);
    // Fixed argument order: assertEquals/assertArrayEquals take (expected, actual).
    assertEquals("kafka.server.ReplicaManager.LeaderCount", p.getName());
    assertArrayEquals(new String[]{}, p.getTags());
}
@Test
public void testParseNoTag() throws Exception {
    // ParserForNoTag folds everything into the dotted name and never emits tags.
    MetricName name = new MetricName("kafka.producer",
        "ProducerRequestMetrics", "group7-AllBrokersProducerRequestSize");
    Parser p = new ParserForNoTag();
    p.parse(name);
    // Fixed argument order: assertEquals/assertArrayEquals take (expected, actual).
    assertEquals("kafka.producer.ProducerRequestMetrics.group7-AllBrokersProducerRequestSize", p.getName());
    assertArrayEquals(new String[]{}, p.getTags());
}
}
| 9,789 |
0 | Create_ds/kafka-statsd-metrics2/src/test/java/com/airbnb | Create_ds/kafka-statsd-metrics2/src/test/java/com/airbnb/metrics/MetricNameFormatterTest.java | /*
* Copyright (c) 2015. Airbnb.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.airbnb.metrics;
import com.yammer.metrics.core.MetricName;
import org.junit.Test;
import static com.airbnb.metrics.MetricNameFormatter.format;
import static com.airbnb.metrics.MetricNameFormatter.formatWithScope;
import static org.junit.Assert.assertEquals;
/**
*
*/
/**
 * Unit tests for {@link MetricNameFormatter}.
 *
 * <p>All assertions use JUnit's documented (expected, actual) argument order;
 * the previous order was swapped, which produced misleading failure messages.
 */
public class MetricNameFormatterTest {

    @Test
    public void testFormat() throws Exception {
        // format() ignores the scope entirely — null and non-null scopes
        // yield the same dotted "group.type.name".
        assertEquals("kafka.common.AppInfo.Version",
            format(new MetricName("kafka.common", "AppInfo", "Version", null, "kafka.common:type=AppInfo,name=Version")));
        assertEquals("kafka.common.AppInfo.Version",
            format(new MetricName("kafka.common", "AppInfo", "Version", "my_scope", "kafka.common:type=AppInfo,name=Version")));
    }

    @Test
    public void testFormatWithScope() throws Exception {
        // formatWithScope() inserts the scope segment only when it is
        // both non-null and non-empty.
        assertEquals("kafka.common.AppInfo.Version",
            formatWithScope(new MetricName("kafka.common", "AppInfo", "Version", null, "kafka.common:type=AppInfo,name=Version")));
        assertEquals("kafka.common.AppInfo.Version",
            formatWithScope(new MetricName("kafka.common", "AppInfo", "Version", "", "kafka.common:type=AppInfo,name=Version")));
        assertEquals("kafka.common.AppInfo.my_scope.Version",
            formatWithScope(new MetricName("kafka.common", "AppInfo", "Version", "my_scope", "kafka.common:type=AppInfo,name=Version")));
    }
}
| 9,790 |
0 | Create_ds/kafka-statsd-metrics2/src/test/java/com/airbnb | Create_ds/kafka-statsd-metrics2/src/test/java/com/airbnb/metrics/DimensionTest.java | /*
* Copyright (c) 2015. Airbnb.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.airbnb.metrics;
import org.junit.Test;
import java.util.EnumSet;
import java.util.Properties;
import static org.junit.Assert.*;
/**
*
*/
/**
 * Unit tests for {@link Dimension}.
 */
public class DimensionTest {

    @Test
    public void create_from_properties() {
        String prefix = "foo.";
        Properties p = new Properties();
        p.setProperty(prefix + "count", "true");
        p.setProperty(prefix + "meanRate", "false");

        // Only dimensions whose "<prefix><name>" property is "true" are included.
        EnumSet<Dimension> dimensions = Dimension.fromProperties(p, prefix);
        assertTrue(dimensions.contains(Dimension.count));
        assertFalse(dimensions.contains(Dimension.meanRate));
        // Fixed argument order: assertEquals takes (expected, actual).
        assertEquals("1MinuteRate", Dimension.rate1m.displayName);
    }
}
| 9,791 |
0 | Create_ds/kafka-statsd-metrics2/src/test/java/com/airbnb | Create_ds/kafka-statsd-metrics2/src/test/java/com/airbnb/metrics/KafkaStatsDReporterTest.java | package com.airbnb.metrics;
import com.timgroup.statsd.StatsDClient;
import java.util.HashMap;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import static org.mockito.Mockito.verify;
/**
 * Verifies that {@code KafkaStatsDReporter} forwards the values of registered
 * metrics to the underlying {@link StatsDClient} as gauges.
 */
public class KafkaStatsDReporterTest {
    @Mock
    private StatsDClient statsD;
    private KafkaStatsDReporter reporter;
    private StatsDMetricsRegistry registry;

    @Before
    public void init() throws Exception {
        MockitoAnnotations.initMocks(this);
        registry = new StatsDMetricsRegistry();
        reporter = new KafkaStatsDReporter(statsD, registry);
    }

    /**
     * Registers {@code metric} under the given statsd name and tag, runs one
     * report cycle, and always shuts the reporter down afterwards.
     */
    protected void addMetricAndRunReporter(
        Metric metric,
        String metricName,
        String tag
    ) throws Exception {
        try {
            registry.register(metric.metricName(), new MetricInfo(metric, metricName, tag));
            reporter.run();
        } finally {
            reporter.shutdown();
        }
    }

    @Test
    public final void sendDoubleGauge() throws Exception {
        final double expectedValue = 10.11;
        Metric gauge = new Metric() {
            @Override
            public MetricName metricName() {
                return new MetricName("test-metric", "group", "", new HashMap<>());
            }

            @Override
            public double value() {
                return expectedValue;
            }

            // Kafka v1.0.0 added metricValue() to the Metric interface; providing
            // it (without @Override) keeps this test compiling on newer clients too.
            public Object metricValue() {
                return expectedValue;
            }
        };

        addMetricAndRunReporter(gauge, "foo", "bar");
        verify(statsD).gauge(Matchers.eq("foo"), Matchers.eq(expectedValue), Matchers.eq("bar"));
    }
}
| 9,792 |
0 | Create_ds/kafka-statsd-metrics2/src/test/java/com/airbnb/kafka | Create_ds/kafka-statsd-metrics2/src/test/java/com/airbnb/kafka/kafka09/StatsdMetricsReporterTest.java | package com.airbnb.kafka.kafka09;
import com.airbnb.metrics.MetricInfo;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.timgroup.statsd.NonBlockingStatsDClient;
import com.timgroup.statsd.StatsDClient;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.metrics.KafkaMetric;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Tests for the Kafka 0.9+ {@code StatsdMetricsReporter}.
 */
public class StatsdMetricsReporterTest {
    // Fixture values used when fabricating KafkaMetric mocks below.
    private final String TEST_METRIC_NAME = "test-metric";
    private final String TEST_METRIC_GROUP = "test-group";
    private final String TEST_METRIC_DESCRIPTION = "This is a test metric.";

    private Map<String, String> configs;

    @Before
    public void init() {
        // Baseline reporter config; individual tests override STATSD_REPORTER_ENABLED.
        configs = new HashMap<String, String>();
        configs.put(StatsdMetricsReporter.STATSD_HOST, "127.0.0.1");
        configs.put(StatsdMetricsReporter.STATSD_PORT, "1234");
        configs.put(StatsdMetricsReporter.STATSD_METRICS_PREFIX, "foo");
        configs.put(StatsdMetricsReporter.STATSD_REPORTER_ENABLED, "false");
    }

    @Test
    public void init_should_start_reporter_when_enabled() {
        configs.put(StatsdMetricsReporter.STATSD_REPORTER_ENABLED, "true");
        StatsdMetricsReporter reporter = new StatsdMetricsReporter();
        assertFalse("reporter should not be running", reporter.isRunning());
        reporter.configure(configs);
        reporter.init(new ArrayList<KafkaMetric>());
        assertTrue("reporter should be running once #init has been invoked", reporter.isRunning());
    }

    @Test
    public void init_should_not_start_reporter_when_disabled() {
        configs.put(StatsdMetricsReporter.STATSD_REPORTER_ENABLED, "false");
        StatsdMetricsReporter reporter = new StatsdMetricsReporter();
        assertFalse("reporter should not be running", reporter.isRunning());
        reporter.configure(configs);
        reporter.init(new ArrayList<KafkaMetric>());
        assertFalse("reporter should NOT be running once #init has been invoked", reporter.isRunning());
    }

    @Test
    public void testMetricsReporter_sameMetricNamesWithDifferentTags() {
        // Spy so createStatsd() can be stubbed to hand back a mocked client.
        StatsdMetricsReporter reporter = spy(new StatsdMetricsReporter());
        reporter.configure(ImmutableMap.of(StatsdMetricsReporter.STATSD_REPORTER_ENABLED, "true"));
        StatsDClient mockStatsDClient = mock(NonBlockingStatsDClient.class);
        when(reporter.createStatsd()).thenReturn(mockStatsDClient);

        // Two metrics share name/group/description but differ in tags — the
        // registry must track both as distinct entries.
        KafkaMetric testMetricWithTag = generateMockKafkaMetric(TEST_METRIC_NAME, TEST_METRIC_GROUP, TEST_METRIC_DESCRIPTION, ImmutableMap.of("test-key", "test-value"));
        reporter.init(ImmutableList.of(testMetricWithTag));
        Assert.assertEquals(ImmutableSet.of(testMetricWithTag), getAllKafkaMetricsHelper(reporter));

        KafkaMetric otherTestMetricWithTag = generateMockKafkaMetric(TEST_METRIC_NAME, TEST_METRIC_GROUP, TEST_METRIC_DESCRIPTION, ImmutableMap.of("another-test-key", "another-test-value"));
        reporter.metricChange(otherTestMetricWithTag);
        Assert.assertEquals(ImmutableSet.of(testMetricWithTag, otherTestMetricWithTag), getAllKafkaMetricsHelper(reporter));

        // Run one report cycle and check each registered metric was sent as a gauge.
        reporter.underlying.run();
        reporter.registry.getAllMetricInfo().forEach(info -> verify(mockStatsDClient, atLeastOnce()).gauge(info.getName(), info.getMetric().value(), info.getTags()));
    }

    // Builds a KafkaMetric mock whose metricName() returns the given coordinates.
    private KafkaMetric generateMockKafkaMetric(String name, String group, String description, Map<String, String> tags) {
        KafkaMetric mockMetric = mock(KafkaMetric.class);
        when(mockMetric.metricName()).thenReturn(new MetricName(name, group, description, tags));
        return mockMetric;
    }

    // Extracts the raw Metric objects currently held by the reporter's registry.
    private static Collection<Metric> getAllKafkaMetricsHelper(StatsdMetricsReporter reporter) {
        return reporter.registry.getAllMetricInfo().stream().map(MetricInfo::getMetric).collect(Collectors.toSet());
    }
}
| 9,793 |
0 | Create_ds/kafka-statsd-metrics2/src/test/java/com/airbnb/kafka | Create_ds/kafka-statsd-metrics2/src/test/java/com/airbnb/kafka/kafka08/StatsdMetricsReporterTest.java | /*
* Copyright (c) 2015. Airbnb.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.airbnb.kafka.kafka08;
import kafka.utils.VerifiableProperties;
import org.junit.Before;
import org.junit.Test;
import java.util.Properties;
import static org.easymock.EasyMock.*;
import static org.junit.Assert.*;
/**
 * Tests for the Kafka 0.8 {@code StatsdMetricsReporter}.
 */
public class StatsdMetricsReporterTest {
    private VerifiableProperties properties;

    @Before
    public void init() {
        // EasyMock expectations shared by every test. Each test records its own
        // expectation for "reporter.enabled", then calls replay()/verify();
        // the recording order here must happen before any replay().
        properties = createMock(VerifiableProperties.class);
        expect(properties.props()).andReturn(new Properties());
        expect(properties.getInt("kafka.metrics.polling.interval.secs", 10)).andReturn(11);
        expect(properties.getString("external.kafka.statsd.host", "localhost")).andReturn("127.0.0.1");
        expect(properties.getInt("external.kafka.statsd.port", 8125)).andReturn(1234);
        expect(properties.getString("external.kafka.statsd.metrics.prefix", "")).andReturn("foo");
        expect(properties.getString("external.kafka.statsd.metrics.exclude_regex",
            StatsdMetricsReporter.DEFAULT_EXCLUDE_REGEX)).andReturn("foo");
        expect(properties.getBoolean("external.kafka.statsd.tag.enabled", true)).andReturn(false);
    }

    @Test
    public void mbean_name_should_match() {
        String name = new StatsdMetricsReporter().getMBeanName();
        assertEquals("kafka:type=com.airbnb.kafka.kafka08.StatsdMetricsReporter", name);
    }

    @Test
    public void init_should_start_reporter_when_enabled() {
        expect(properties.getBoolean("external.kafka.statsd.reporter.enabled", false)).andReturn(true);
        replay(properties);
        StatsdMetricsReporter reporter = new StatsdMetricsReporter();
        assertFalse("reporter should not be running", reporter.isRunning());
        reporter.init(properties);
        assertTrue("reporter should be running once #init has been invoked", reporter.isRunning());
        // EasyMock verify(): fails if any recorded expectation went unused.
        verify(properties);
    }

    @Test
    public void init_should_not_start_reporter_when_disabled() {
        expect(properties.getBoolean("external.kafka.statsd.reporter.enabled", false)).andReturn(false);
        replay(properties);
        StatsdMetricsReporter reporter = new StatsdMetricsReporter();
        assertFalse("reporter should not be running", reporter.isRunning());
        reporter.init(properties);
        assertFalse("reporter should NOT be running once #init has been invoked", reporter.isRunning());
        verify(properties);
    }
}
| 9,794 |
0 | Create_ds/kafka-statsd-metrics2/src/main/java/com/airbnb | Create_ds/kafka-statsd-metrics2/src/main/java/com/airbnb/metrics/Parser.java | /*
* Copyright (c) 2015. Airbnb.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.airbnb.metrics;
import com.yammer.metrics.core.MetricName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
*/
/**
 * Base class for translating a yammer {@link MetricName} into a statsd metric
 * name plus an array of "key:value" tag strings. Subclasses populate the
 * {@code name} and {@code tags} fields in {@link #parse(MetricName)}; callers
 * then read the results through the getters.
 */
public abstract class Parser {
    static final Logger log = LoggerFactory.getLogger(Parser.class);

    protected String name;
    protected String[] tags;

    /** Extracts the statsd name and tags from {@code metricName}. */
    public abstract void parse(MetricName metricName);

    /** @return the statsd metric name produced by the last {@link #parse} call */
    public String getName() {
        return name;
    }

    /** @return the tags produced by the last {@link #parse} call */
    public String[] getTags() {
        return tags;
    }
}
| 9,795 |
0 | Create_ds/kafka-statsd-metrics2/src/main/java/com/airbnb | Create_ds/kafka-statsd-metrics2/src/main/java/com/airbnb/metrics/StatsDMetricsRegistry.java | package com.airbnb.metrics;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.kafka.common.MetricName;
/**
 * Registry of the Kafka metrics currently published to statsd, keyed by the
 * metric's {@link MetricName}.
 *
 * <p>Backed by a {@link ConcurrentHashMap}: entries are registered/unregistered
 * from Kafka's metric-change callbacks while the reporter's scheduled task
 * iterates {@link #getAllMetricInfo()} (NOTE(review): assumes those run on
 * different threads, as KafkaStatsDReporter's run/shutdown lifecycle suggests —
 * confirm). A plain HashMap could throw ConcurrentModificationException
 * mid-report under that interleaving.
 */
public class StatsDMetricsRegistry {
    private final Map<MetricName, MetricInfo> metrics;

    public StatsDMetricsRegistry() {
        metrics = new ConcurrentHashMap<>();
    }

    /** Adds (or replaces) the info published for {@code metricName}. */
    public void register(MetricName metricName, MetricInfo metricInfo) {
        metrics.put(metricName, metricInfo);
    }

    /** Stops publishing {@code metricName}; a no-op if it was never registered. */
    public void unregister(MetricName metricName) {
        metrics.remove(metricName);
    }

    /**
     * @return a live, weakly-consistent view of all registered metric info;
     *     iteration never fails even if entries are added/removed concurrently
     */
    public Collection<MetricInfo> getAllMetricInfo() {
        return metrics.values();
    }
}
| 9,796 |
0 | Create_ds/kafka-statsd-metrics2/src/main/java/com/airbnb | Create_ds/kafka-statsd-metrics2/src/main/java/com/airbnb/metrics/ParserForTagInMBeanName.java | /*
* Copyright (c) 2015. Airbnb.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.airbnb.metrics;
import com.yammer.metrics.core.MetricName;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;
import static com.airbnb.metrics.MetricNameFormatter.format;
/**
* Parser for kafka 0.8.2 or later version
* where the MBeanName contains tags and
* Scope will store tags as well.
*/
/**
 * Parser for kafka 0.8.2 or later version
 * where the MBeanName contains tags and
 * Scope will store tags as well.
 */
public class ParserForTagInMBeanName extends Parser {
    // Suffix marking metrics that aggregate over all tag values.
    public static final String SUFFIX_FOR_ALL = "_all";
    // Default tag set for producer metrics that carry no clientId.
    public static final String[] UNKNOWN_TAG = new String[]{"clientId:unknown"};
    public static final String[] EMPTY_TAG = new String[]{};

    @Override
    public void parse(MetricName metricName) {
        // If this metric type normally carries identifying tags in its MBean
        // name (see tagRegexMap) but this instance's MBean name does NOT match
        // the expected pattern, treat the metric as an aggregate: append "_all".
        Pattern p = tagRegexMap.get(metricName.getType());
        if (p != null && !p.matcher(metricName.getMBeanName()).matches()) {
            name = format(metricName, SUFFIX_FOR_ALL);
        } else {
            name = format(metricName);
        }
        tags = parseTags(metricName);
    }

    /**
     * Derives statsd "key:value" tags from the MBean name: everything after
     * "<metric name>," is treated as a comma-separated list of JMX k=v pairs.
     * Producer metrics always receive a clientId tag ("clientId:unknown" when
     * the MBean name carries none).
     */
    private String[] parseTags(MetricName metricName) {
        String[] tags = EMPTY_TAG;
        if (metricName.hasScope()) {
            final String name = metricName.getName();
            final String mBeanName = metricName.getMBeanName();
            final int idx = mBeanName.indexOf(name);
            if (idx < 0) {
                log.error("Cannot find name[{}] in MBeanName[{}]", name, mBeanName);
            } else {
                // Skip past "<name>," (name plus the trailing comma).
                String tagStr = mBeanName.substring(idx + name.length() + 1);
                if ("kafka.producer".equals(metricName.getGroup()) &&
                    !tagStr.contains("clientId")) {
                    tagStr = "clientId=unknown,".concat(tagStr);
                }
                if (tagStr.length() > 0) {
                    // Convert JMX "k=v" pairs into statsd-style "k:v" tags.
                    tags = tagStr.replace('=', ':').split(",");
                }
            }
        } else if ("kafka.producer".equals(metricName.getGroup())) {
            tags = UNKNOWN_TAG;
        }
        return tags;
    }

    // Per-metric-type patterns describing MBean names that DO carry their
    // identifying tags; an MBean name that fails its type's pattern is treated
    // as an "_all" aggregate in parse() above.
    public static final Map<String, Pattern> tagRegexMap = new ConcurrentHashMap<String, Pattern>();

    static {
        tagRegexMap.put("BrokerTopicMetrics", Pattern.compile(".*topic=.*"));
        tagRegexMap.put("DelayedProducerRequestMetrics", Pattern.compile(".*topic=.*"));
        tagRegexMap.put("ProducerTopicMetrics", Pattern.compile(".*topic=.*"));
        tagRegexMap.put("ProducerRequestMetrics", Pattern.compile(".*brokerHost=.*"));
        tagRegexMap.put("ConsumerTopicMetrics", Pattern.compile(".*topic=.*"));
        tagRegexMap.put("FetchRequestAndResponseMetrics", Pattern.compile(".*brokerHost=.*"));
        // Matches either OwnedPartitionsCount WITH a topic tag, or any MBean
        // name that is not OwnedPartitionsCount at all (negative lookahead).
        tagRegexMap.put("ZookeeperConsumerConnector", Pattern.compile(".*name=OwnedPartitionsCount,.*topic=.*|^((?!name=OwnedPartitionsCount).)*$"));
    }
}
| 9,797 |
0 | Create_ds/kafka-statsd-metrics2/src/main/java/com/airbnb | Create_ds/kafka-statsd-metrics2/src/main/java/com/airbnb/metrics/ParserForNoTag.java | /*
* Copyright (c) 2015. Airbnb.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.airbnb.metrics;
import com.yammer.metrics.core.MetricName;
import static com.airbnb.metrics.MetricNameFormatter.formatWithScope;
/**
* Parser for statsd not supporting tags
*/
/**
 * Parser used when the statsd backend does not support tags: the metric's
 * scope is folded into the dotted name and the tag array is always empty.
 */
public class ParserForNoTag extends Parser {
    public static final String[] EMPTY_TAG = new String[]{};

    @Override
    public void parse(MetricName metricName) {
        tags = EMPTY_TAG;
        name = formatWithScope(metricName);
    }
}
| 9,798 |
0 | Create_ds/kafka-statsd-metrics2/src/main/java/com/airbnb | Create_ds/kafka-statsd-metrics2/src/main/java/com/airbnb/metrics/MetricNameFormatter.java | /*
* Copyright (c) 2015. Airbnb.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.airbnb.metrics;
import com.yammer.metrics.core.MetricName;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Translates yammer {@link MetricName}s into dotted statsd metric names.
 */
public class MetricNameFormatter {
    static final Pattern whitespaceRegex = Pattern.compile("\\s+");

    /**
     * Formats as {@code group.type[.scope].name}; the scope segment appears
     * only when the metric has a non-null, non-empty scope.
     */
    public static String formatWithScope(MetricName metricName) {
        StringBuilder out = new StringBuilder(128);
        out.append(metricName.getGroup()).append('.').append(metricName.getType()).append('.');
        if (metricName.hasScope() && !metricName.getScope().isEmpty()) {
            out.append(metricName.getScope()).append(".");
        }
        return out.append(sanitizeName(metricName.getName())).toString();
    }

    /** Formats as {@code group.type.name}, ignoring any scope. */
    public static String format(MetricName metricName) {
        return format(metricName, "");
    }

    /** Formats as {@code group.type.name} followed by {@code suffix}, ignoring any scope. */
    public static String format(MetricName metricName, String suffix) {
        return metricName.getGroup() + '.' + metricName.getType() + '.'
            + sanitizeName(metricName.getName()) + suffix;
    }

    /** Replaces every run of whitespace in {@code name} with a single underscore. */
    public static String sanitizeName(String name) {
        Matcher m = whitespaceRegex.matcher(name);
        return m.find() ? m.replaceAll("_") : name;
    }
}
| 9,799 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.