index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/geronimo-metrics/geronimo-metrics/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics/src/main/java/org/apache/geronimo/microprofile/metrics/cdi/MeteredInterceptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.cdi;
import org.eclipse.microprofile.metrics.Meter;
import org.eclipse.microprofile.metrics.MetricID;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.eclipse.microprofile.metrics.Tag;
import org.eclipse.microprofile.metrics.annotation.Metered;
import javax.annotation.Priority;
import javax.enterprise.inject.Intercepted;
import javax.enterprise.inject.spi.AnnotatedType;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import javax.inject.Inject;
import javax.interceptor.AroundConstruct;
import javax.interceptor.AroundInvoke;
import javax.interceptor.Interceptor;
import javax.interceptor.InvocationContext;
import java.io.Serializable;
import java.lang.reflect.Executable;
import java.lang.reflect.Modifier;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.stream.Stream;
import static java.util.Optional.ofNullable;
@Metered
@Interceptor
@Priority(Interceptor.Priority.LIBRARY_BEFORE)
public class MeteredInterceptor implements Serializable {
    @Inject
    private MetricRegistry registry;
    @Inject
    @Intercepted
    private Bean<?> bean;
    @Inject
    private BeanManager beanManager;
    @Inject
    private MetricsExtension extension;
    // Per-executable meter cache; transient + volatile so it is rebuilt lazily after deserialization.
    private transient volatile ConcurrentMap<Executable, Meter> meters = new ConcurrentHashMap<>();
    /** Marks the meter bound to the intercepted constructor, then proceeds. */
    @AroundConstruct
    public Object onConstructor(final InvocationContext context) throws Exception {
        findMeter(context.getConstructor()).mark();
        return context.proceed();
    }
    /** Marks the meter bound to the intercepted method, then proceeds. */
    @AroundInvoke
    public Object onMethod(final InvocationContext context) throws Exception {
        findMeter(context.getMethod()).mark();
        return context.proceed();
    }
    // Returns the cached Meter for the executable, resolving and caching it on first use.
    private Meter findMeter(final Executable executable) {
        ConcurrentMap<Executable, Meter> cache = meters;
        if (cache == null) { // deserialized instance: recreate the cache exactly once
            synchronized (this) {
                if (meters == null) {
                    meters = new ConcurrentHashMap<>();
                }
                cache = meters;
            }
        }
        final Meter cached = cache.get(executable);
        if (cached != null) {
            return cached;
        }
        final Meter resolved = resolveMeter(executable);
        cache.putIfAbsent(executable, resolved);
        return resolved;
    }
    // Resolves the Meter from the registry using the @Metered metadata declared on the bean type.
    private Meter resolveMeter(final Executable executable) {
        final AnnotatedType<?> type = beanManager.createAnnotatedType(bean.getBeanClass());
        final Metered metered = Stream.concat(type.getMethods().stream(), type.getConstructors().stream())
                .filter(member -> member.getJavaMember().equals(executable))
                .findFirst()
                .map(member -> member.getAnnotation(Metered.class))
                .orElse(null);
        final Class<?> declaring = executable.getDeclaringClass();
        final String name = Names.findName(
                Modifier.isAbstract(declaring.getModifiers()) ? type.getJavaClass() : declaring,
                executable, metered == null ? null : metered.name(),
                metered != null && metered.absolute(),
                ofNullable(extension.getAnnotation(type, Metered.class)).map(Metered::name).orElse(""));
        final Meter meter = registry.getMeter(
                new MetricID(name, extension.createTags(metered == null ? new String[0] : metered.tags())));
        if (meter == null) {
            throw new IllegalStateException("No meter with name [" + name + "] found in registry [" + registry + "]");
        }
        return meter;
    }
}
| 8,500 |
0 | Create_ds/geronimo-metrics/geronimo-metrics/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics/src/main/java/org/apache/geronimo/microprofile/metrics/cdi/CountedInterceptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.cdi;
import org.eclipse.microprofile.metrics.Counter;
import org.eclipse.microprofile.metrics.MetricID;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.eclipse.microprofile.metrics.Tag;
import org.eclipse.microprofile.metrics.annotation.Counted;
import javax.annotation.Priority;
import javax.enterprise.inject.Intercepted;
import javax.enterprise.inject.spi.AnnotatedType;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import javax.inject.Inject;
import javax.interceptor.AroundConstruct;
import javax.interceptor.AroundInvoke;
import javax.interceptor.Interceptor;
import javax.interceptor.InvocationContext;
import java.io.Serializable;
import java.lang.reflect.Executable;
import java.lang.reflect.Modifier;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.stream.Stream;
import static java.util.Optional.ofNullable;
@Counted
@Interceptor
@Priority(Interceptor.Priority.LIBRARY_BEFORE)
public class CountedInterceptor implements Serializable {
    @Inject
    private MetricRegistry registry;
    @Inject
    @Intercepted
    private Bean<?> bean;
    @Inject
    private BeanManager beanManager;
    @Inject
    private MetricsExtension extension;
    // Per-executable counter cache; transient + volatile so it is rebuilt lazily after deserialization.
    private transient volatile ConcurrentMap<Executable, Counter> counters = new ConcurrentHashMap<>();
    /** Increments the counter bound to the intercepted constructor, then proceeds. */
    @AroundConstruct
    public Object onConstructor(final InvocationContext context) throws Exception {
        return invoke(context, context.getConstructor());
    }
    /** Increments the counter bound to the intercepted method, then proceeds. */
    @AroundInvoke
    public Object onMethod(final InvocationContext context) throws Exception {
        return invoke(context, context.getMethod());
    }
    // Shared interception path: count the invocation and delegate.
    private Object invoke(final InvocationContext context, final Executable executable) throws Exception {
        findCounter(executable).inc();
        return context.proceed();
    }
    // Returns the cached Counter for the executable, resolving and caching it on first use.
    private Counter findCounter(final Executable executable) {
        ConcurrentMap<Executable, Counter> cache = counters;
        if (cache == null) { // deserialized instance: recreate the cache exactly once
            synchronized (this) {
                if (counters == null) {
                    counters = new ConcurrentHashMap<>();
                }
                cache = counters;
            }
        }
        final Counter cached = cache.get(executable);
        if (cached != null) {
            return cached;
        }
        final Counter resolved = resolveCounter(executable);
        cache.putIfAbsent(executable, resolved);
        return resolved;
    }
    // Resolves the Counter from the registry using the @Counted metadata declared on the bean type.
    private Counter resolveCounter(final Executable executable) {
        final AnnotatedType<?> type = beanManager.createAnnotatedType(bean.getBeanClass());
        final Counted counted = Stream.concat(type.getMethods().stream(), type.getConstructors().stream())
                .filter(member -> member.getJavaMember().equals(executable))
                .findFirst()
                .map(member -> member.getAnnotation(Counted.class))
                .orElse(null);
        final Class<?> declaring = executable.getDeclaringClass();
        final String name = Names.findName(
                Modifier.isAbstract(declaring.getModifiers()) ? type.getJavaClass() : declaring,
                executable, counted == null ? null : counted.name(),
                counted != null && counted.absolute(),
                ofNullable(extension.getAnnotation(type, Counted.class)).map(Counted::name).orElse(""));
        final Counter counter = registry.getCounter(
                new MetricID(name, extension.createTags(counted == null ? new String[0] : counted.tags())));
        if (counter == null) {
            throw new IllegalStateException("No counter with name [" + name + "] found in registry [" + registry + "]");
        }
        return counter;
    }
}
| 8,501 |
0 | Create_ds/geronimo-metrics/geronimo-metrics/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics/src/main/java/org/apache/geronimo/microprofile/metrics/jaxrs/CdiMetricsEndpoints.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.jaxrs;
import static org.eclipse.microprofile.metrics.MetricRegistry.Type.BASE;
import static org.eclipse.microprofile.metrics.MetricRegistry.Type.VENDOR;
import javax.annotation.PostConstruct;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import javax.ws.rs.Path;
import org.apache.geronimo.microprofile.metrics.common.jaxrs.MetricsEndpoints;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.eclipse.microprofile.metrics.annotation.RegistryType;
@Path("metrics")
@ApplicationScoped
public class CdiMetricsEndpoints extends MetricsEndpoints {
    @Inject
    @RegistryType(type = BASE)
    private MetricRegistry baseRegistry;
    @Inject
    @RegistryType(type = VENDOR)
    private MetricRegistry vendorRegistry;
    @Inject
    private MetricRegistry applicationRegistry;
    /**
     * Wires the CDI-injected registries into the shared endpoint implementation
     * once all injections are satisfied, then lets the parent finish its setup.
     */
    @PostConstruct
    protected void init() {
        setBaseRegistry(baseRegistry);
        setVendorRegistry(vendorRegistry);
        setApplicationRegistry(applicationRegistry);
        super.init();
    }
}
| 8,502 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-tomcat/src/test/java/org/apache/geronimo/microprofile/metrics/extension | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-tomcat/src/test/java/org/apache/geronimo/microprofile/metrics/extension/tomcat/TomcatExtensionTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.extension.tomcat;
import static org.junit.Assert.assertNotNull;
import javax.enterprise.inject.spi.CDI;
import org.apache.geronimo.microprofile.metrics.extension.common.RegistryTypeLiteral;
import org.apache.meecrowave.Meecrowave;
import org.apache.meecrowave.junit.MeecrowaveRule;
import org.eclipse.microprofile.metrics.Gauge;
import org.eclipse.microprofile.metrics.MetricID;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.junit.ClassRule;
import org.junit.Test;
public class TomcatExtensionTest {
    @ClassRule
    public static final MeecrowaveRule MEECROWAVE = new MeecrowaveRule(new Meecrowave.Builder() {{
        setTomcatNoJmx(false);
    }}, "");
    @Test
    public void checkTomcatRegistration() {
        // the Tomcat extension registers its gauges in the BASE registry by default
        final MetricRegistry baseRegistry = CDI.current()
                .select(MetricRegistry.class, new RegistryTypeLiteral(MetricRegistry.Type.BASE))
                .get();
        final MetricID expectedId = new MetricID(
                "server.executor.port_" + MEECROWAVE.getConfiguration().getHttpPort() + ".active");
        final Gauge gauge = baseRegistry.getGauges().get(expectedId);
        assertNotNull(gauge);
        assertNotNull(gauge.getValue());
    }
}
| 8,503 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-tomcat/src/main/java/org/apache/geronimo/microprofile/metrics/extension | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-tomcat/src/main/java/org/apache/geronimo/microprofile/metrics/extension/tomcat/TomcatExtension.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.extension.tomcat;
import java.util.Set;
import javax.enterprise.event.Observes;
import javax.enterprise.inject.spi.AfterDeploymentValidation;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import javax.enterprise.inject.spi.BeforeShutdown;
import javax.enterprise.inject.spi.Extension;
import org.apache.geronimo.microprofile.metrics.extension.common.MicroprofileMetricsAdapter;
import org.apache.geronimo.microprofile.metrics.extension.common.RegistryTypeLiteral;
import org.eclipse.microprofile.metrics.MetricRegistry;
public class TomcatExtension implements Extension {
    private TomcatRegistrar registrar;
    // Once the deployment is validated, resolve the configured registry and start
    // registering the Tomcat connector gauges into it.
    void afterValidation(@Observes final AfterDeploymentValidation validation,
                         final BeanManager beanManager) {
        final MetricRegistry.Type registryType = MetricRegistry.Type.valueOf(
                System.getProperty("geronimo.metrics.tomcat.registry.type", "BASE"));
        final MicroprofileMetricsAdapter adapter = new MicroprofileMetricsAdapter(
                resolveRegistry(beanManager, registryType));
        registrar = new TomcatRegistrar(adapter.registrer(), adapter.unregistrer());
        registrar.start();
    }
    // Stops the registrar if it was started (validation may have failed before start).
    void beforeShutdown(@Observes final BeforeShutdown beforeShutdown) {
        if (registrar != null) {
            registrar.stop();
        }
    }
    // Looks up the MetricRegistry bean carrying the requested @RegistryType qualifier.
    private MetricRegistry resolveRegistry(final BeanManager beanManager, final MetricRegistry.Type registryType) {
        final Set<Bean<?>> beans = beanManager.getBeans(MetricRegistry.class, new RegistryTypeLiteral(registryType));
        return MetricRegistry.class.cast(beanManager.getReference(
                beanManager.resolve(beans), MetricRegistry.class, beanManager.createCreationalContext(null)));
    }
}
| 8,504 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-tomcat/src/main/java/org/apache/geronimo/microprofile/metrics/extension | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-tomcat/src/main/java/org/apache/geronimo/microprofile/metrics/extension/tomcat/TomcatRegistrar.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.extension.tomcat;
import java.lang.annotation.Annotation;
import java.lang.management.ManagementFactory;
import java.util.Collection;
import java.util.HashSet;
import java.util.Objects;
import java.util.ServiceLoader;
import java.util.concurrent.Executor;
import java.util.function.Consumer;
import java.util.function.Supplier;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import javax.management.MBeanServer;
import javax.management.ObjectInstance;
import javax.management.ObjectName;
import org.apache.catalina.Server;
import org.apache.catalina.Service;
import org.apache.catalina.connector.Connector;
import org.apache.coyote.AbstractProtocol;
import org.apache.geronimo.microprofile.metrics.extension.common.Definition;
import org.apache.geronimo.microprofile.metrics.extension.common.ThrowingSupplier;
import org.apache.tomcat.util.threads.ThreadPoolExecutor;
// this is a class working outside a MP server, don't import CDI or anything not selfcontained!
/**
 * Discovers Tomcat connector thread pools and registers gauges for them through the
 * provided callbacks.
 *
 * <p>Discovery happens in two passes: first from live {@link Server} instances
 * (Meecrowave, TomEE, or any {@link ServerRegistration} SPI implementation), then as a
 * fallback from the platform MBean server for plain Tomcat installs, where the JMX view
 * exposes fewer attributes (which is why the SPI exists). Each connector port is
 * registered at most once.</p>
 */
public class TomcatRegistrar {
    private final Consumer<Definition> onRegister;
    private final Consumer<Definition> onUnregister;
    /**
     * @param onRegister callback receiving each gauge definition to publish
     * @param onUnregister callback used to withdraw definitions (currently unused by {@link #stop()})
     */
    public TomcatRegistrar(final Consumer<Definition> onRegister,
                           final Consumer<Definition> onUnregister) {
        this.onRegister = onRegister;
        this.onUnregister = onUnregister;
    }
    /** Runs both discovery passes, sharing the set of already-handled ports. */
    public void start() {
        final Collection<Integer> ports = new HashSet<>();
        registerFromServers(ports);
        registerFromJmx(ports);
    }
    // Pass 1: walk live Server instances (Meecrowave/TomEE/SPI) and register executor gauges per connector port.
    private void registerFromServers(final Collection<Integer> ports) {
        Stream.concat(
                findServers(),
                StreamSupport.stream(ServiceLoader.load(ServerRegistration.class).spliterator(), false)
                        .map(Supplier::get))
                .filter(Objects::nonNull)
                .distinct()
                .map(Server::findServices)
                .flatMap(Stream::of)
                .map(Service::findConnectors)
                .flatMap(Stream::of)
                .map(Connector::getProtocolHandler)
                .filter(AbstractProtocol.class::isInstance)
                .map(AbstractProtocol.class::cast)
                .forEach(protocol -> {
                    final Executor executor = protocol.getExecutor();
                    final int port = protocol.getPort();
                    if (!ports.add(port)) { // register each port only once
                        return;
                    }
                    final String prefix = "server.executor.port_" + port + ".";
                    if (java.util.concurrent.ThreadPoolExecutor.class.isInstance(executor)) {
                        final java.util.concurrent.ThreadPoolExecutor pool =
                                java.util.concurrent.ThreadPoolExecutor.class.cast(executor);
                        addGauge(prefix + "queue.size", "Connector Queue Size", () -> pool.getQueue().size());
                        addGauge(prefix + "active", "Connector Active Count", pool::getActiveCount);
                        addGauge(prefix + "tasks.completed", "Connector Completed Tasks", pool::getCompletedTaskCount);
                        addGauge(prefix + "tasks.count", "Connector Tasks Count", pool::getTaskCount);
                    }
                    if (ThreadPoolExecutor.class.isInstance(executor)) { // tomcat's pool adds a submitted-task counter
                        final ThreadPoolExecutor pool = ThreadPoolExecutor.class.cast(executor);
                        addGauge(prefix + "submitted", "Connector Submitted Tasks", pool::getSubmittedCount);
                    }
                });
    }
    // Pass 2: plain tomcat, test on jmx, not as rich as from the instance (this is why we have a SPI).
    private void registerFromJmx(final Collection<Integer> ports) {
        final MBeanServer server = ManagementFactory.getPlatformMBeanServer();
        try {
            server.queryMBeans(new ObjectName("*:type=ThreadPool,*"), null).stream()
                    .map(ObjectInstance::getObjectName)
                    .filter(it -> ports.add(getPort(it))) // skip ports already covered by pass 1
                    .forEach(name -> {
                        final String prefix = "server.executor.port_" + getPort(name) + ".";
                        addGauge(prefix + "thread.count", "Connector Thread Count", () -> Number.class.cast(server.getAttribute(name, "currentThreadCount")));
                        addGauge(prefix + "active", "Connector Thread Busy", () -> Number.class.cast(server.getAttribute(name, "currentThreadsBusy")));
                    });
        } catch (final Exception e) {
            // no-op: best effort, JMX may be unavailable or the names may not match
        }
    }
    // Extracts the port from a ThreadPool ObjectName, e.g. name=http-nio-8080 -> 8080.
    // NOTE(review): assumes the last '-' segment of the name is numeric and unquoted — confirm for exotic connector names.
    private int getPort(final ObjectName it) {
        final String name = it.getKeyPropertyList().get("name");
        final int sep = name.lastIndexOf('-');
        final String port = name.substring(sep + 1);
        return Integer.parseInt(port);
    }
    // Publishes a gauge definition, reusing the same text as description and display name.
    private void addGauge(final String name, final String descriptionAndDisplayName,
                          final ThrowingSupplier<Number> supplier) {
        onRegister.accept(new Definition(name, descriptionAndDisplayName, descriptionAndDisplayName, "count", supplier));
    }
    // Candidate live servers from the known embedding containers; nulls are filtered by the caller.
    private Stream<Server> findServers() {
        return Stream.of(findMeecrowave(), findTomEE());
    }
    // Locates the server when running inside TomEE through its loader helper; null when not TomEE.
    private Server findTomEE() {
        try {
            final ClassLoader loader = Thread.currentThread().getContextClassLoader();
            final Class<?> tomcatHelper = loader.loadClass("org.apache.tomee.loader.TomcatHelper");
            // fix: this used getMethod("") which can never resolve a method, so TomEE discovery
            // always failed silently; TomcatHelper exposes a static getServer() accessor
            return Server.class.cast(tomcatHelper.getMethod("getServer").invoke(null));
        } catch (final Exception | Error e) {
            return null; // not TomEE (or incompatible version): fall back to other lookups
        }
    }
    // Locates the server when running inside Meecrowave by resolving the Meecrowave bean reflectively through CDI.
    private Server findMeecrowave() {
        try {
            final ClassLoader loader = Thread.currentThread().getContextClassLoader();
            final Class<?> meecrowaveClass = loader.loadClass("org.apache.meecrowave.Meecrowave");
            final Class<?> cdi = loader.loadClass("javax.enterprise.inject.spi.CDI");
            final Object current = cdi.getMethod("current").invoke(null);
            final Object meecrowaveInstance = cdi.getMethod("select", Class.class, Annotation[].class)
                    .invoke(current, meecrowaveClass, new Annotation[0]);
            final Object meecrowave = meecrowaveInstance.getClass().getMethod("get").invoke(meecrowaveInstance);
            final Object tomcat = meecrowave.getClass().getMethod("getTomcat").invoke(meecrowave);
            return Server.class.cast(tomcat.getClass().getMethod("getServer").invoke(tomcat));
        } catch (final Exception | Error e) {
            return null; // not Meecrowave: fall back to other lookups
        }
    }
    public void stop() {
        // no-op for now
    }
}
| 8,505 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-tomcat/src/main/java/org/apache/geronimo/microprofile/metrics/extension | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-tomcat/src/main/java/org/apache/geronimo/microprofile/metrics/extension/tomcat/ServerRegistration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.extension.tomcat;
import java.util.function.Supplier;
import org.apache.catalina.Server;
/**
 * SPI hook (looked up through {@code java.util.ServiceLoader} by {@code TomcatRegistrar})
 * letting integrations supply the running Tomcat {@link Server} instance when the built-in
 * Meecrowave/TomEE discovery does not apply. Implementations may return {@code null};
 * null results are filtered out by the caller.
 */
@FunctionalInterface
public interface ServerRegistration extends Supplier<Server> {
}
| 8,506 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-extension-common/src/main/java/org/apache/geronimo/microprofile/metrics/extension | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-extension-common/src/main/java/org/apache/geronimo/microprofile/metrics/extension/common/Definition.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.extension.common;
import java.util.Objects;
import java.util.function.DoubleSupplier;
/**
 * Immutable description of a gauge to (un)register: metric identity (name, display name,
 * description, unit) plus a failure-safe numeric evaluator. Equality and hashing are
 * based on the name only.
 */
public class Definition {
    private final String name;
    private final String displayName;
    private final String description;
    private final String unit;
    private final DoubleSupplier evaluator;
    private final int hash;
    public Definition(final String name, final String displayName, final String description,
                      final String unit, final ThrowingSupplier<Number> evaluator) {
        this.name = name;
        this.displayName = displayName;
        this.description = description;
        this.unit = unit;
        // shield consumers from evaluator failures: any throwable degrades to -1
        this.evaluator = () -> {
            try {
                return evaluator.get().doubleValue();
            } catch (final Throwable throwable) {
                return -1;
            }
        };
        // cached up front since the name never changes
        this.hash = Objects.hash(name);
    }
    public String getName() {
        return name;
    }
    public String getDisplayName() {
        return displayName;
    }
    public String getDescription() {
        return description;
    }
    public String getUnit() {
        return unit;
    }
    public DoubleSupplier getEvaluator() {
        return evaluator;
    }
    @Override
    public boolean equals(final Object other) {
        if (other == this) {
            return true;
        }
        if (other == null || other.getClass() != getClass()) {
            return false;
        }
        final Definition definition = (Definition) other;
        return Objects.equals(name, definition.name);
    }
    @Override
    public int hashCode() {
        return hash;
    }
}
| 8,507 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-extension-common/src/main/java/org/apache/geronimo/microprofile/metrics/extension | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-extension-common/src/main/java/org/apache/geronimo/microprofile/metrics/extension/common/ThrowingSupplier.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.extension.common;
/**
 * A {@code Supplier} variant whose {@link #get()} may throw anything. Used for metric
 * evaluators whose failures are handled by the consumer (e.g. {@code Definition} maps
 * any throwable from the supplier to {@code -1}).
 */
@FunctionalInterface
public interface ThrowingSupplier<T> {
    // Returns the current value, propagating any failure to the caller.
    T get() throws Throwable;
}
| 8,508 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-extension-common/src/main/java/org/apache/geronimo/microprofile/metrics/extension | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-extension-common/src/main/java/org/apache/geronimo/microprofile/metrics/extension/common/RegistryTypeLiteral.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.extension.common;
import java.lang.annotation.Annotation;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.eclipse.microprofile.metrics.annotation.RegistryType;
/**
 * Literal implementation of the {@link RegistryType} qualifier, used to select a
 * {@code MetricRegistry} of a given type programmatically.
 *
 * <p>Fix: the previous version inherited identity-based {@code equals}/{@code hashCode}
 * from {@code Object}, violating the {@link java.lang.annotation.Annotation} contract —
 * instances never compared equal to container-provided annotation instances, breaking
 * any annotation-keyed comparison or lookup. Both are now implemented per that
 * contract.</p>
 */
public class RegistryTypeLiteral implements RegistryType {
    private final MetricRegistry.Type type;
    public RegistryTypeLiteral(final MetricRegistry.Type registryType) {
        this.type = registryType;
    }
    @Override
    public MetricRegistry.Type type() {
        return type;
    }
    @Override
    public Class<? extends Annotation> annotationType() {
        return RegistryType.class;
    }
    @Override
    public boolean equals(final Object other) {
        if (this == other) {
            return true;
        }
        // annotation equality is defined against any implementation of the same annotation type
        if (!(other instanceof RegistryType)) {
            return false;
        }
        return type == RegistryType.class.cast(other).type();
    }
    @Override
    public int hashCode() {
        // java.lang.annotation.Annotation contract: sum over members of
        // (127 * memberName.hashCode()) ^ memberValue.hashCode()
        return (127 * "type".hashCode()) ^ (type == null ? 0 : type.hashCode());
    }
    @Override
    public String toString() {
        return "@" + RegistryType.class.getName() + "(type=" + type + ")";
    }
}
| 8,509 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-extension-common/src/main/java/org/apache/geronimo/microprofile/metrics/extension | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-extension-common/src/main/java/org/apache/geronimo/microprofile/metrics/extension/common/MicroprofileMetricsAdapter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.extension.common;
import static org.eclipse.microprofile.metrics.MetricType.GAUGE;
import java.util.function.Consumer;
import java.util.logging.Logger;
import org.eclipse.microprofile.metrics.Gauge;
import org.eclipse.microprofile.metrics.Metadata;
import org.eclipse.microprofile.metrics.MetricRegistry;
/**
 * Bridges {@code Definition} gauge descriptions to a MicroProfile {@link MetricRegistry},
 * exposing register/unregister callbacks for integrations living outside CDI.
 */
public class MicroprofileMetricsAdapter {
    private final MetricRegistry registry;
    public MicroprofileMetricsAdapter(final MetricRegistry registry) {
        this.registry = registry;
    }
    /**
     * @return a callback registering each definition as a {@code double} gauge;
     * registration failures are logged at FINE level and otherwise ignored (best effort).
     */
    public Consumer<Definition> registrer() {
        return definition -> {
            final Metadata metadata = Metadata.builder()
                    .withName(definition.getName())
                    .withDisplayName(definition.getDisplayName())
                    .withDescription(definition.getDescription())
                    .withType(GAUGE)
                    .withUnit(definition.getUnit())
                    .build();
            final Gauge<Double> gauge = () -> definition.getEvaluator().getAsDouble();
            try {
                registry.register(metadata, gauge);
            } catch (final RuntimeException re) {
                Logger.getLogger(MicroprofileMetricsAdapter.class.getName()).fine(re.getMessage());
            }
        };
    }
    /** @return a callback removing a definition's metric from the registry by name. */
    public Consumer<Definition> unregistrer() {
        return definition -> registry.remove(definition.getName());
    }
}
| 8,510 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-sigar/src/test/java/org/apache/geronimo/microprofile/metrics/extension | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-sigar/src/test/java/org/apache/geronimo/microprofile/metrics/extension/sigar/SigarTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.extension.sigar;
import static java.util.stream.Collectors.toList;
import static org.eclipse.microprofile.metrics.MetricRegistry.Type.BASE;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.List;
import javax.inject.Inject;
import org.apache.meecrowave.Meecrowave;
import org.apache.meecrowave.junit.MeecrowaveRule;
import org.eclipse.microprofile.metrics.MetricID;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.eclipse.microprofile.metrics.annotation.RegistryType;
import org.junit.ClassRule;
import org.junit.Test;
// Verifies the sigar extension registers usable gauges in the BASE registry.
public class SigarTest {
    @ClassRule
    public static final MeecrowaveRule RULE = new MeecrowaveRule(new Meecrowave.Builder() {{
        setSkipHttp(true); // metrics are read straight from the registry, no HTTP connector needed
    }}, "");

    @Inject
    @RegistryType(type = BASE)
    private MetricRegistry registry;

    @Test
    public void test() {
        RULE.inject(this);
        final List<String> sigarMetricNames = registry.getGauges()
                .keySet()
                .stream()
                .map(MetricID::getName)
                .filter(name -> name.startsWith("sigar."))
                .sorted()
                .collect(toList());
        // whatever, just check a decent amount of gauges got registered
        assertTrue(sigarMetricNames.toString(), sigarMetricNames.size() > 10);

        // and ensure reading one of them actually goes through sigar without failing
        final Object cpu = registry.getGauges().get(new MetricID("sigar.cpu.total")).getValue();
        assertNotNull(cpu);
    }
}
| 8,511 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-sigar/src/main/java/org/apache/geronimo/microprofile/metrics/extension | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-sigar/src/main/java/org/apache/geronimo/microprofile/metrics/extension/sigar/SigarRegistrar.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.extension.sigar;
import static java.util.stream.Collectors.toList;
import static org.hyperic.sigar.SigarProxyCache.EXPIRE_DEFAULT;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import org.apache.geronimo.microprofile.metrics.extension.common.Definition;
import org.apache.geronimo.microprofile.metrics.extension.common.ThrowingSupplier;
import org.hyperic.sigar.Cpu;
import org.hyperic.sigar.CpuInfo;
import org.hyperic.sigar.FileSystem;
import org.hyperic.sigar.Sigar;
import org.hyperic.sigar.SigarException;
import org.hyperic.sigar.SigarProxy;
import org.hyperic.sigar.SigarProxyCache;
// important: this class is stack agnostic and must not use cdi or anything else
/**
 * Polls Sigar for system level metrics (cpu, memory, network, filesystem) and keeps the
 * downstream registrations in sync through the {@code onRegister}/{@code onUnregister} callbacks.
 * <p>
 * Lifecycle: {@link #start()} creates the Sigar instance and, if
 * {@code geronimo.metrics.sigar.refreshInterval} is &gt; 0, a background refresh thread;
 * {@link #stop()} shuts both down. {@link #tick()} performs one synchronization pass.
 */
public class SigarRegistrar {
    private final Consumer<Definition> onRegister;
    private final Consumer<Definition> onUnregister;

    private Sigar sigarImpl;
    private SigarProxy sigar;
    private Thread refreshThread;
    private volatile boolean stopped = true;
    private long refreshInterval;

    // definitions currently registered downstream, keyed by metric name;
    // used by tick() to diff against the freshly collected metrics
    private final Map<String, Definition> currentDefinitions = new HashMap<>();

    public SigarRegistrar(final Consumer<Definition> onRegister,
                          final Consumer<Definition> onUnregister) {
        this.onRegister = onRegister;
        this.onUnregister = onUnregister;
    }

    /**
     * Initializes Sigar (behind a proxy cache to avoid hammering the native layer),
     * optionally starts the refresh thread, then performs a first {@link #tick()}.
     */
    public synchronized void start() {
        this.sigarImpl = new Sigar();
        this.sigar = SigarProxyCache.newInstance(sigarImpl, Integer.getInteger("geronimo.metrics.sigar.cache", EXPIRE_DEFAULT));
        refreshInterval = Long.getLong("geronimo.metrics.sigar.refreshInterval", TimeUnit.MINUTES.toMillis(5));
        if (refreshInterval > 0) {
            refreshThread = new Thread(() -> {
                // sleep in small slices so stop() is honored quickly
                final long iterationDuration = 250;
                final long iterations = refreshInterval / iterationDuration;
                while (!stopped) {
                    for (long i = 0; i < iterations; i++) {
                        if (stopped) {
                            return;
                        }
                        try {
                            Thread.sleep(iterationDuration);
                        } catch (final InterruptedException e) {
                            Thread.currentThread().interrupt();
                            // exit instead of busy looping: once interrupted, every
                            // subsequent sleep() would throw immediately
                            return;
                        }
                    }
                    tick();
                }
            }, getClass().getName() + "-refresher-" + hashCode());
            stopped = false;
            refreshThread.start();
        }
        tick();
    }

    /**
     * Diffs the currently available Sigar metrics against what was previously registered:
     * metrics which disappeared are unregistered, new ones are registered.
     */
    public synchronized void tick() {
        final Collection<Definition> currentMetrics = collectMetrics();
        final Collection<Definition> alreadyRegistered = currentMetrics.stream()
                .filter(it -> currentDefinitions.containsKey(it.getName()))
                .collect(toList());

        // remove no more accurate metrics
        // NOTE(review): assumes Definition#equals matches on the name - TODO confirm
        final Collection<Definition> missingRegistered = new HashSet<>(currentDefinitions.values());
        missingRegistered.removeAll(alreadyRegistered);
        missingRegistered.forEach(it -> {
            currentDefinitions.remove(it.getName());
            if (onUnregister != null) {
                onUnregister.accept(it);
            }
        });

        // register new metrics, remembering them so the next tick does not
        // register them a second time (was missing, making every metric look
        // "new" on each tick)
        currentMetrics.removeAll(alreadyRegistered);
        currentMetrics.forEach(it -> {
            currentDefinitions.put(it.getName(), it);
            onRegister.accept(new Definition(
                    it.getName(), it.getDisplayName(), it.getDescription(), it.getUnit(),
                    () -> it.getEvaluator().getAsDouble()));
        });
    }

    /**
     * Stops the refresh thread (if any) and releases the native Sigar resources.
     */
    public synchronized void stop() {
        if (refreshThread != null) {
            stopped = true;
            try {
                refreshThread.join(500);
                if (refreshThread.isAlive()) {
                    refreshThread.interrupt();
                }
            } catch (final InterruptedException e) {
                Thread.currentThread().interrupt();
            } finally {
                refreshThread = null;
            }
        }
        if (sigarImpl != null) { // stop() without a prior start() must not NPE
            sigarImpl.close();
        }
    }

    // collects every metric sigar can currently expose on this host
    private Collection<Definition> collectMetrics() {
        final Collection<Definition> definitions = new HashSet<>();

        // global
        addCpu(definitions, "sigar.cpu.", () -> sigar.getCpu());
        addMem(definitions);

        // individual CPU
        try {
            final CpuInfo[] cpuInfoList = sigar.getCpuInfoList();
            IntStream.range(0, cpuInfoList.length)
                    .forEach(idx -> addCpu(definitions, "sigar.cpu." + idx + ".", () -> sigar.getCpuList()[idx]));
        } catch (final SigarException se) {
            // ignore, per-cpu metrics are best effort
        }

        // network
        addNetwork(definitions);

        // filesystem
        addFileSystem(definitions);
        return definitions;
    }

    // disk read/write counters per device, skipping virtual/system mount points
    private void addFileSystem(final Collection<Definition> definitions) {
        try {
            Stream.of(sigar.getFileSystemList())
                    .filter(it -> !it.getDirName().startsWith("/sys") &&
                            !it.getDirName().startsWith("/dev") &&
                            !it.getDirName().startsWith("/proc") &&
                            !it.getDirName().startsWith("/run") &&
                            !it.getDirName().startsWith("/snap"))
                    .map(FileSystem::getDevName)
                    .distinct()
                    .forEach(devName -> {
                        final String baseName = "sigar.net.disk." + devName.replace('/', '_').replaceFirst("^_", "") + ".";
                        definitions.add(new Definition(
                                baseName + "read.count", devName + " Reads",
                                "Reads on " + devName, "count",
                                () -> sigar.getDiskUsage(devName).getReads()));
                        definitions.add(new Definition(
                                baseName + "write.count", devName + " Writes",
                                "Writes on " + devName, "count",
                                () -> sigar.getDiskUsage(devName).getWrites()));
                        definitions.add(new Definition(
                                baseName + "read.bytes", devName + " Reads",
                                "Reads on " + devName, "bytes",
                                () -> sigar.getDiskUsage(devName).getReadBytes()));
                        definitions.add(new Definition(
                                baseName + "write.bytes", devName + " Writes",
                                "Writes on " + devName, "bytes",
                                () -> sigar.getDiskUsage(devName).getWriteBytes()));
                    });
        } catch (final SigarException e) {
            // no-op: filesystem stats not available on this platform/permissions
        }
    }

    // tcp counters (getTcp) and connection states (getNetStat); each family is probed
    // once up-front and skipped entirely when unsupported
    private void addNetwork(final Collection<Definition> definitions) {
        try {
            sigar.getTcp(); // ensure it is allowed+available
            definitions.add(new Definition("sigar.network.tcp.active.opens", "Opening connections",
                    "Active connections openings", "count",
                    () -> sigar.getTcp().getActiveOpens()));
            definitions.add(new Definition("sigar.network.tcp.passive.opens", "Passive connections",
                    "Passive connection openings", "count",
                    () -> sigar.getTcp().getPassiveOpens()));
            definitions.add(new Definition("sigar.network.tcp.attempts.fails", "Failed connections",
                    "Failed connection attempts", "count",
                    () -> sigar.getTcp().getAttemptFails()));
            definitions.add(new Definition("sigar.network.tcp.established.reset", "Resetted connections",
                    "Connection resets received", "count",
                    () -> sigar.getTcp().getEstabResets()));
            definitions.add(new Definition("sigar.network.tcp.established.current", "Established connections",
                    "Connections established", "count",
                    () -> sigar.getTcp().getCurrEstab()));
            definitions.add(new Definition("sigar.network.tcp.segments.in", "Received segments",
                    "Received segments", "count",
                    () -> sigar.getTcp().getInSegs()));
            definitions.add(new Definition("sigar.network.tcp.segments.out", "Sent segments",
                    "Send out segments", "count",
                    () -> sigar.getTcp().getOutSegs()));
            definitions.add(new Definition("sigar.network.tcp.segments.retrans", "Retransmitted segments",
                    "Retransmitted segments", "count",
                    () -> sigar.getTcp().getRetransSegs()));
            definitions.add(new Definition("sigar.network.tcp.resets.out", "Sent resets",
                    "Sent resets", "count",
                    () -> sigar.getTcp().getOutRsts()));
        } catch (final Exception | Error notAvailable) {
            // no-op: native call can fail or even link-error depending on the platform
        }
        try {
            sigar.getNetStat();
            definitions.add(new Definition("sigar.network.tcp.output.total", "Total Outbound",
                    "Sent bytes", "bytes",
                    () -> sigar.getNetStat().getTcpOutboundTotal()));
            definitions.add(new Definition("sigar.network.tcp.inbound.total", "Total Inbound",
                    "Received bytes", "bytes",
                    () -> sigar.getNetStat().getTcpInboundTotal()));
            definitions.add(new Definition("sigar.network.tcp.established", "TCP established",
                    "TCP established", "count",
                    () -> sigar.getNetStat().getTcpEstablished()));
            definitions.add(new Definition("sigar.network.tcp.idle", "TCP Idle",
                    "TCP Idle", "count",
                    () -> sigar.getNetStat().getTcpIdle()));
            definitions.add(new Definition("sigar.network.tcp.closing", "TCP Closing",
                    "TCP Closing", "count",
                    () -> sigar.getNetStat().getTcpClosing()));
            definitions.add(new Definition("sigar.network.tcp.bound", "TCP Bound",
                    "TCP Bound", "count",
                    () -> sigar.getNetStat().getTcpBound()));
            definitions.add(new Definition("sigar.network.tcp.close", "TCP Close",
                    "TCP Close", "count",
                    () -> sigar.getNetStat().getTcpClose()));
            definitions.add(new Definition("sigar.network.tcp.closewait", "TCP Close Wait",
                    "TCP Close Wait", "count",
                    () -> sigar.getNetStat().getTcpCloseWait()));
            definitions.add(new Definition("sigar.network.tcp.listen", "TCP Listen",
                    "TCP Listen", "count",
                    () -> sigar.getNetStat().getTcpListen()));
        } catch (final Exception | Error notAvailable) {
            // no-op: same rationale as above
        }
    }

    // physical memory gauges, all in bytes
    private void addMem(final Collection<Definition> definitions) {
        definitions.add(new Definition(
                "sigar.mem.ram", "System RAM Memory",
                "The total amount of physical memory, in [bytes]", "bytes",
                () -> sigar.getMem().getRam()));
        definitions.add(new Definition(
                "sigar.mem.total", "System Total Memory",
                "The amount of physical memory, in [bytes]", "bytes",
                () -> sigar.getMem().getTotal()));
        definitions.add(new Definition(
                "sigar.mem.used", "System Used Memory",
                "The amount of physical memory in use, in [bytes]", "bytes",
                () -> sigar.getMem().getUsed()));
        definitions.add(new Definition(
                "sigar.mem.free", "System Free Memory",
                "The amount of free physical memory, in [bytes]", "bytes",
                () -> sigar.getMem().getFree()));
        definitions.add(new Definition(
                "sigar.mem.actual.used", "System Actual Used Memory",
                "The actual amount of physical memory in use, in [bytes]", "bytes",
                () -> sigar.getMem().getActualUsed()));
        definitions.add(new Definition(
                "sigar.mem.actual.free", "System Actual Free Memory",
                "The actual amount of free physical memory, in [bytes]", "bytes",
                () -> sigar.getMem().getActualFree()));
    }

    // cpu time gauges in ms; "base" prefixes the metric names so the same set can be
    // produced for the global cpu and for each individual cpu
    private void addCpu(final Collection<Definition> definitions,
                        final String base,
                        final ThrowingSupplier<Cpu> provider) {
        definitions.add(new Definition(
                base + "idle", "CPU Idle Time",
                "The idle time of the CPU, in [ms]", "ms",
                () -> provider.get().getIdle()));
        definitions.add(new Definition(
                base + "nice", "CPU Nice Priority Time",
                "The time of the CPU spent on nice priority, in [ms]", "ms",
                () -> provider.get().getNice()));
        definitions.add(new Definition(
                base + "sys", "CPU User Time",
                "The time of the CPU used by the system, in [ms]", "ms",
                () -> provider.get().getSys()));
        definitions.add(new Definition(
                base + "total", "CPU Total Time",
                "The total time of the CPU, in [ms]", "ms",
                () -> provider.get().getTotal()));
        definitions.add(new Definition(
                base + "wait", "CPU Wait Time",
                "The time the CPU had to wait for data to be loaded, in [ms]", "ms",
                () -> provider.get().getWait()));
    }
}
| 8,512 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-sigar/src/main/java/org/apache/geronimo/microprofile/metrics/extension | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-sigar/src/main/java/org/apache/geronimo/microprofile/metrics/extension/sigar/InitSigar.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.extension.sigar;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.logging.Logger;
import org.hyperic.jni.ArchLoaderException;
import org.hyperic.jni.ArchNotSupportedException;
import org.hyperic.sigar.Sigar;
import org.hyperic.sigar.SigarException;
import org.hyperic.sigar.SigarLoader;
// note: for some integration the license is GPL3 so we can't provide it
// -> user will have to download sigar-dist, unpack it and point to the folder using
// $ -Dorg.hyperic.sigar.path=/path/to/folder/with/native/integs
/**
 * Locates, extracts (when bundled in the classpath) and loads the sigar native library,
 * then validates it actually works. {@link #isValid()} reports the outcome; failures are
 * logged and leave the application running without sigar metrics.
 */
public class InitSigar {
    // true once ensureSigarIsSetup() succeeded end to end
    private boolean valid;
    private final File tempDir;

    public InitSigar(final File tempDir) {
        this.tempDir = tempDir;
    }

    /**
     * Attempts to load the native library; on any failure {@link #isValid()} becomes false.
     */
    public void ensureSigarIsSetup() {
        valid = true;
        final SigarLoader loader = new SigarLoader(Sigar.class);
        try {
            loadFromPath(loader);
        } catch (final UnsatisfiedLinkError e) {
            unavailable(e.getMessage());
        }
    }

    // resolves where the native library lives:
    // - "<package>.path" system property set to "-": nothing to do (already loaded externally)
    // - set to a folder: load from there
    // - unset: extract the bundled library into tempDir (if not already there) and load it
    private void loadFromPath(final SigarLoader loader) {
        try {
            final String systemProp = loader.getPackageName() + ".path";
            final String path = System.getProperty(systemProp);
            if (path == null) {
                final String libraryName = loader.getLibraryName();
                final File output = new File(tempDir, "sigar/" + libraryName);
                try {
                    if (!output.exists() && !extractNativeLibrary(libraryName, output)) {
                        return; // unavailable() was already called
                    }
                    // load in all cases, including when a previous run already
                    // extracted the library (the load was previously skipped then)
                    loader.load(output.getParentFile().getAbsolutePath());
                    afterLoad(systemProp);
                } catch (final ArchLoaderException | IOException ex) {
                    unavailable(ex.getMessage());
                }
            } else if (!"-".equals(path)) {
                try {
                    loader.load(path);
                    afterLoad(systemProp);
                } catch (final ArchLoaderException ex) {
                    unavailable(ex.getMessage());
                }
            }
        } catch (final ArchNotSupportedException ex) {
            unavailable(ex.getMessage());
        }
    }

    // copies the versioned native library from the classpath to "output";
    // returns false (after flagging unavailability) when the resource is missing
    private boolean extractNativeLibrary(final String libraryName, final File output) throws IOException {
        final int dot = libraryName.lastIndexOf('.');
        final String resourceName = libraryName.substring(0, dot) + "-"
                + System.getProperty("sigar.version", "1.6.4") + libraryName.substring(dot);
        try (final InputStream stream = Thread.currentThread().getContextClassLoader()
                .getResourceAsStream(resourceName)) {
            if (stream == null) {
                unavailable("native library not found in the classloader as " + resourceName);
                return false;
            }
            output.getParentFile().mkdirs();
            Files.copy(stream, output.toPath(), StandardCopyOption.REPLACE_EXISTING);
            return true;
        }
    }

    // logs why sigar can't be used and marks this instance invalid
    private void unavailable(final String message) {
        Logger.getLogger(InitSigar.class.getName()).info("Sigar is not available: " + message);
        valid = false;
    }

    // marks the library as loaded ("-") then smoke-tests it, restoring the
    // property on failure so another attempt stays possible
    private void afterLoad(final String systemProp) {
        // ensure it works
        final String original = System.getProperty(systemProp);
        System.setProperty(systemProp, "-");
        try {
            testItWorks();
        } catch (final Throwable throwable) {
            unavailable(throwable.getMessage());
            if (original == null) {
                System.clearProperty(systemProp);
            } else {
                System.setProperty(systemProp, original);
            }
        }
    }

    // minimal end-to-end call through the native layer
    private void testItWorks() throws SigarException {
        final Sigar sigar = new Sigar();
        sigar.getCpu();
        sigar.close();
    }

    /** @return true when the native library was loaded and validated. */
    public boolean isValid() {
        return valid;
    }
}
| 8,513 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-sigar/src/main/java/org/apache/geronimo/microprofile/metrics/extension | Create_ds/geronimo-metrics/geronimo-metrics-extensions/geronimo-metrics-sigar/src/main/java/org/apache/geronimo/microprofile/metrics/extension/sigar/MicroprofileMetricsSigarRegistrar.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.extension.sigar;
import java.io.File;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Stream;
import javax.enterprise.event.Observes;
import javax.enterprise.inject.spi.AfterDeploymentValidation;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import javax.enterprise.inject.spi.BeforeShutdown;
import javax.enterprise.inject.spi.Extension;
import org.apache.geronimo.microprofile.metrics.extension.common.MicroprofileMetricsAdapter;
import org.apache.geronimo.microprofile.metrics.extension.common.RegistryTypeLiteral;
import org.eclipse.microprofile.metrics.MetricRegistry;
/**
 * CDI extension wiring sigar system metrics into a {@link MetricRegistry}
 * (BASE by default, overridable with {@code geronimo.metrics.sigar.registry.type})
 * once the deployment is validated, and stopping the collection at shutdown.
 */
public class MicroprofileMetricsSigarRegistrar implements Extension {
    private SigarRegistrar registrar;

    void afterValidation(@Observes final AfterDeploymentValidation validation,
                         final BeanManager beanManager) {
        final InitSigar initSigar = new InitSigar(findTempDir());
        initSigar.ensureSigarIsSetup();
        if (!initSigar.isValid()) {
            return; // no native sigar available, skip silently (already logged by InitSigar)
        }

        final MetricRegistry.Type registryType = MetricRegistry.Type.valueOf(
                System.getProperty("geronimo.metrics.sigar.registry.type", "BASE"));
        final Set<Bean<?>> beans = beanManager.getBeans(MetricRegistry.class, new RegistryTypeLiteral(registryType));
        final MetricRegistry registry = MetricRegistry.class.cast(beanManager.getReference(
                beanManager.resolve(beans), MetricRegistry.class, beanManager.createCreationalContext(null)));
        final MicroprofileMetricsAdapter adapter = new MicroprofileMetricsAdapter(registry);
        registrar = new SigarRegistrar(adapter.registrer(), adapter.unregistrer());
        registrar.start();
    }

    void beforeShutdown(@Observes final BeforeShutdown beforeShutdown) {
        if (registrar != null) {
            registrar.stop();
        }
    }

    // let's try some well known temp folders and fallback on java io one
    private File findTempDir() {
        return new File(
                Stream.of(
                        "geronimo.metrics.sigar.location",
                        // was "catalina.base" twice; the second entry was a useless
                        // duplicate and is assumed to have meant "catalina.home"
                        "catalina.base", "catalina.home",
                        "meecrowave.base", "tomee.base",
                        "application.base", "application.home")
                        .map(System::getProperty)
                        .filter(Objects::nonNull)
                        .map(File::new)
                        .filter(File::exists)
                        .flatMap(root -> Stream.of(
                                new File(root, "work"),
                                new File(root, "temp"),
                                new File(root, "tmp")))
                        .filter(File::exists)
                        .findFirst()
                        .orElseGet(() -> new File(System.getProperty("java.io.tmpdir", "."))),
                System.getProperty("geronimo.metrics.sigar.folder", "sigar"));
    }
}
| 8,514 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/test/java/org/apache/geronimo/microprofile/metrics/common | Create_ds/geronimo-metrics/geronimo-metrics-common/src/test/java/org/apache/geronimo/microprofile/metrics/common/json/JsonMetricTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common.json;
import org.apache.geronimo.microprofile.metrics.common.RegistryImpl;
import org.apache.geronimo.microprofile.metrics.common.jaxrs.MetricsEndpoints;
import org.apache.geronimo.microprofile.metrics.common.jaxrs.SecurityValidator;
import org.apache.geronimo.microprofile.metrics.common.prometheus.PrometheusFormatter;
import org.eclipse.microprofile.metrics.Gauge;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.junit.Test;
import javax.ws.rs.core.SecurityContext;
import javax.ws.rs.core.UriInfo;
import static java.util.Collections.singletonMap;
import static org.junit.Assert.assertEquals;
/**
 * Checks a registered gauge value is rendered as a plain json mapping by the endpoint.
 */
public class JsonMetricTest {
    @Test
    public void testJsonGaugeValue() {
        final RegistryImpl registry = new RegistryImpl(MetricRegistry.Type.APPLICATION);
        registry.register("foo", (Gauge<Long>) () -> 1L);

        final Object json = newEndpoints(registry).getJson("application", "foo", null, null);
        assertEquals(singletonMap("foo", 1L), json);
    }

    // endpoint wired manually (no cdi) with a permissive security validator
    private MetricsEndpoints newEndpoints(final RegistryImpl registry) {
        final MetricsEndpoints endpoints = new MetricsEndpoints();
        endpoints.setApplicationRegistry(registry);
        endpoints.setPrometheus(new PrometheusFormatter());
        endpoints.setSecurityValidator(new SecurityValidator() {
            @Override
            public void checkSecurity(final SecurityContext securityContext, final UriInfo uriInfo) {
                // no-op
            }
        });
        return endpoints;
    }
}
| 8,515 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/test/java/org/apache/geronimo/microprofile/metrics/common | Create_ds/geronimo-metrics/geronimo-metrics-common/src/test/java/org/apache/geronimo/microprofile/metrics/common/prometheus/PrometheusFormatterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common.prometheus;
import static java.util.Collections.singletonMap;
import static org.junit.Assert.assertEquals;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.geronimo.microprofile.metrics.common.RegistryImpl;
import org.eclipse.microprofile.metrics.Gauge;
import org.eclipse.microprofile.metrics.Metric;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.junit.Test;
/**
 * Covers the prometheus output overriding features: metric renaming and prefix filtering.
 */
public class PrometheusFormatterTest {
    @Test
    public void rename() {
        final PrometheusFormatter prometheusFormatter = new PrometheusFormatter().enableOverriding();
        final RegistryImpl registry = new RegistryImpl(MetricRegistry.Type.APPLICATION);
        final Map<String, Metric> metrics = singletonMap("myMetric", (Gauge<Long>) () -> 1234L);
        metrics.forEach(registry::register);

        // no mapping configured: default prometheus naming
        assertEquals(
                "# TYPE sample_myMetric gauge\n" +
                "sample_myMetric 1234.0\n",
                prometheusFormatter.toText(registry, "sample", metrics).toString());

        try {
            // a system property mapping whose key does not match the prometheus key is ignored
            System.setProperty("geronimo.metrics.prometheus.mapping.sample:my_metric", "renamed");
            prometheusFormatter.enableOverriding();
            assertEquals(
                    "# TYPE sample_myMetric gauge\n" +
                    "sample_myMetric 1234.0\n",
                    prometheusFormatter.toText(registry, "sample", metrics).toString());

            // an explicit properties mapping matching the prometheus key renames the metric
            prometheusFormatter.enableOverriding(new Properties() {{
                setProperty("sample_myMetric", "again");
            }});
            assertEquals(
                    "# TYPE again gauge\nagain 1234.0\n",
                    prometheusFormatter.toText(registry, "sample", metrics).toString());
        } finally {
            // the previous cleanup used the wrong key ("sample:my_metric") so the
            // property leaked to other tests; clear the key actually set above
            System.clearProperty("geronimo.metrics.prometheus.mapping.sample:my_metric");
        }
    }

    @Test
    public void filter() {
        final PrometheusFormatter prometheusFormatter = new PrometheusFormatter().enableOverriding();
        final RegistryImpl registry = new RegistryImpl(MetricRegistry.Type.APPLICATION);
        final Map<String, Metric> metrics = new LinkedHashMap<>();
        metrics.put("myMetric1", (Gauge<Long>) () -> 1234L);
        metrics.put("myMetric2", (Gauge<Long>) () -> 1235L);
        metrics.forEach(registry::register);

        // no filter: every metric is rendered
        assertEquals(
                "# TYPE sample_myMetric1 gauge\n" +
                "sample_myMetric1 1234.0\n" +
                "# TYPE sample_myMetric2 gauge\n" +
                "sample_myMetric2 1235.0\n",
                prometheusFormatter.toText(registry, "sample", metrics).toString());

        // prefix filter: only matching metrics are rendered
        prometheusFormatter.enableOverriding(new Properties() {{
            setProperty("geronimo.metrics.filter.prefix", "sample_myMetric2");
        }});
        assertEquals(
                "# TYPE sample_myMetric2 gauge\n" +
                "sample_myMetric2 1235.0\n",
                prometheusFormatter.toText(registry, "sample", metrics).toString());
    }
}
| 8,516 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/test/java/org/apache/geronimo/microprofile/metrics/common | Create_ds/geronimo-metrics/geronimo-metrics-common/src/test/java/org/apache/geronimo/microprofile/metrics/common/jaxrs/SecurityValidatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common.jaxrs;
import java.net.URI;
import java.security.Principal;
import java.util.List;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.PathSegment;
import javax.ws.rs.core.SecurityContext;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
import org.junit.Test;
public class SecurityValidatorTest {
private static final SecurityContext ANONYMOUS = new SecurityContext() {
@Override
public Principal getUserPrincipal() {
return null;
}
@Override
public boolean isUserInRole(final String role) {
return false;
}
@Override
public boolean isSecure() {
return false;
}
@Override
public String getAuthenticationScheme() {
return null;
}
};
private static final SecurityContext LOGGED_NO_ROLE = new SecurityContext() {
@Override
public Principal getUserPrincipal() {
return () -> "somebody";
}
@Override
public boolean isUserInRole(final String role) {
return false;
}
@Override
public boolean isSecure() {
return false;
}
@Override
public String getAuthenticationScheme() {
return null;
}
};
private static final SecurityContext ADMIN = new SecurityContext() {
@Override
public Principal getUserPrincipal() {
return () -> "somebody";
}
@Override
public boolean isUserInRole(final String role) {
return "admin".equals(role);
}
@Override
public boolean isSecure() {
return false;
}
@Override
public String getAuthenticationScheme() {
return null;
}
};
private static final UriInfo REMOTE = uri("http://geronimo.somewhere");
private static final UriInfo LOCALHOST = uri("http://localhost");
@Test
public void localValid() {
new SecurityValidator() {{
init();
}}.checkSecurity(ANONYMOUS, LOCALHOST);
}
@Test(expected = WebApplicationException.class)
public void remoteInvalid() {
new SecurityValidator() {{
init();
}}.checkSecurity(ANONYMOUS, REMOTE);
}
@Test
public void roleValid() {
new SecurityValidator() {
{
init();
}
@Override
protected String config(final String key) {
return key.endsWith("acceptedRoles") ? "admin" : null;
}
}.checkSecurity(ADMIN, LOCALHOST);
}
@Test(expected = WebApplicationException.class)
public void roleAnonymousInvalid() {
new SecurityValidator() {
{
init();
}
@Override
protected String config(final String key) {
return key.endsWith("acceptedRoles") ? "admin" : null;
}
}.checkSecurity(ANONYMOUS, LOCALHOST);
}
@Test(expected = WebApplicationException.class)
public void roleLoggedButInvalid() {
new SecurityValidator() {
{
init();
}
@Override
protected String config(final String key) {
return key.endsWith("acceptedRoles") ? "admin" : null;
}
}.checkSecurity(LOGGED_NO_ROLE, LOCALHOST);
}
@Test
public void roleAndHostValid() {
new SecurityValidator() {
{
init();
}
@Override
protected String config(final String key) {
return key.endsWith("acceptedRoles") ? "admin" : "geronimo.somewhere";
}
}.checkSecurity(ADMIN, REMOTE);
}
private static UriInfo uri(final String request) {
return new UriInfoMock(request);
}
private static class UriInfoMock implements UriInfo {
private final URI request;
private UriInfoMock(final String request) {
this.request = URI.create(request);
}
@Override
public String getPath() {
return null;
}
@Override
public String getPath(boolean decode) {
return null;
}
@Override
public List<PathSegment> getPathSegments() {
return null;
}
@Override
public List<PathSegment> getPathSegments(boolean decode) {
return null;
}
@Override
public URI getRequestUri() {
return request;
}
@Override
public UriBuilder getRequestUriBuilder() {
return null;
}
@Override
public URI getAbsolutePath() {
return null;
}
@Override
public UriBuilder getAbsolutePathBuilder() {
return null;
}
@Override
public URI getBaseUri() {
return null;
}
@Override
public UriBuilder getBaseUriBuilder() {
return null;
}
@Override
public MultivaluedMap<String, String> getPathParameters() {
return null;
}
@Override
public MultivaluedMap<String, String> getPathParameters(boolean decode) {
return null;
}
@Override
public MultivaluedMap<String, String> getQueryParameters() {
return null;
}
@Override
public MultivaluedMap<String, String> getQueryParameters(boolean decode) {
return null;
}
@Override
public List<String> getMatchedURIs() {
return null;
}
@Override
public List<String> getMatchedURIs(boolean decode) {
return null;
}
@Override
public List<Object> getMatchedResources() {
return null;
}
@Override
public URI resolve(URI uri) {
return null;
}
@Override
public URI relativize(URI uri) {
return null;
}
}
}
| 8,517 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/SimpleGaugeImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common;
import org.eclipse.microprofile.metrics.Gauge;
import java.util.function.Supplier;
/**
 * {@link Gauge} backed by a {@link Supplier}: each call to
 * {@link #getValue()} re-evaluates the supplier, so the gauge always
 * reflects the current value of the underlying source.
 *
 * @param <T> type of the gauge value
 */
public class SimpleGaugeImpl<T> implements Gauge<T> {
    private final Supplier<T> valueSource;

    public SimpleGaugeImpl(final Supplier<T> supplier) {
        this.valueSource = supplier;
    }

    @Override
    public T getValue() {
        return valueSource.get();
    }
}
| 8,518 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/CounterImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common;
import java.util.concurrent.atomic.LongAdder;
import org.eclipse.microprofile.metrics.Counter;
/**
 * Thread-safe {@link Counter} implementation backed by a {@link LongAdder}
 * (scales better than an AtomicLong under concurrent increments).
 */
public class CounterImpl implements Counter {
    private final LongAdder delegate = new LongAdder();

    // metric unit, exposed for serializers/exporters
    private final String unit;

    public CounterImpl(final String unit) {
        this.unit = unit;
    }

    /**
     * @return the unit this counter was created with. Added for consistency
     * with the sibling implementations ({@code HistogramImpl},
     * {@code SimpleTimerImpl}) which expose their unit; previously the field
     * was stored but unreachable.
     */
    public String getUnit() {
        return unit;
    }

    @Override
    public void inc() {
        delegate.increment();
    }

    @Override
    public void inc(final long n) {
        delegate.add(n);
    }

    @Override
    public long getCount() {
        // sum() is a snapshot, not an atomic read — fine for metrics reporting
        return delegate.sum();
    }
}
| 8,519 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/SimpleTimerImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common;
import org.eclipse.microprofile.metrics.SimpleTimer;
import java.time.Clock;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneId;
import java.util.concurrent.Callable;
/**
 * {@link SimpleTimer} implementation tracking a running count, the total
 * elapsed duration of all recorded samples, and per-minute min/max windows:
 * the completed minute is exposed via {@link #getMaxTimeDuration()} /
 * {@link #getMinTimeDuration()} while the current minute accumulates.
 * Same rotation logic as ConcurrentGaugeImpl.
 */
public class SimpleTimerImpl implements SimpleTimer {
    private static final Clock MINUTE_CLOCK = Clock.tickMinutes(ZoneId.of("UTC"));
    private static final Clock CLOCK = Clock.systemUTC();

    // start of the minute currently being accumulated
    private volatile Instant currentMinute = CLOCK.instant();
    // total of all recorded durations — the SimpleTimer#getElapsedTime()
    // contract is the *sum* of all timing events, not the last sample
    private volatile Duration elapsed = Duration.ZERO;
    private volatile Duration min;
    private volatile Duration max;
    private volatile Duration previousMin;
    private volatile Duration previousMax;
    private volatile long count;

    private final String unit;

    public SimpleTimerImpl(final String unit) {
        this.unit = unit;
    }

    public String getUnit() {
        return unit;
    }

    @Override
    public void update(final Duration duration) {
        if (duration.isNegative()) {
            return; // negative samples are silently dropped
        }
        maybeRotate();
        synchronized (this) {
            count++;
            // accumulate the total instead of overwriting with the last
            // sample (fixes getElapsedTime())
            elapsed = elapsed.plus(duration);
            // compare with Duration#compareTo to keep sub-millisecond
            // precision (toMillis() truncates)
            if (max == null || duration.compareTo(max) > 0) {
                max = duration;
            }
            if (min == null || duration.compareTo(min) < 0) {
                min = duration;
            }
        }
    }

    @Override
    public <T> T time(final Callable<T> callable) throws Exception {
        final Instant startTime = CLOCK.instant();
        try {
            return callable.call();
        } finally {
            // record even when the callable throws
            update(Duration.between(startTime, CLOCK.instant()));
        }
    }

    @Override
    public void time(final Runnable runnable) {
        try {
            time(() -> {
                runnable.run();
                return null;
            });
        } catch (final RuntimeException re) {
            throw re; // preserve unchecked exceptions as-is
        } catch (Exception e) {
            throw new IllegalStateException(e);
        }
    }

    @Override
    public Context time() {
        return new ContextImpl();
    }

    @Override
    public Duration getElapsedTime() {
        return elapsed;
    }

    @Override
    public long getCount() {
        return count;
    }

    @Override
    public Duration getMaxTimeDuration() {
        maybeRotate();
        return previousMax; // max of the previously completed minute, may be null
    }

    @Override
    public Duration getMinTimeDuration() {
        maybeRotate();
        return previousMin; // min of the previously completed minute, may be null
    }

    // double-checked rotation: cheap volatile read first, lock only when a
    // minute boundary has actually been crossed
    private void maybeRotate() {
        final Instant now = MINUTE_CLOCK.instant();
        if (now.isAfter(currentMinute)) {
            synchronized (this) {
                if (now.isAfter(currentMinute)) {
                    rotate(now);
                }
            }
        }
    }

    // move the accumulating window into the "previous" slots; called under lock
    private void rotate(final Instant now) {
        previousMax = max;
        previousMin = min;
        max = min = null;
        currentMinute = now;
    }

    /** Measures the span from construction until {@link #stop()}/{@link #close()}. */
    private class ContextImpl implements Context {
        private final Instant start = CLOCK.instant();

        @Override
        public long stop() {
            final Duration duration = Duration.between(start, CLOCK.instant());
            update(duration);
            return duration.toNanos();
        }

        @Override
        public void close() {
            stop();
        }
    }
}
| 8,520 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/HistogramImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common;
import static java.util.Comparator.comparing;
import static java.util.stream.Collectors.toMap;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.LongStream;
import java.util.stream.Stream;
import javax.json.bind.annotation.JsonbTransient;
import org.eclipse.microprofile.metrics.Histogram;
import org.eclipse.microprofile.metrics.Snapshot;
// impl adapted from apache sirona
/**
 * {@link Histogram} backed by an exponentially-decaying random reservoir
 * (impl adapted from Apache Sirona): samples are kept in a priority-keyed
 * skip-list bucket where recent values get exponentially higher weight, and
 * the whole reservoir is rescaled once per hour to avoid weight overflow.
 */
public class HistogramImpl implements Histogram {
    // potential config
    private static final double ALPHA = Double.parseDouble(System.getProperty("geronimo.metrics.storage.alpha", "0.015"));
    private static final int BUCKET_SIZE = Integer.getInteger("geronimo.metrics.storage.size", 1024);
    private static final long REFRESH_INTERVAL = TimeUnit.HOURS.toNanos(1);
    private static final Value[] EMPTY_ARRAY = new Value[0];

    private final String unit;
    // read lock guards sampling, write lock guards the hourly rescale
    private final ReadWriteLock lock = new ReentrantReadWriteLock();
    private final AtomicLong count = new AtomicLong();
    // priority -> weighted sample; lowest priority evicted first when full
    private final ConcurrentSkipListMap<Double, Value> bucket = new ConcurrentSkipListMap<>();
    private final AtomicLong nextRefreshTime = new AtomicLong(System.nanoTime() + REFRESH_INTERVAL);
    private volatile long startTime = nowSec();

    public HistogramImpl(final String unit) {
        this.unit = unit;
    }

    @Override
    public void update(final int value) {
        update((long) value);
    }

    @Override
    public synchronized void update(final long value) {
        add(value);
    }

    @Override
    public long getCount() {
        return count.get();
    }

    @Override
    @JsonbTransient
    public Snapshot getSnapshot() {
        return snapshot();
    }

    public String getUnit() {
        return unit;
    }

    public double getP50() {
        return getSnapshot().getMedian();
    }

    public double getP75() {
        return getSnapshot().get75thPercentile();
    }

    public double getP95() {
        return getSnapshot().get95thPercentile();
    }

    public double getP98() {
        return getSnapshot().get98thPercentile();
    }

    public double getP99() {
        return getSnapshot().get99thPercentile();
    }

    public double getP999() {
        return getSnapshot().get999thPercentile();
    }

    public long getMax() {
        return getSnapshot().getMax();
    }

    public double getMean() {
        return getSnapshot().getMean();
    }

    public long getMin() {
        return getSnapshot().getMin();
    }

    public double getStddev() {
        return getSnapshot().getStdDev();
    }

    private long nowSec() {
        return TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
    }

    /** Records a value with an exponentially time-decayed random priority. */
    public void add(final long value) {
        ensureUpToDate();
        final Lock lock = this.lock.readLock();
        lock.lock();
        try {
            final Value sample = new Value(value, Math.exp(ALPHA * (nowSec() - startTime)));
            final double priority = sample.weight / Math.random();
            final long size = count.incrementAndGet();
            if (size <= BUCKET_SIZE) {
                bucket.put(priority, sample);
            } else { // iterate through the bucket until we need removing low priority entries to get a new space
                double first = bucket.firstKey();
                if (first < priority && bucket.putIfAbsent(priority, sample) == null) {
                    while (bucket.remove(first) == null) {
                        first = bucket.firstKey();
                    }
                }
            }
        } finally {
            lock.unlock();
        }
    }

    // hourly rescale of all weights so old samples decay and weights stay bounded
    private void ensureUpToDate() {
        final long next = nextRefreshTime.get();
        final long now = System.nanoTime();
        if (now < next) {
            return;
        }
        final Lock lock = this.lock.writeLock();
        lock.lock();
        try {
            if (nextRefreshTime.compareAndSet(next, now + REFRESH_INTERVAL)) {
                final long oldStartTime = startTime;
                startTime = nowSec();
                final double updateFactor = Math.exp(-ALPHA * (startTime - oldStartTime));
                if (updateFactor != 0.) {
                    bucket.putAll(new ArrayList<>(bucket.keySet()).stream()
                            .collect(toMap(k -> k * updateFactor, k -> {
                                final Value previous = bucket.remove(k);
                                return new Value(previous.value, previous.weight * updateFactor);
                            })));
                    count.set(bucket.size()); // N keys can lead to the same key so we must update it
                } else {
                    bucket.clear();
                    count.set(0);
                }
            }
        } finally {
            lock.unlock();
        }
    }

    public Snapshot snapshot() {
        ensureUpToDate();
        final Lock lock = this.lock.readLock();
        lock.lock();
        try {
            return new SnapshotImpl(bucket.values().toArray(EMPTY_ARRAY));
        } finally {
            lock.unlock();
        }
    }

    /** A recorded sample together with its decay weight. */
    private static final class Value {
        private final long value;
        private final double weight;

        private Value(final long value, final double weight) {
            this.value = value;
            this.weight = weight;
        }
    }

    private static class SnapshotImpl extends Snapshot {
        private final Value[] values;
        private Value[] sorted; // lazily computed, see sorted()

        private SnapshotImpl(final Value[] values) {
            this.values = values;
            // no high computation here, we are under lock + all methods are not called in general
        }

        @Override
        public int size() {
            return values.length;
        }

        @Override
        public long[] getValues() {
            return values(sorted()).toArray();
        }

        @Override
        public long getMax() {
            if (values.length == 0) {
                return 0;
            }
            if (sorted != null) {
                return sorted[sorted.length - 1].value;
            }
            return values(values).max().orElse(0);
        }

        @Override
        public long getMin() {
            if (values.length == 0) {
                return 0;
            }
            if (sorted != null) {
                return sorted[0].value;
            }
            return values(values).min().orElse(0);
        }

        @Override
        public double getMean() {
            if (values.length == 0) {
                return 0;
            }
            return values(values).sum() * 1. / values.length;
        }

        @Override
        public double getStdDev() {
            if (values.length <= 1) {
                return 0;
            }
            final double mean = getMean();
            final double sumWeight = Stream.of(values).mapToDouble(i -> i.weight).sum();
            // weighted standard deviation using the decay weights
            return Math.sqrt(Stream.of(values)
                    .mapToDouble(v -> Math.pow(v.value - mean, 2) * (v.weight / sumWeight))
                    .sum());
        }

        @Override
        public void dump(final OutputStream output) {
            values(sorted()).forEach(v -> {
                try {
                    output.write((v + "\n").getBytes(StandardCharsets.UTF_8));
                } catch (final IOException e) {
                    throw new IllegalStateException(e);
                }
            });
        }

        @Override
        public double getValue(final double quantile) {
            // fixed: previous check '!(quantile >= 0 || quantile <= 1)' was
            // always false for any non-NaN input, so out-of-range quantiles
            // were never rejected
            if (quantile < 0. || quantile > 1. || Double.isNaN(quantile)) {
                throw new IllegalArgumentException("Quantile " + quantile + " is invalid");
            }
            if (values.length == 0) {
                return 0;
            }
            if (values.length == 1) {
                return values[0].value;
            }
            final int idx = (int) Math.floor((values.length - 1) * quantile);
            return sorted()[idx].value;
        }

        // lazy, double-checked sort of the reservoir copy
        private Value[] sorted() {
            if (sorted == null) {
                synchronized (this) {
                    if (sorted == null) {
                        sorted = new Value[values.length];
                        System.arraycopy(values, 0, sorted, 0, values.length);
                        Arrays.sort(sorted, comparing(i -> i.value));
                    }
                }
            }
            return sorted;
        }

        private LongStream values(final Value[] values) {
            return Stream.of(values).mapToLong(i -> i.value);
        }
    }
}
| 8,521 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/RegistryImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common;
import org.eclipse.microprofile.metrics.ConcurrentGauge;
import org.eclipse.microprofile.metrics.Counter;
import org.eclipse.microprofile.metrics.Gauge;
import org.eclipse.microprofile.metrics.Histogram;
import org.eclipse.microprofile.metrics.Metadata;
import org.eclipse.microprofile.metrics.Meter;
import org.eclipse.microprofile.metrics.Metric;
import org.eclipse.microprofile.metrics.MetricFilter;
import org.eclipse.microprofile.metrics.MetricID;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.eclipse.microprofile.metrics.MetricType;
import org.eclipse.microprofile.metrics.MetricUnits;
import org.eclipse.microprofile.metrics.SimpleTimer;
import org.eclipse.microprofile.metrics.Tag;
import org.eclipse.microprofile.metrics.Timer;
import java.util.Map;
import java.util.Objects;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import java.util.function.Supplier;
import static java.util.stream.Collectors.toCollection;
import static java.util.stream.Collectors.toMap;
public class RegistryImpl implements MetricRegistry {
private static final Tag[] NO_TAG = new Tag[0];
private final Type type;
private final Tag[] globalTags;
private final ConcurrentMap<MetricID, Holder<? extends Metric>> metrics = new ConcurrentHashMap<>();
public RegistryImpl(final Type type) {
this(type, new Tag[0]);
}
public RegistryImpl(final Type type, final Tag[] globalTags) {
this.type = type;
this.globalTags = globalTags;
}
public Tag[] getGlobalTags() {
return globalTags;
}
@Override
public <T extends Metric> T register(final Metadata metadata, final T metric) throws IllegalArgumentException {
return register(metadata, metric, NO_TAG);
}
@Override
public <T extends Metric> T register(final Metadata metadata, final T metric, final Tag... tags) throws IllegalArgumentException {
final MetricID metricID = new MetricID(metadata.getName(), tags);
final Holder<? extends Metric> present = metrics.putIfAbsent(
metricID, new Holder<>(metric, metadata, metricID));
return present != null ? (T) present.metric : metric;
}
@Override
public <T extends Metric> T register(final String name, final T metric) throws IllegalArgumentException {
final MetricType type;
if (Counter.class.isInstance(metric)) {
type = MetricType.COUNTER;
} else if (Gauge.class.isInstance(metric)) {
type = MetricType.GAUGE;
} else if (Meter.class.isInstance(metric)) {
type = MetricType.METERED;
} else if (Timer.class.isInstance(metric)) {
type = MetricType.TIMER;
} else if (Histogram.class.isInstance(metric)) {
type = MetricType.HISTOGRAM;
} else if (ConcurrentGauge.class.isInstance(metric)) {
type = MetricType.CONCURRENT_GAUGE;
} else {
type = MetricType.INVALID;
}
return register(Metadata.builder().withName(name).withType(type).build(), metric);
}
@Override
public Counter getCounter(MetricID metricID) {
return getMetric(metricID, Counter.class);
}
@Override
public Counter counter(final String name) {
return counter(Metadata.builder().withName(name).withType(MetricType.COUNTER).build(), NO_TAG);
}
@Override
public Counter counter(final String name, final Tag... tags) {
return counter(Metadata.builder().withName(name).withType(MetricType.COUNTER).build(), tags);
}
@Override
public Counter counter(final MetricID metricID) {
return counter(metricID.getName(), metricID.getTagsAsArray());
}
@Override
public Counter counter(final Metadata metadata) {
return counter(metadata, NO_TAG);
}
@Override
public Counter counter(final Metadata metadata, final Tag... tags) {
final MetricID metricID = new MetricID(metadata.getName(), tags);
Holder<? extends Metric> holder = metrics.get(metricID);
if (holder == null) {
holder = new Holder<>(new CounterImpl(
metadata.getUnit() == null ? MetricUnits.NONE : metadata.getUnit()), enforceType(metadata, MetricType.COUNTER), metricID);
final Holder<? extends Metric> existing = metrics.putIfAbsent(holder.metricID, holder);
if (existing != null) {
holder = existing;
}
}
if (!Counter.class.isInstance(holder.metric)) {
throw new IllegalArgumentException(holder.metric + " is not a counter");
}
return Counter.class.cast(holder.metric);
}
@Override
public ConcurrentGauge getConcurrentGauge(MetricID metricID) {
return getMetric(metricID, ConcurrentGauge.class);
}
@Override
public ConcurrentGauge concurrentGauge(final String name) {
return concurrentGauge(Metadata.builder().withName(name).withType(MetricType.CONCURRENT_GAUGE).build());
}
@Override
public ConcurrentGauge concurrentGauge(final String name, final Tag... tags) {
return concurrentGauge(Metadata.builder().withName(name).withType(MetricType.CONCURRENT_GAUGE).build(), tags);
}
@Override
public ConcurrentGauge concurrentGauge(final MetricID metricID) {
return concurrentGauge(metricID.getName(), metricID.getTagsAsArray());
}
@Override
public ConcurrentGauge concurrentGauge(final Metadata metadata) {
return concurrentGauge(metadata, NO_TAG);
}
@Override
public ConcurrentGauge concurrentGauge(final Metadata metadata, final Tag... tags) {
final MetricID metricID = new MetricID(metadata.getName(), tags);
Holder<? extends Metric> holder = metrics.get(metricID);
if (holder == null) {
holder = new Holder<>(new ConcurrentGaugeImpl(
metadata.getUnit() == null ? MetricUnits.NONE : metadata.getUnit()), enforceType(metadata, MetricType.CONCURRENT_GAUGE), metricID);
final Holder<? extends Metric> existing = metrics.putIfAbsent(holder.metricID, holder);
if (existing != null) {
holder = existing;
}
}
if (!ConcurrentGauge.class.isInstance(holder.metric)) {
throw new IllegalArgumentException(holder.metric + " is not a concurrent gauge");
}
return ConcurrentGauge.class.cast(holder.metric);
}
@Override
public <T, R extends Number> Gauge<R> gauge(final String name, final T object, final Function<T, R> func, Tag... tags) {
return gauge(new MetricID(name, tags), () -> func.apply(object));
}
@Override
public <T, R extends Number> Gauge<R> gauge(final MetricID metricID, final T object, final Function<T, R> func) {
return gauge(metricID, () -> func.apply(object));
}
@Override
public <T, R extends Number> Gauge<R> gauge(final Metadata metadata, final T object, final Function<T, R> func, final Tag... tags) {
final MetricID metricID = new MetricID(metadata.getName(), tags);
Holder<? extends Metric> holder = metrics.get(metricID);
if (holder == null) {
holder = new Holder<>(new SimpleGaugeImpl<>(() -> func.apply(object)), enforceType(metadata, MetricType.GAUGE), metricID);
final Holder<? extends Metric> existing = metrics.putIfAbsent(holder.metricID, holder);
if (existing != null) {
holder = existing;
}
}
if (!Gauge.class.isInstance(holder.metric)) {
throw new IllegalArgumentException(holder.metric + " is not a gauge");
}
return Gauge.class.cast(holder.metric);
}
@Override
public <T extends Number> Gauge<T> gauge(final String name, final Supplier<T> supplier, final Tag... tags) {
return gauge(new MetricID(name, tags), supplier);
}
@Override
public <T extends Number> Gauge<T> gauge(final MetricID metricID, final Supplier<T> supplier) {
Holder<? extends Metric> holder = metrics.get(metricID);
if (holder == null) {
holder = new Holder<>(
new SimpleGaugeImpl<>(supplier),
Metadata.builder().withName(metricID.getName()).withType(MetricType.GAUGE).build(),
metricID);
final Holder<? extends Metric> existing = metrics.putIfAbsent(holder.metricID, holder);
if (existing != null) {
holder = existing;
}
}
if (!Gauge.class.isInstance(holder.metric)) {
throw new IllegalArgumentException(holder.metric + " is not a gauge");
}
return Gauge.class.cast(holder.metric);
}
@Override
public <T extends Number> Gauge<T> gauge(final Metadata metadata, final Supplier<T> supplier, final Tag... tags) {
return register(metadata, new SimpleGaugeImpl<>(supplier), tags);
}
@Override
public Gauge<?> getGauge(MetricID metricID) {
return getMetric(metricID, Gauge.class);
}
@Override
public Histogram getHistogram(MetricID metricID) {
return this.getMetric(metricID, Histogram.class);
}
@Override
public Histogram histogram(final String name) {
return histogram(Metadata.builder().withName(name).withType(MetricType.HISTOGRAM).build());
}
@Override
public Histogram histogram(final String name, final Tag... tags) {
return histogram(Metadata.builder().withName(name).withType(MetricType.HISTOGRAM).build(), tags);
}
@Override
public Histogram histogram(final MetricID metricID) {
return histogram(metricID.getName(), metricID.getTagsAsArray());
}
@Override
public Histogram histogram(final Metadata metadata) {
return histogram(metadata, NO_TAG);
}
@Override
public Histogram histogram(final Metadata metadata, final Tag... tags) {
final MetricID metricID = new MetricID(metadata.getName(), tags);
Holder<? extends Metric> holder = metrics.get(metricID);
if (holder == null) {
holder = new Holder<>(new HistogramImpl(metadata.getUnit() == null ? MetricUnits.NONE : metadata.getUnit()), enforceType(metadata, MetricType.HISTOGRAM), metricID);
final Holder<? extends Metric> existing = metrics.putIfAbsent(metricID, holder);
if (existing != null) {
holder = existing;
}
}
if (!Histogram.class.isInstance(holder.metric)) {
throw new IllegalArgumentException(holder.metric + " is not a histogram");
}
return Histogram.class.cast(holder.metric);
}
@Override
public Meter getMeter(MetricID metricID) {
return getMetric(metricID, Meter.class);
}
@Override
public Meter meter(final String name) {
return meter(Metadata.builder().withName(name).withType(MetricType.METERED).build());
}
@Override
public Meter meter(final String name, final Tag... tags) {
return meter(Metadata.builder().withName(name).withType(MetricType.METERED).build(), tags);
}
@Override
public Meter meter(final MetricID metricID) {
return meter(metricID.getName(), metricID.getTagsAsArray());
}
@Override
public Meter meter(final Metadata metadata) {
return meter(metadata, NO_TAG);
}
@Override
public Meter meter(final Metadata metadata, final Tag... tags) {
final MetricID metricID = new MetricID(metadata.getName(), tags);
Holder<? extends Metric> holder = metrics.get(metricID);
if (holder == null) {
holder = new Holder<>(new MeterImpl(metadata.getUnit() == null ? MetricUnits.NONE : metadata.getUnit()), enforceType(metadata, MetricType.METERED), metricID);
final Holder<? extends Metric> existing = metrics.putIfAbsent(metricID, holder);
if (existing != null) {
holder = existing;
}
}
if (!Meter.class.isInstance(holder.metric)) {
throw new IllegalArgumentException(holder.metric + " is not a meter");
}
return Meter.class.cast(holder.metric);
}
@Override
public SimpleTimer getSimpleTimer(MetricID metricID) {
return getMetric(metricID, SimpleTimer.class);
}
@Override
public SimpleTimer simpleTimer(final MetricID metricID) {
return simpleTimer(metricID.getName(), metricID.getTagsAsArray());
}
@Override
public SimpleTimer simpleTimer(final Metadata metadata) {
return simpleTimer(metadata, NO_TAG);
}
@Override
public SimpleTimer simpleTimer(final String name) {
return simpleTimer(new MetricID(name));
}
@Override
public SimpleTimer simpleTimer(final String name, final Tag... tags) {
return simpleTimer(Metadata.builder().withName(name).withType(MetricType.SIMPLE_TIMER).build(), tags);
}
@Override
public SimpleTimer simpleTimer(final Metadata metadata, final Tag... tags) {
final MetricID metricID = new MetricID(metadata.getName(), tags);
Holder<? extends Metric> holder = metrics.get(metricID);
if (holder == null) {
holder = new Holder<>(new SimpleTimerImpl(metadata.getUnit() == null ? MetricUnits.NONE : metadata.getUnit()), enforceType(metadata, MetricType.SIMPLE_TIMER), metricID);
final Holder<? extends Metric> existing = metrics.putIfAbsent(metricID, holder);
if (existing != null) {
holder = existing;
}
}
if (!SimpleTimer.class.isInstance(holder.metric)) {
throw new IllegalArgumentException(holder.metric + " is not a timer");
}
return SimpleTimer.class.cast(holder.metric);
}
@Override
public Timer getTimer(MetricID metricID) {
return getMetric(metricID, Timer.class);
}
@Override
public Timer timer(final String name) {
return timer(Metadata.builder().withName(name).withType(MetricType.TIMER).build());
}
@Override
public Timer timer(final String name, final Tag... tags) {
return timer(Metadata.builder().withName(name).withType(MetricType.TIMER).build(), tags);
}
@Override
public Timer timer(final MetricID metricID) {
return timer(metricID.getName(), metricID.getTagsAsArray());
}
@Override
public Timer timer(final Metadata metadata) {
return timer(metadata, NO_TAG);
}
@Override
public Timer timer(final Metadata metadata, final Tag... tags) {
final MetricID metricID = new MetricID(metadata.getName(), tags);
Holder<? extends Metric> holder = metrics.get(metricID);
if (holder == null) {
holder = new Holder<>(new TimerImpl(metadata.getUnit() == null ? MetricUnits.NONE : metadata.getUnit()), enforceType(metadata, MetricType.TIMER), metricID);
final Holder<? extends Metric> existing = metrics.putIfAbsent(metricID, holder);
if (existing != null) {
holder = existing;
}
}
if (!Timer.class.isInstance(holder.metric)) {
throw new IllegalArgumentException(holder.metric + " is not a timer");
}
return Timer.class.cast(holder.metric);
}
@Override
public Metric getMetric(final MetricID metricID) {
final Holder<? extends Metric> holder = metrics.get(metricID);
return holder == null ? null : holder.metric;
}
@Override
public <T extends Metric> T getMetric(final MetricID metricID, final Class<T> asType) {
try {
return asType.cast(getMetric(metricID));
} catch (ClassCastException e) {
throw new IllegalArgumentException(metricID + " was not of expected type " + asType, e);
}
}
@Override
public boolean remove(final String name) {
final AtomicBoolean done = new AtomicBoolean(false);
removeMatching((metricID, metric) -> {
final boolean equals = Objects.equals(metricID.getName(), name);
if (equals) {
done.set(true);
}
return equals;
});
return done.get();
}
@Override
public boolean remove(final MetricID metricID) {
return metrics.remove(metricID) != null;
}
@Override
public void removeMatching(final MetricFilter filter) {
metrics.entrySet().removeIf(it -> filter.matches(it.getKey(), it.getValue().metric));
}
@Override
public SortedSet<String> getNames() {
return metrics.keySet().stream().map(MetricID::getName).collect(toCollection(TreeSet::new));
}
@Override
public SortedSet<MetricID> getMetricIDs() {
return new TreeSet<>(metrics.keySet());
}
@Override
public SortedMap<MetricID, Gauge> getGauges() {
return getGauges(MetricFilter.ALL);
}
@Override
public SortedMap<MetricID, Gauge> getGauges(final MetricFilter filter) {
return filterByType(filter, Gauge.class);
}
@Override
public SortedMap<MetricID, Counter> getCounters() {
return getCounters(MetricFilter.ALL);
}
@Override
public SortedMap<MetricID, Counter> getCounters(final MetricFilter filter) {
return filterByType(filter, Counter.class);
}
@Override
public SortedMap<MetricID, ConcurrentGauge> getConcurrentGauges() {
return getConcurrentGauges(MetricFilter.ALL);
}
@Override
public SortedMap<MetricID, ConcurrentGauge> getConcurrentGauges(final MetricFilter filter) {
return filterByType(filter, ConcurrentGauge.class);
}
@Override
public SortedMap<MetricID, Histogram> getHistograms() {
return getHistograms(MetricFilter.ALL);
}
@Override
public SortedMap<MetricID, Histogram> getHistograms(final MetricFilter filter) {
return filterByType(filter, Histogram.class);
}
@Override
public SortedMap<MetricID, Meter> getMeters() {
return getMeters(MetricFilter.ALL);
}
@Override
public SortedMap<MetricID, Meter> getMeters(final MetricFilter filter) {
return filterByType(filter, Meter.class);
}
@Override
public SortedMap<MetricID, Timer> getTimers() {
return getTimers(MetricFilter.ALL);
}
@Override
public SortedMap<MetricID, Timer> getTimers(final MetricFilter filter) {
return filterByType(filter, Timer.class);
}
@Override
public SortedMap<MetricID, SimpleTimer> getSimpleTimers() {
return filterByType(MetricFilter.ALL, SimpleTimer.class);
}
@Override
public SortedMap<MetricID, SimpleTimer> getSimpleTimers(final MetricFilter filter) {
return filterByType(filter, SimpleTimer.class);
}
@Override
public SortedMap<MetricID, Metric> getMetrics(final MetricFilter metricFilter) {
return filterByType(metricFilter, Metric.class);
}
@Override
public <T extends Metric> SortedMap<MetricID, T> getMetrics(Class<T> ofType, MetricFilter filter) {
return (SortedMap<MetricID, T>) getMetrics(
(metricID, metric) -> filter.matches(metricID, metric)
&& ofType.isAssignableFrom(metric.getClass()));
}
@Override
public Map<MetricID, Metric> getMetrics() {
return metrics.entrySet().stream().collect(toMap(Map.Entry::getKey, e -> e.getValue().metric));
}
@Override
public Metadata getMetadata(final String name) {
final Holder<? extends Metric> holder = metrics.get(new MetricID(name));
return holder == null ? null : holder.metadata;
}
@Override
public Map<String, Metadata> getMetadata() {
return metrics.entrySet().stream()
.collect(toMap(e -> e.getKey().getName(), e -> e.getValue().metadata, (a, b) -> a));
}
@Override
public Type getType() {
return type;
}
private Metadata enforceType(final Metadata metadata, final MetricType type) {
if (metadata.getTypeRaw() == null || !type.equals(metadata.getTypeRaw())) {
return Metadata.builder(metadata).withType(type).build();
}
return metadata;
}
// Sorted snapshot of every metric that is an instance of `type` AND accepted by `filter`.
private <T extends Metric> SortedMap<MetricID, T> filterByType(final MetricFilter filter, final Class<T> type) {
    return metrics.entrySet().stream()
            .filter(it -> type.isInstance(it.getValue().metric))
            .filter(it -> filter.matches(it.getKey(), it.getValue().metric))
            .collect(toMap(Map.Entry::getKey, e -> type.cast(e.getValue().metric), (a, b) -> {
                throw new IllegalArgumentException("can't merge metrics"); // impossible: source map keys are unique
            }, TreeMap::new));
}
// Immutable association of a metric instance with its metadata and id, as stored in the registry map.
private static final class Holder<T extends Metric> {
    private final T metric;
    private final Metadata metadata;
    private final MetricID metricID;
    private Holder(final T metric, final Metadata metadata, final MetricID metricID) {
        this.metric = metric;
        // Defensive copy so later mutation of the caller's Metadata doesn't leak in.
        this.metadata = Metadata.builder(metadata).build();
        this.metricID = metricID;
    }
}
}
| 8,522 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/TimerImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common;
import org.eclipse.microprofile.metrics.Histogram;
import org.eclipse.microprofile.metrics.Meter;
import org.eclipse.microprofile.metrics.Snapshot;
import org.eclipse.microprofile.metrics.Timer;
import javax.json.bind.annotation.JsonbProperty;
import javax.json.bind.annotation.JsonbTransient;
import java.time.Duration;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.LongAdder;
/**
 * Timer combining a Histogram (duration distribution) and a Meter (event rates);
 * the total elapsed time is accumulated separately, in nanoseconds.
 */
public class TimerImpl implements Timer {
    private final Histogram histogram;
    private final Meter meter;
    // Sum of every recorded duration, in nanoseconds.
    private final LongAdder elapsed = new LongAdder();

    public TimerImpl(final String unit) {
        this.histogram = new HistogramImpl(unit);
        this.meter = new MeterImpl(unit);
    }

    @Override
    public void update(final Duration duration) {
        // Negative durations are silently dropped rather than corrupting the statistics.
        if (duration.isNegative()) {
            return;
        }
        histogram.update(duration.toNanos());
        meter.mark();
        elapsed.add(duration.toNanos());
    }

    @Override
    public <T> T time(final Callable<T> event) throws Exception {
        // Context#close() records the elapsed time even when event throws.
        try (final Context context = time()) {
            return event.call();
        }
    }

    @Override
    public void time(final Runnable event) {
        try {
            time(() -> {
                event.run();
                return null;
            });
        } catch (final RuntimeException e) {
            throw e; // propagate unchanged
        } catch (final Exception e) {
            // A Runnable can't throw a checked exception, but the Callable overload forces this path.
            throw new IllegalStateException(e);
        }
    }

    @Override
    public Context time() {
        return new ContextImpl();
    }

    @Override
    public Duration getElapsedTime() {
        return Duration.ofNanos(elapsed.longValue());
    }

    @Override
    public long getCount() {
        return histogram.getCount();
    }

    @Override
    @JsonbProperty("fifteenMinRate")
    public double getFifteenMinuteRate() {
        return meter.getFifteenMinuteRate();
    }

    @Override
    @JsonbProperty("fiveMinRate")
    public double getFiveMinuteRate() {
        return meter.getFiveMinuteRate();
    }

    @Override
    public double getMeanRate() {
        return meter.getMeanRate();
    }

    @Override
    @JsonbProperty("oneMinRate")
    public double getOneMinuteRate() {
        return meter.getOneMinuteRate();
    }

    @Override
    @JsonbTransient
    public Snapshot getSnapshot() {
        return histogram.getSnapshot();
    }

    // Convenience accessors exposing snapshot statistics as JSON-B friendly properties.
    public double getP50() {
        return getSnapshot().getMedian();
    }

    public double getP75() {
        return getSnapshot().get75thPercentile();
    }

    public double getP95() {
        return getSnapshot().get95thPercentile();
    }

    public double getP98() {
        return getSnapshot().get98thPercentile();
    }

    public double getP99() {
        return getSnapshot().get99thPercentile();
    }

    public double getP999() {
        return getSnapshot().get999thPercentile();
    }

    public long getMax() {
        return getSnapshot().getMax();
    }

    public double getMean() {
        return getSnapshot().getMean();
    }

    public long getMin() {
        return getSnapshot().getMin();
    }

    public double getStddev() {
        return getSnapshot().getStdDev();
    }

    // Measures the time between construction and stop()/close().
    private class ContextImpl implements Context {
        private final long start = System.nanoTime();

        @Override
        public long stop() {
            final long duration = System.nanoTime() - start;
            update(Duration.ofNanos(duration));
            return duration;
        }

        @Override
        public void close() {
            stop();
        }
    }
}
| 8,523 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/GaugeImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import org.eclipse.microprofile.metrics.Gauge;
/**
 * Gauge backed by a reflective zero-arg getter: every read invokes {@code method}
 * on {@code reference} and returns the result.
 */
public class GaugeImpl<T> implements Gauge<T> {
    private final Method method;
    private final Object reference;

    public GaugeImpl(final Object reference, final Method method) {
        this.reference = reference;
        this.method = method;
    }

    @Override
    public T getValue() {
        try {
            final Object value = method.invoke(reference);
            return (T) value;
        } catch (final InvocationTargetException ite) {
            // Surface the underlying failure, not the reflection wrapper.
            throw new IllegalStateException(ite.getCause());
        } catch (final IllegalAccessException iae) {
            throw new IllegalStateException(iae);
        }
    }
}
| 8,524 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/ConcurrentGaugeImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common;
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
import java.util.concurrent.atomic.AtomicLong;
import org.eclipse.microprofile.metrics.ConcurrentGauge;
// this minute thing is stupid but what the TCK expect...todo: move to a scheduledexecutor to avoid that stupid cost
/**
 * Concurrent counter reporting, besides the current count, the minimum and maximum
 * values observed during the previously completed minute (as the TCK expects).
 */
public class ConcurrentGaugeImpl implements ConcurrentGauge {
    // Ticks once per minute; min/max are published for the last complete minute.
    private static final Clock CLOCK = Clock.tickMinutes(ZoneId.of("UTC"));

    private final AtomicLong delegate = new AtomicLong();
    private final AtomicLong min = new AtomicLong();
    private final AtomicLong max = new AtomicLong();

    // Start of the minute being accumulated; writes are guarded by synchronized(this).
    private volatile Instant currentMinute = CLOCK.instant();
    private volatile long lastMax = 0;
    private volatile long lastMin = 0;

    private final String unit;

    public ConcurrentGaugeImpl(final String unit) {
        this.unit = unit;
    }

    public String getUnit() {
        return unit;
    }

    @Override
    public void inc() {
        maybeRotate();
        synchronized (this) {
            final long value = delegate.incrementAndGet();
            final long max = this.max.get();
            if (max < value) {
                this.max.set(value);
            }
        }
    }

    @Override
    public void dec() {
        maybeRotate();
        synchronized (this) {
            final long value = delegate.decrementAndGet();
            final long min = this.min.get();
            // Fix: record a new minimum when the value drops BELOW the tracked min;
            // the previous comparison (min < value) was inverted and never captured minima.
            if (value < min) {
                this.min.set(value);
            }
        }
    }

    @Override
    public long getCount() {
        maybeRotate();
        return delegate.get();
    }

    @Override
    public long getMax() {
        maybeRotate();
        return lastMax;
    }

    @Override
    public long getMin() {
        maybeRotate();
        return lastMin;
    }

    // Double-checked minute rotation: publishes the finished minute's min/max and
    // restarts tracking from the current count.
    private void maybeRotate() {
        final Instant now = CLOCK.instant();
        if (now.isAfter(currentMinute)) {
            synchronized (this) {
                if (now.isAfter(currentMinute)) {
                    final long count = delegate.get();
                    // getAndSet already re-seeds min/max with the current count,
                    // so no extra set(count) is needed afterwards.
                    lastMin = min.getAndSet(count);
                    lastMax = max.getAndSet(count);
                    currentMinute = now;
                }
            }
        }
    }
}
| 8,525 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/MeterImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common;
import static org.apache.geronimo.microprofile.metrics.common.expdecay.ExponentialMovingAverage.forMinutes;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;
import java.util.stream.Stream;
import javax.json.bind.annotation.JsonbProperty;
import org.apache.geronimo.microprofile.metrics.common.expdecay.ExponentialMovingAverage;
import org.eclipse.microprofile.metrics.Meter;
/**
 * Meter tracking a total count plus 1/5/15 minute exponentially weighted rates.
 * Rates are refreshed lazily — at most once per 5s interval — on mark() or on read.
 */
public class MeterImpl implements Meter {
    private static final long REFRESH_INTERVAL = TimeUnit.SECONDS.toNanos(5);

    private final AtomicLong lastRefresh = new AtomicLong(System.nanoTime());
    private final LongAdder count = new LongAdder();
    private final ExponentialMovingAverage rate15 = forMinutes(15);
    private final ExponentialMovingAverage rate5 = forMinutes(5);
    private final ExponentialMovingAverage rate1 = forMinutes(1);
    // Creation time, origin for the mean rate computation.
    private final long initNs = System.nanoTime();
    private final String unit;

    public MeterImpl(final String unit) {
        this.unit = unit;
    }

    public String getUnit() {
        return unit;
    }

    @Override
    public void mark() {
        mark(1);
    }

    @Override
    public void mark(final long n) {
        updateIfNeeded();
        count.add(n);
        rate1.add(n);
        rate5.add(n);
        rate15.add(n);
    }

    @Override
    public long getCount() {
        return count.sum();
    }

    @Override
    @JsonbProperty("fifteenMinRate")
    public double getFifteenMinuteRate() {
        updateIfNeeded();
        return rate15.rate();
    }

    @Override
    @JsonbProperty("fiveMinRate")
    public double getFiveMinuteRate() {
        updateIfNeeded();
        return rate5.rate();
    }

    @Override
    @JsonbProperty("oneMinRate")
    public double getOneMinuteRate() {
        updateIfNeeded();
        return rate1.rate();
    }

    @Override
    public double getMeanRate() {
        final long count = getCount();
        if (count == 0) {
            return 0;
        }
        final long duration = System.nanoTime() - initNs;
        if (duration == 0) {
            return 0;
        }
        final long seconds = TimeUnit.NANOSECONDS.toSeconds(duration);
        if (seconds == 0) {
            // Less than a second of lifetime: avoid a division by zero.
            return 0;
        }
        return count * 1. / seconds;
    }

    private void updateIfNeeded() {
        final long now = System.nanoTime();
        final long previousRefresh = lastRefresh.get();
        // Single winner per interval: the CAS both elects the refreshing thread and
        // stores `now`, so the extra lastRefresh.set(now) the code previously did
        // after a successful CAS was a redundant duplicate write and is removed.
        if (now - previousRefresh >= REFRESH_INTERVAL && lastRefresh.compareAndSet(previousRefresh, now)) {
            Stream.of(rate1, rate5, rate15).forEach(ExponentialMovingAverage::refresh);
        }
    }
}
| 8,526 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/BaseMetrics.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common;
import java.lang.management.ClassLoadingMXBean;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.OperatingSystemMXBean;
import java.lang.management.RuntimeMXBean;
import java.lang.management.ThreadMXBean;
import java.util.List;
import java.util.function.LongSupplier;
import javax.json.bind.annotation.JsonbTransient;
import org.eclipse.microprofile.metrics.Counter;
import org.eclipse.microprofile.metrics.Gauge;
import org.eclipse.microprofile.metrics.Metadata;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.eclipse.microprofile.metrics.MetricType;
import org.eclipse.microprofile.metrics.MetricUnits;
import org.eclipse.microprofile.metrics.Tag;
// isnt it super weird to hardcode that instead of defining a JMX integration?
// also the gauge/counter choice is quite surprising sometimes
/**
 * Registers the MicroProfile "base" scope metrics (JVM uptime, CPU, class loading,
 * threads, memory, garbage collection) against the given registry, sourcing every
 * value from the platform MXBeans.
 */
public class BaseMetrics {
    private final MetricRegistry registry;

    public BaseMetrics(final MetricRegistry registry) {
        this.registry = registry;
    }

    public void register() {
        final RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean();
        registry.register(Metadata.builder()
                .withName("jvm.uptime")
                .withDisplayName("JVM Uptime")
                // NOTE(review): the value is the uptime but the description talks about the start
                // time — looks inherited from the spec text, confirm the wording is intended.
                .withDescription("Displays the start time of the Java virtual machine in milliseconds." +
                        "This attribute displays the approximate time when the Java virtual machine started.")
                .withType(MetricType.GAUGE)
                .withUnit(MetricUnits.MILLISECONDS)
                .build(), gauge(runtimeMXBean::getUptime));
        final OperatingSystemMXBean operatingSystemMXBean = ManagementFactory.getOperatingSystemMXBean();
        registry.register(Metadata.builder()
                .withName("cpu.availableProcessors")
                .withDisplayName("Available Processors")
                .withDescription("Displays the number of processors available to the Java virtual machine. " +
                        "This value may change during a particular invocation of the virtual machine.")
                .withType(MetricType.GAUGE).withUnit(MetricUnits.NONE).build(), gauge(operatingSystemMXBean::getAvailableProcessors));
        final ClassLoadingMXBean classLoadingMXBean = ManagementFactory.getClassLoadingMXBean();
        // NOTE(review): the two "unloadedClasses" registrations below read getUnloadedClassCount
        // but reuse the "loaded classes" display name/description — likely a copy/paste slip,
        // confirm against the MicroProfile Metrics base-metrics table.
        registry.register(Metadata.builder()
                .withName("classloader.unloadedClasses.count")
                .withDisplayName("Current Loaded Class Count")
                .withDescription("Displays the number of classes that are currently loaded in the Java virtual machine.")
                .withType(MetricType.GAUGE)
                .withUnit(MetricUnits.NONE)
                .build(),
                gauge(classLoadingMXBean::getUnloadedClassCount));
        registry.register(Metadata.builder()
                .withName("classloader.unloadedClasses.total")
                .withDisplayName("Current Loaded Class Total")
                .withDescription("Displays the number of classes that are currently loaded in the Java virtual machine.")
                .withType(MetricType.COUNTER)
                .withUnit(MetricUnits.NONE)
                .build(),
                counter(classLoadingMXBean::getUnloadedClassCount));
        registry.register(Metadata.builder()
                .withName("classloader.loadedClasses.count")
                .withDisplayName("Total Loaded Class Count")
                .withDescription("Displays the total number of classes that have been loaded since the Java virtual machine has started execution.")
                .withType(MetricType.GAUGE)
                .withUnit(MetricUnits.NONE)
                .build(),
                gauge(classLoadingMXBean::getTotalLoadedClassCount));
        registry.register(Metadata.builder()
                .withName("classloader.loadedClasses.total")
                .withDisplayName("Total Loaded Class Count")
                .withDescription("Displays the total number of classes that have been loaded since the Java virtual machine has started execution.")
                .withType(MetricType.COUNTER)
                .withUnit(MetricUnits.NONE)
                .build(),
                counter(classLoadingMXBean::getTotalLoadedClassCount));
        final ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean();
        registry.register(Metadata.builder()
                .withName("thread.count")
                .withDisplayName("Thread Count")
                .withDescription("Displays the current number of live threads including both daemon and non-daemon threads")
                .withType(MetricType.GAUGE)
                .withUnit(MetricUnits.NONE)
                .build(),
                gauge(threadMXBean::getThreadCount));
        registry.register(Metadata.builder()
                .withName("thread.daemon.count")
                .withDisplayName("Daemon Thread Count")
                .withDescription("Displays the current number of live daemon threads.")
                .withType(MetricType.GAUGE)
                .withUnit(MetricUnits.NONE)
                .build(),
                gauge(threadMXBean::getDaemonThreadCount));
        registry.register(Metadata.builder()
                .withName("thread.max.count")
                .withDisplayName("Peak Thread Count")
                .withDescription("Displays the peak live thread count since the Java virtual machine started or peak was reset." +
                        "This includes daemon and non-daemon threads.")
                .withType(MetricType.GAUGE)
                .withUnit(MetricUnits.NONE)
                .build(),
                gauge(threadMXBean::getPeakThreadCount));
        final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
        registry.register(Metadata.builder()
                .withName("memory.usedHeap")
                .withDisplayName("Used Heap Memory")
                .withDescription("Displays the amount of used heap memory in bytes.")
                .withType(MetricType.GAUGE).withUnit(MetricUnits.BYTES).build(), gauge(memoryMXBean.getHeapMemoryUsage()::getUsed));
        registry.register(Metadata.builder()
                .withName("memory.committedHeap")
                .withDisplayName("Committed Heap Memory")
                .withDescription("Displays the amount of memory in bytes that is committed for the Java virtual machine to use. " +
                        "This amount of memory is guaranteed for the Java virtual machine to use.")
                .withType(MetricType.GAUGE).withUnit(MetricUnits.BYTES).build(), gauge(memoryMXBean.getHeapMemoryUsage()::getCommitted));
        registry.register(Metadata.builder()
                .withName("memory.maxHeap")
                .withDisplayName("Max Heap Memory")
                .withDescription("Displays the maximum amount of heap memory in bytes that can be used for memory management. " +
                        "This attribute displays -1 if the maximum heap memory size is undefined. " +
                        "This amount of memory is not guaranteed to be available for memory management if it is greater than " +
                        "the amount of committed memory. The Java virtual machine may fail to allocate memory even " +
                        "if the amount of used memory does not exceed this maximum size.")
                .withType(MetricType.GAUGE).withUnit(MetricUnits.BYTES).build(), gauge(memoryMXBean.getHeapMemoryUsage()::getMax));
        final List<GarbageCollectorMXBean> garbageCollectorMXBeans = ManagementFactory.getGarbageCollectorMXBeans();
        // One gc.total/gc.time pair per collector, disambiguated by a "name" tag.
        garbageCollectorMXBeans.forEach(garbageCollectorMXBean -> {
            registry.register(Metadata.builder()
                    .withName("gc.total")
                    .withDisplayName("Garbage Collection Count")
                    .withDescription("Displays the total number of collections that have occurred." +
                            "This attribute lists -1 if the collection count is undefined for this collector.")
                    .withType(MetricType.COUNTER)
                    .withUnit(MetricUnits.NONE)
                    .build(),
                    counter(garbageCollectorMXBean::getCollectionCount),
                    new Tag("name", garbageCollectorMXBean.getName()));
            registry.register(Metadata.builder()
                    .withName("gc.time")
                    .withDisplayName("Garbage Collection Time")
                    .withDescription("Displays the approximate accumulated collection elapsed time in milliseconds." +
                            "This attribute displays -1 if the collection elapsed time is undefined for this collector." +
                            "The Java virtual machine implementation may use a high resolution timer to measure the elapsed time." +
                            "This attribute may display the same value even if the collection count has been incremented if" +
                            "the collection elapsed time is very short.")
                    .withType(MetricType.GAUGE)
                    .withUnit(MetricUnits.MILLISECONDS)
                    .build(),
                    gauge(garbageCollectorMXBean::getCollectionTime),
                    new Tag("name", garbageCollectorMXBean.getName()));
        });
    }

    // Wraps a long supplier as a read-only Gauge<Long>.
    private Gauge<Long> gauge(final LongSupplier supplier) {
        return new Gauge<Long>() {
            @Override
            @JsonbTransient
            public Long getValue() {
                return supplier.getAsLong();
            }
        };
    }

    // Read-only Counter view over an MXBean supplier: inc() is ignored on purpose
    // since the underlying value is owned by the JVM.
    private Counter counter(final LongSupplier supplier) {
        return new Counter() {
            @Override
            public void inc() {
                // no-op
            }
            @Override
            public void inc(final long n) {
                // no-op
            }
            @Override
            public long getCount() {
                return supplier.getAsLong();
            }
        };
    }
}
| 8,527 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/expdecay/ExponentialMovingAverage.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common.expdecay;
import static java.util.concurrent.TimeUnit.SECONDS;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.LongAdder;
/**
 * Exponentially weighted moving average fed by fixed 5-second ticks:
 * callers accumulate events with {@link #add(long)} and fold a tick into the
 * average with {@link #refresh()} once per interval (see MeterImpl).
 */
public class ExponentialMovingAverage {
    // Tick length between two refresh() calls, in nanoseconds.
    private static final double INTERVAL = SECONDS.toNanos(5);
    // Converts the internal per-nanosecond rate into a per-second rate.
    private static final double RATE_RATIO = TimeUnit.SECONDS.toNanos(1);

    private final LongAdder accumulator = new LongAdder();
    private final double alpha;
    private volatile double rate = 0.0;

    private ExponentialMovingAverage(final double alpha) {
        this.alpha = alpha;
    }

    /** Current smoothed rate, in events per second. */
    public double rate() {
        return rate * RATE_RATIO;
    }

    /** Records {@code n} events for the tick in progress. */
    public void add(final long n) {
        accumulator.add(n);
    }

    /** Folds the events of the finished tick into the average; call once per INTERVAL. */
    public void refresh() {
        final long count = accumulator.sumThenReset();
        final double instantRate = count / INTERVAL;
        // The first non-zero sample primes the average, afterwards apply EWMA smoothing.
        final double newRate = rate == 0. ? instantRate : nextRate(instantRate);
        this.rate = newRate;
    }

    private double nextRate(final double instantRate) {
        return rate + (alpha * (instantRate - rate));
    }

    /**
     * EWMA decaying over the given number of minutes.
     * Fix: the standard smoothing factor is alpha = 1 - exp(-interval / period);
     * the previous code used exp(...) directly, which yields alpha ~= 1 (0.9945 for
     * 15 minutes) and made the "moving average" track the instant rate almost
     * unsmoothed instead of decaying over the requested window.
     */
    public static ExponentialMovingAverage forMinutes(final int minutes) {
        return new ExponentialMovingAverage(1 - Math.exp(-5/*INTERVAL in sec*/ / 60. / minutes));
    }
}
| 8,528 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/prometheus/PrometheusFormatter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common.prometheus;
import org.eclipse.microprofile.metrics.ConcurrentGauge;
import org.eclipse.microprofile.metrics.Counter;
import org.eclipse.microprofile.metrics.Gauge;
import org.eclipse.microprofile.metrics.Histogram;
import org.eclipse.microprofile.metrics.Metadata;
import org.eclipse.microprofile.metrics.Meter;
import org.eclipse.microprofile.metrics.Metered;
import org.eclipse.microprofile.metrics.Metric;
import org.eclipse.microprofile.metrics.MetricID;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.eclipse.microprofile.metrics.MetricType;
import org.eclipse.microprofile.metrics.MetricUnits;
import org.eclipse.microprofile.metrics.SimpleTimer;
import org.eclipse.microprofile.metrics.Snapshot;
import org.eclipse.microprofile.metrics.Tag;
import org.eclipse.microprofile.metrics.Timer;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Modifier;
import java.time.Duration;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Stream;
import static java.lang.Math.pow;
import static java.util.Optional.of;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
import static java.util.stream.Collectors.toSet;
// note: we keep this name for backward compat but this is now an "openmetrics" formatter
// todo: cache all the keys, can easily be done decorating the registry and enriching metadata (ExtendedMetadata/MetricsID)
public class PrometheusFormatter {
protected final Set<Object> validUnits;
protected final Map<String, String> keyMapping = new HashMap<>();
protected Predicate<String> prefixFilter = null;
protected Tag[] globalTags;
public PrometheusFormatter() {
    // Collect every public static String constant of MetricUnits except NONE:
    // these are the unit names considered valid when composing prometheus keys.
    validUnits = Stream.of(MetricUnits.class.getDeclaredFields())
            .filter(f -> !"NONE".equals(f.getName()) && Modifier.isStatic(f.getModifiers()) && Modifier.isPublic(f.getModifiers()) && String.class == f.getType())
            .map(f -> {
                try {
                    return f.get(null);
                } catch (final IllegalAccessException e) {
                    throw new IllegalStateException(e);
                }
            })
            .collect(toSet());
}
/**
 * Registers every property as a key-mapping override, then recomputes the
 * derived prefix filter. Returns this formatter for chaining.
 */
public PrometheusFormatter enableOverriding(final Properties properties) {
    for (final String name : properties.stringPropertyNames()) {
        keyMapping.put(name, properties.getProperty(name));
    }
    afterOverride();
    return this;
}
// Tags appended to every exported metric in addition to its own MetricID tags.
public PrometheusFormatter withGlobalTags(final Tag[] globalTags) {
    this.globalTags = globalTags;
    return this;
}
/**
 * Loads optional overrides: first from the classpath resource
 * META-INF/geronimo-metrics/prometheus-mapping.properties, then from
 * "geronimo.metrics.prometheus.mapping.*" system properties (which win).
 */
public PrometheusFormatter enableOverriding() {
    try (final InputStream source = Thread.currentThread().getContextClassLoader()
            .getResourceAsStream("META-INF/geronimo-metrics/prometheus-mapping.properties")) {
        if (source != null) {
            final Properties properties = new Properties();
            properties.load(source);
            enableOverriding(properties);
        }
    } catch (final IOException e) {
        // no-op: overrides are best effort, a broken resource must not fail startup
    }
    System.getProperties().stringPropertyNames().stream()
            .filter(it -> it.startsWith("geronimo.metrics.prometheus.mapping."))
            .forEach(k -> keyMapping.put(k.substring("geronimo.metrics.prometheus.mapping.".length()), System.getProperty(k)));
    afterOverride();
    return this;
}
// Rebuilds the prefix filter from the reserved "geronimo.metrics.filter.prefix" mapping
// key: a comma-separated list of accepted prefixes, matched against both the raw key
// and its remapped form.
private void afterOverride() {
    final String prefix = keyMapping.get("geronimo.metrics.filter.prefix");
    if (prefix == null) {
        prefixFilter = null;
    } else {
        final List<String> prefixes = Stream.of(prefix.split(","))
                .map(String::trim)
                .filter(it -> !it.isEmpty())
                .collect(toList());
        final Predicate<String> directPredicate = name -> prefixes.stream().anyMatch(name::startsWith);
        prefixFilter = name -> directPredicate.test(name) || directPredicate.test(keyMapping.getOrDefault(name, name));
    }
}
/**
 * Renders the given metrics in OpenMetrics/Prometheus text format.
 *
 * @param registry source registry, used to resolve metadata and MetricIDs
 * @param registryKey scope prefix prepended to every key (e.g. base/vendor/application)
 * @param entries metrics to render, keyed by name (an optional ";tag=..." suffix is stripped)
 */
public StringBuilder toText(final MetricRegistry registry,
                            final String registryKey,
                            final Map<String, Metric> entries) {
    final Map<String, Metadata> metadatas = registry.getMetadata();
    // Reverse index to recover the MetricID (and thus the tags) from a metric instance.
    final Map<Metric, MetricID> ids = registry.getMetrics().entrySet().stream()
            .collect(toMap(Map.Entry::getValue, Map.Entry::getKey));
    return entries.entrySet().stream()
            .map(it -> {
                String key = it.getKey();
                // Strip any ";tag=value" suffix so the metadata lookup uses the bare name.
                final int tagSep = key.indexOf(';');
                if (tagSep > 0) {
                    key = key.substring(0, tagSep);
                }
                final Metadata metadata = metadatas.get(key);
                return new Entry(metadata, registryKey + '_' + toPrometheusKey(metadata), it.getValue(), ids.get(it.getValue()));
            })
            .filter(it -> prefixFilter == null || prefixFilter.test(it.prometheusKey))
            .map(entry -> {
                final List<Tag> tagsAsList = getTags(entry);
                // One rendering strategy per metric type.
                switch (entry.metadata.getTypeRaw()) {
                    case COUNTER: {
                        String key = toPrometheusKey(entry.metadata);
                        // OpenMetrics counters must carry the _total suffix.
                        if (!key.endsWith("_total")) {
                            key += "_total";
                        }
                        return counter(registryKey, entry, tagsAsList, key);
                    }
                    case CONCURRENT_GAUGE: {
                        final String key = toPrometheusKey(entry.metadata);
                        final ConcurrentGauge concurrentGauge = ConcurrentGauge.class.cast(entry.metric);
                        return concurrentGauge(registryKey, entry, tagsAsList, key, concurrentGauge);
                    }
                    case GAUGE: {
                        final Object value = Gauge.class.cast(entry.metric).getValue();
                        // Only numeric gauges are exportable in the text format.
                        if (Number.class.isInstance(value)) {
                            final String key = toPrometheusKey(entry.metadata);
                            return gauge(registryKey, entry, tagsAsList, Number.class.cast(value), key);
                        }
                        return new StringBuilder();
                    }
                    case METERED: {
                        final Meter meter = Meter.class.cast(entry.metric);
                        final String keyBase = toPrometheus(entry.metadata);
                        return meter(registryKey, entry, tagsAsList, meter, keyBase);
                    }
                    case TIMER: {
                        final String keyBase = toPrometheus(entry.metadata);
                        final String keyUnit = toUnitSuffix(entry.metadata, false);
                        final Timer timer = Timer.class.cast(entry.metric);
                        return timer(registryKey, entry, tagsAsList, keyBase, keyUnit, timer);
                    }
                    case SIMPLE_TIMER: {
                        final String keyBase = toPrometheus(entry.metadata);
                        final String keyUnit = toUnitSuffix(entry.metadata, false);
                        final SimpleTimer timer = SimpleTimer.class.cast(entry.metric);
                        return simpleTimer(registryKey, entry, tagsAsList, keyBase, keyUnit, timer);
                    }
                    case HISTOGRAM:
                        final String keyBase = toPrometheus(entry.metadata);
                        final String keyUnit = toUnitSuffix(entry.metadata, false);
                        final Histogram histogram = Histogram.class.cast(entry.metric);
                        return histogram(registryKey, entry, tagsAsList, keyBase, keyUnit, histogram);
                    default:
                        // Unknown type: contribute nothing.
                        return new StringBuilder();
                }
            })
            .collect(StringBuilder::new, StringBuilder::append, StringBuilder::append);
}
// Tags of the metric itself, extended with the configured global tags (deduplicated).
private List<Tag> getTags(final Entry entry) {
    final List<Tag> metricTags = entry.metricID.getTagsAsList();
    if (globalTags == null || globalTags.length == 0) {
        return metricTags;
    }
    return Stream.concat(metricTags.stream(), Stream.of(globalTags))
            .distinct()
            .collect(toList());
}
// Renders a histogram as a prometheus summary: type header, count line, then quantiles.
private StringBuilder histogram(final String registryKey, final Entry entry, final List<Tag> tagsAsList, final String keyBase, final String keyUnit, final Histogram histogram) {
    final String type = entry.metadata == null ? null : entry.metadata.getType();
    return new StringBuilder()
            .append(type(registryKey, keyBase + keyUnit + " summary", type))
            .append(value(registryKey, keyBase + keyUnit + "_count", histogram.getCount(), type, entry.metadata, tagsAsList))
            .append(toPrometheus(registryKey, keyBase, keyUnit, histogram.getSnapshot(), entry.metadata, tagsAsList));
}
private StringBuilder timer(final String registryKey, final Entry entry, final List<Tag> tagsAsList, final String keyBase, final String keyUnit, final Timer timer) {
final Duration elapsedTime = timer.getElapsedTime();
final String type = entry.metadata == null ? null : entry.metadata.getType();
return new StringBuilder()
.append(type(registryKey, keyBase + keyUnit + " summary", type))
.append(value(registryKey, keyBase + keyUnit + "_count", timer.getCount(), type, entry.metadata, tagsAsList))
.append(value(registryKey, keyBase + "_elapsedTime", elapsedTime == null ? 0 : elapsedTime.toNanos(), type, entry.metadata, tagsAsList))
.append(meter(registryKey, entry, tagsAsList, timer, keyBase))
.append(toPrometheus(registryKey, keyBase, keyUnit, timer.getSnapshot(), entry.metadata, tagsAsList));
}
private StringBuilder simpleTimer(final String registryKey, final Entry entry, final List<Tag> tagsAsList,
final String keyBase, final String keyUnit, final SimpleTimer timer) {
final Duration elapsedTime = timer.getElapsedTime();
final StringBuilder builder = new StringBuilder()
.append(type(registryKey, keyBase + keyUnit + " summary", "simple timer"))
.append(value(registryKey, keyBase + "_total", timer.getCount(), "counter", entry.metadata, tagsAsList))
.append(value(registryKey, keyBase + "_elapsedTime" + keyUnit, elapsedTime == null ? 0 : elapsedTime.toNanos(), "simple timer", entry.metadata, tagsAsList));
final Duration minTimeDuration = timer.getMinTimeDuration();
builder.append(value(registryKey, keyBase + "_minTimeDuration" + keyUnit, minTimeDuration == null ? Double.NaN : minTimeDuration.toNanos(), "simple timer", entry.metadata, tagsAsList));
final Duration maxTimeDuration = timer.getMaxTimeDuration();
builder.append(value(registryKey, keyBase + "_maxTimeDuration" + keyUnit, maxTimeDuration == null ? Double.NaN : maxTimeDuration.toNanos(), "simple timer", entry.metadata, tagsAsList));
return builder;
}
private StringBuilder meter(final String registryKey, final Entry entry, final List<Tag> tagsAsList, final Metered meter, final String keyBase) {
final String type = entry.metadata == null ? null : entry.metadata.getType();
return new StringBuilder()
.append(value(registryKey, keyBase + "_rate_per_second", meter.getMeanRate(), type, entry.metadata, tagsAsList))
.append(value(registryKey, keyBase + "_one_min_rate_per_second", meter.getOneMinuteRate(), type, entry.metadata, tagsAsList))
.append(value(registryKey, keyBase + "_five_min_rate_per_second", meter.getFiveMinuteRate(), type, entry.metadata, tagsAsList))
.append(value(registryKey, keyBase + "_fifteen_min_rate_per_second", meter.getFifteenMinuteRate(), type, entry.metadata, tagsAsList))
.append(value(registryKey, keyBase + "_total", meter.getCount(), type, entry.metadata, tagsAsList));
}
private StringBuilder gauge(final String registryKey, final Entry entry, final List<Tag> tagsAsList, final Number value, final String key) {
return new StringBuilder()
.append(value(registryKey, key, value.doubleValue(), entry.metadata == null ? null : entry.metadata.getType(), entry.metadata, tagsAsList));
}
private StringBuilder concurrentGauge(final String registryKey, final Entry entry, final List<Tag> tagsAsList, final String key, final ConcurrentGauge concurrentGauge) {
final String type = entry.metadata == null ? null : entry.metadata.getType();
return new StringBuilder()
.append(value(registryKey, key + "_current", concurrentGauge.getCount(), type, entry.metadata, tagsAsList))
.append(value(registryKey, key + "_min", concurrentGauge.getMin(), type, entry.metadata, tagsAsList))
.append(value(registryKey, key + "_max", concurrentGauge.getMax(), type, entry.metadata, tagsAsList));
}
private StringBuilder counter(final String registryKey, final Entry entry, final List<Tag> tagsAsList, final String key) {
return new StringBuilder()
.append(value(registryKey, key, Counter.class.cast(entry.metric).getCount(),
entry.metadata == null ? null : entry.metadata.getType(), entry.metadata, tagsAsList));
}
private StringBuilder toPrometheus(final String registryKey, final String keyBase, final String keyUnit,
final Snapshot snapshot, final Metadata metadata, final Collection<Tag> tags) {
final Function<Stream<Tag>, Collection<Tag>> metaFactory = newTags -> Stream.concat(
tags == null ? Stream.empty() : tags.stream(), newTags).distinct().collect(toList());
final String completeKey = keyBase + keyUnit;
final String type = metadata == null ? null : metadata.getType();
return new StringBuilder()
.append(value(registryKey, keyBase + "_min" + keyUnit, snapshot.getMin(), type, metadata, tags))
.append(value(registryKey, keyBase + "_max" + keyUnit, snapshot.getMax(), type, metadata, tags))
.append(value(registryKey, keyBase + "_mean" + keyUnit, snapshot.getMean(), type, metadata, tags))
.append(value(registryKey, keyBase + "_stddev" + keyUnit, snapshot.getStdDev(), type, metadata, tags))
.append(value(registryKey, completeKey, snapshot.getMedian(), type, metadata,
metaFactory.apply(Stream.of(new Tag("quantile", "0.5")))))
.append(value(registryKey, completeKey, snapshot.get75thPercentile(), type, metadata,
metaFactory.apply(Stream.of(new Tag("quantile", "0.75")))))
.append(value(registryKey, completeKey, snapshot.get95thPercentile(), type, metadata,
metaFactory.apply(Stream.of(new Tag("quantile", "0.95")))))
.append(value(registryKey, completeKey, snapshot.get98thPercentile(), type, metadata,
metaFactory.apply(Stream.of(new Tag("quantile", "0.98")))))
.append(value(registryKey, completeKey, snapshot.get99thPercentile(), type, metadata,
metaFactory.apply(Stream.of(new Tag("quantile", "0.99")))))
.append(value(registryKey, completeKey, snapshot.get999thPercentile(), type, metadata,
metaFactory.apply(Stream.of(new Tag("quantile", "0.999")))));
}
private String toPrometheusKey(final Metadata metadata) {
return toPrometheus(metadata) + toUnitSuffix(metadata, metadata.getTypeRaw() == MetricType.COUNTER);
}
private String toUnitSuffix(final Metadata metadata, final boolean enforceValid) {
final String unit = enforceValid ? getValidUnit(metadata) : (metadata.getUnit() == null ? MetricUnits.NONE : metadata.getUnit());
return MetricUnits.NONE.equalsIgnoreCase(unit) || (enforceValid && !validUnits.contains(unit)) ? "" : ("_" + toPrometheusUnit(unit));
}
private StringBuilder value(final String registryKey, final String key, final double value,
final String type, final Metadata metadata, final Collection<Tag> tags) {
final String builtKey = registryKey + '_' + key;
return new StringBuilder()
.append(type(registryKey, key, type))
.append(keyMapping.getOrDefault(builtKey, builtKey))
.append(of(tags)
.filter(t -> !t.isEmpty())
.map(t -> tags.stream()
.map(e -> e.getTagName() + "=\"" + e.getTagValue() + "\"")
.collect(joining(",", "{", "}")))
.orElse(""))
.append(' ').append(toPrometheusValue(getValidUnit(metadata), value)).append("\n");
}
private String getValidUnit(final Metadata metadata) {
final String unit = metadata.getUnit() == null ? MetricUnits.NONE : metadata.getUnit();
// for tck, we dont really want to prevent the user to add new units
// we should likely just check it exists in MetricUnits constant but it is too restrictive
if (unit.startsWith("jelly")) {
return MetricUnits.NONE;
}
return unit;
}
private StringBuilder type(final String registryKey, final String key, final String type) {
final String builtKey = registryKey + '_' + key;
final StringBuilder builder = new StringBuilder()
.append("# TYPE ").append(keyMapping.getOrDefault(builtKey, builtKey));
if (type != null) {
builder.append(' ').append(type);
}
return builder.append("\n");
}
private String toPrometheusUnit(final String unit) {
if (unit == null) {
return null;
}
switch (unit) {
case MetricUnits.BITS:
case MetricUnits.KILOBITS:
case MetricUnits.MEGABITS:
case MetricUnits.GIGABITS:
case MetricUnits.KIBIBITS:
case MetricUnits.MEBIBITS:
case MetricUnits.GIBIBITS:
case MetricUnits.BYTES:
case MetricUnits.KILOBYTES:
case MetricUnits.MEGABYTES:
case MetricUnits.GIGABYTES:
return "bytes";
case MetricUnits.NANOSECONDS:
case MetricUnits.MICROSECONDS:
case MetricUnits.MILLISECONDS:
case MetricUnits.SECONDS:
case MetricUnits.MINUTES:
case MetricUnits.HOURS:
case MetricUnits.DAYS:
return "seconds";
default:
return unit;
}
}
private double toPrometheusValue(final String unit, final double value) {
if (unit == null) {
return value;
}
switch (unit) {
case MetricUnits.BITS:
return value / 8;
case MetricUnits.KILOBITS:
return value * 1000 / 8;
case MetricUnits.MEGABITS:
return value * pow(1000, 2) / 8;
case MetricUnits.GIGABITS:
return value * pow(1000, 3) / 8;
case MetricUnits.KIBIBITS:
return value * 128;
case MetricUnits.MEBIBITS:
return value * pow(1024, 2);
case MetricUnits.GIBIBITS:
return value * pow(1024, 3);
case MetricUnits.BYTES:
return value;
case MetricUnits.KILOBYTES:
return value * 1000;
case MetricUnits.MEGABYTES:
return value * pow(1000, 2);
case MetricUnits.GIGABYTES:
return value * pow(1000, 3);
case MetricUnits.NANOSECONDS:
return value;
case MetricUnits.MICROSECONDS:
return value / 1000;
case MetricUnits.MILLISECONDS:
return value / pow(1000, 2);
case MetricUnits.SECONDS:
return value / pow(1000, 3);
case MetricUnits.MINUTES:
return value * 60 / pow(1000, 3);
case MetricUnits.HOURS:
return value * pow(60, 2) / pow(1000, 3);
case MetricUnits.DAYS:
return value * pow(60, 2) * 24 / pow(1000, 3);
default:
return value;
}
}
    // Sanitizes a metric name into a Prometheus-compatible key: every run of
    // non-word characters collapses to a single "_", then doubled underscores
    // produced by adjacent replacements are squashed.
    // NOTE(review): after replaceAll, ':' can no longer appear in the string, so
    // the trailing replace(":_", ":") looks unreachable — confirm before removing.
    private String toPrometheus(final Metadata id) {
        return id.getName()
                .replaceAll("[^\\w]+", "_")
                .replace("__", "_")
                .replace(":_", ":");
    }
    /**
     * Immutable association of a registered metric with its metadata, its
     * pre-computed Prometheus key and its full {@link MetricID} (name + tags).
     * Fields are read directly by the enclosing formatter.
     */
    private static class Entry {
        private final Metadata metadata;
        private final String prometheusKey;
        private final Metric metric;
        private final MetricID metricID;
        private Entry(final Metadata metadata, final String prometheusKey, final Metric metric,
                      final MetricID metricID) {
            this.metadata = metadata;
            this.prometheusKey = prometheusKey;
            this.metric = metric;
            this.metricID = metricID;
        }
    }
}
| 8,529 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/prometheus/OpenMetricsFormatter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common.prometheus;
// Alias of PrometheusFormatter published under the OpenMetrics name, since the
// Prometheus text exposition format was standardized as OpenMetrics. Adds no behavior.
public class OpenMetricsFormatter extends PrometheusFormatter {
}
| 8,530 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/jaxrs/MetricsEndpoints.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common.jaxrs;
import org.apache.geronimo.microprofile.metrics.common.RegistryImpl;
import org.apache.geronimo.microprofile.metrics.common.prometheus.PrometheusFormatter;
import org.eclipse.microprofile.metrics.ConcurrentGauge;
import org.eclipse.microprofile.metrics.Counter;
import org.eclipse.microprofile.metrics.Gauge;
import org.eclipse.microprofile.metrics.Histogram;
import org.eclipse.microprofile.metrics.Metadata;
import org.eclipse.microprofile.metrics.Meter;
import org.eclipse.microprofile.metrics.Metered;
import org.eclipse.microprofile.metrics.Metric;
import org.eclipse.microprofile.metrics.MetricID;
import org.eclipse.microprofile.metrics.MetricRegistry;
import org.eclipse.microprofile.metrics.SimpleTimer;
import org.eclipse.microprofile.metrics.Snapshot;
import org.eclipse.microprofile.metrics.Tag;
import org.eclipse.microprofile.metrics.Timer;
import javax.json.JsonValue;
import javax.ws.rs.GET;
import javax.ws.rs.OPTIONS;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
import javax.ws.rs.core.UriInfo;
import java.time.Duration;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import java.util.function.Function;
import java.util.regex.Pattern;
import java.util.stream.Stream;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static java.util.Optional.ofNullable;
import static java.util.function.Function.identity;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toMap;
/**
 * JAX-RS resource implementing the MicroProfile Metrics REST API under "/metrics":
 * JSON and Prometheus text views of the base, vendor and application registries,
 * plus OPTIONS endpoints exposing metric metadata. Access control is delegated
 * to a pluggable {@link SecurityValidator}. Registries and collaborators are
 * injected via setters; {@link #init()} should be called after injection.
 */
@Path("metrics")
public class MetricsEndpoints {
    // used to sanitize tag values for the JSON key suffix format, where ';' separates tags
    private final Pattern semicolon = Pattern.compile(";");
    private MetricRegistry baseRegistry;
    private MetricRegistry vendorRegistry;
    private MetricRegistry applicationRegistry;
    private Tag[] globalTags = new Tag[0]; // ensure forgetting to call init() is tolerated for backward compatibility
    // default validator initialized eagerly; by default only local requests pass
    private SecurityValidator securityValidator = new SecurityValidator() {
        {
            init();
        }
    };
    private PrometheusFormatter prometheus = new PrometheusFormatter().enableOverriding();
    // Reads the global tags from the first RegistryImpl-backed registry and
    // propagates them to the Prometheus formatter.
    protected void init() {
        globalTags = Stream.of(baseRegistry, vendorRegistry, applicationRegistry)
                .filter(RegistryImpl.class::isInstance)
                .map(RegistryImpl.class::cast)
                .findFirst()
                .map(RegistryImpl::getGlobalTags)
                .orElseGet(() -> new Tag[0]);
        prometheus.withGlobalTags(globalTags);
    }
    public void setBaseRegistry(final MetricRegistry baseRegistry) {
        this.baseRegistry = baseRegistry;
    }
    public void setVendorRegistry(final MetricRegistry vendorRegistry) {
        this.vendorRegistry = vendorRegistry;
    }
    public void setApplicationRegistry(final MetricRegistry applicationRegistry) {
        this.applicationRegistry = applicationRegistry;
    }
    public void setSecurityValidator(final SecurityValidator securityValidator) {
        this.securityValidator = securityValidator;
    }
    public void setPrometheus(final PrometheusFormatter prometheus) {
        this.prometheus = prometheus;
    }
    // GET /metrics (JSON): all registries, keyed by registry name then metric key.
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    public Object getJson(@Context final SecurityContext securityContext,
                          @Context final UriInfo uriInfo) {
        securityValidator.checkSecurity(securityContext, uriInfo);
        return Stream.of(MetricRegistry.Type.values())
                .collect(toMap(MetricRegistry.Type::getName, it -> findRegistry(it.getName()).getMetrics().entrySet().stream()
                        .collect(toMap(this::getKey, m -> toJson(map(m.getValue()), formatTags(m.getKey())), this::merge))));
    }
    // GET /metrics (text): all registries concatenated in Prometheus exposition format.
    @GET
    @Produces(MediaType.TEXT_PLAIN)
    public String getText(@Context final SecurityContext securityContext,
                          @Context final UriInfo uriInfo) {
        securityValidator.checkSecurity(securityContext, uriInfo);
        return Stream.of(MetricRegistry.Type.values())
                .map(type -> {
                    final MetricRegistry metricRegistry = findRegistry(type.getName());
                    return prometheus.toText(metricRegistry, type.getName(), metrics(metricRegistry));
                })
                .collect(StringBuilder::new, StringBuilder::append, StringBuilder::append)
                .toString();
    }
    // GET /metrics/{registry} (JSON): one registry's metrics.
    @GET
    @Path("{registry}")
    @Produces(MediaType.APPLICATION_JSON)
    public Object getJson(@PathParam("registry") final String registry,
                          @Context final SecurityContext securityContext,
                          @Context final UriInfo uriInfo) {
        securityValidator.checkSecurity(securityContext, uriInfo);
        return findRegistry(registry).getMetrics().entrySet().stream()
                .collect(toMap(this::getKey, it -> toJson(map(it.getValue()), formatTags(it.getKey())), this::merge));
    }
    // GET /metrics/{registry} (text): one registry in Prometheus exposition format.
    @GET
    @Path("{registry}")
    @Produces(MediaType.TEXT_PLAIN)
    public String getText(@PathParam("registry") final String registry,
                          @Context final SecurityContext securityContext,
                          @Context final UriInfo uriInfo) {
        securityValidator.checkSecurity(securityContext, uriInfo);
        final MetricRegistry metricRegistry = findRegistry(registry);
        return prometheus.toText(metricRegistry, registry, metrics(metricRegistry)).toString();
    }
    // GET /metrics/{registry}/{metric} (JSON): a single metric by name.
    @GET
    @Path("{registry}/{metric}")
    @Produces(MediaType.APPLICATION_JSON)
    public Object getJson(@PathParam("registry") final String registry,
                          @PathParam("metric") final String name,
                          @Context final SecurityContext securityContext,
                          @Context final UriInfo uriInfo) {
        securityValidator.checkSecurity(securityContext, uriInfo);
        final MetricRegistry metricRegistry = findRegistry(registry);
        return singleEntry(name, metricRegistry, this::map);
    }
    // GET /metrics/{registry}/{metric} (text): a single metric in Prometheus format.
    @GET
    @Path("{registry}/{metric}")
    @Produces(MediaType.TEXT_PLAIN)
    public String getText(@PathParam("registry") final String registry,
                          @PathParam("metric") final String name,
                          @Context final SecurityContext securityContext,
                          @Context final UriInfo uriInfo) {
        securityValidator.checkSecurity(securityContext, uriInfo);
        final MetricRegistry metricRegistry = findRegistry(registry);
        return prometheus.toText(
                metricRegistry, registry,
                singleEntry(name, metricRegistry, identity())).toString();
    }
    // OPTIONS /metrics/{registry}/{metric}: metadata of a single metric (empty map when absent).
    @OPTIONS
    @Path("{registry}/{metric}")
    @Produces(MediaType.APPLICATION_JSON)
    public Object getMetadata(@PathParam("registry") final String registry,
                              @PathParam("metric") final String name,
                              @Context final SecurityContext securityContext,
                              @Context final UriInfo uriInfo) {
        securityValidator.checkSecurity(securityContext, uriInfo);
        final MetricRegistry metricRegistry = findRegistry(registry);
        return ofNullable(metricRegistry.getMetadata().get(name))
                .map(metadata -> singletonMap(name, mapMeta(metadata, findMetricId(metricRegistry, metadata))))
                .orElse(emptyMap());
    }
    // OPTIONS /metrics/{registry}: metadata of every metric in the registry.
    @OPTIONS
    @Path("{registry}")
    @Produces(MediaType.APPLICATION_JSON)
    public Object getMetadata(@PathParam("registry") final String registry,
                              @Context final SecurityContext securityContext,
                              @Context final UriInfo uriInfo) {
        securityValidator.checkSecurity(securityContext, uriInfo);
        final MetricRegistry metricRegistry = findRegistry(registry);
        return metricRegistry.getMetadata().entrySet().stream()
                .collect(toMap(Map.Entry::getKey, e -> mapMeta(e.getValue(), findMetricId(metricRegistry, e.getValue())), this::merge));
    }
    // Resolves the MetricID carrying the tags for a given metadata entry: tries the
    // global-tagged (or bare) id first, then falls back to the first id with a matching name.
    private MetricID findMetricId(final MetricRegistry metricRegistry, final Metadata value) {
        final Map<MetricID, Metric> metrics = metricRegistry.getMetrics();
        final MetricID directKey = RegistryImpl.class.isInstance(metricRegistry) && RegistryImpl.class.cast(metricRegistry).getGlobalTags().length > 0 ?
                new MetricID(value.getName(), RegistryImpl.class.cast(metricRegistry).getGlobalTags()) : new MetricID(value.getName());
        if (metrics.containsKey(directKey)) {
            return directKey;
        }
        return metrics.keySet().stream()
                .filter(it -> Objects.equals(it.getName(), value.getName()))
                .findFirst()
                .orElse(directKey);
    }
    // toMap merge function: shallow-merges two maps (first wins on key clash);
    // for any other type the first value is kept.
    private <A> A merge(final A a, final A b) {
        if (Map.class.isInstance(a) && Map.class.isInstance(b)) {
            final Map<String, Object> firstMap = (Map<String, Object>) a;
            final Map<String, Object> secondMap = (Map<String, Object>) b;
            final Map<String, Object> merged = Stream.concat(firstMap.entrySet().stream(), secondMap.entrySet().stream())
                    .collect(toMap(Map.Entry::getKey, Map.Entry::getValue, (m1, m2) -> m1));
            return (A) merged;
        }
        return a;
    }
    // Flattens a registry to a name->metric map using the JSON key convention.
    private Map<String, Metric> metrics(final MetricRegistry metricRegistry) {
        return metricRegistry.getMetrics().entrySet().stream()
                .collect(toMap(this::getKey, Map.Entry::getValue, this::merge));
    }
    // Looks up a single metric by name: fast path on the exact (possibly global-tagged)
    // id, otherwise the first metric whose id has a matching name; empty map when absent.
    private <T> Map<String, T> singleEntry(final String id, final MetricRegistry metricRegistry,
                                           final Function<Metric, T> metricMapper) {
        final MetricID key = RegistryImpl.class.isInstance(metricRegistry) && RegistryImpl.class.cast(metricRegistry).getGlobalTags().length > 0 ?
                new MetricID(id, RegistryImpl.class.cast(metricRegistry).getGlobalTags()) : new MetricID(id);
        final Map<MetricID, Metric> metrics = metricRegistry.getMetrics();
        return ofNullable(metrics.get(key)) // try first without any tag (fast access)
                .map(metric -> singletonMap(id + formatTags(key), metricMapper.apply(metric)))
                .orElseGet(() -> metrics.keySet().stream().filter(it -> Objects.equals(it.getName(), id)).findFirst() // else find first matching id
                        .map(metric -> singletonMap(id + formatTags(key), metricMapper.apply(metrics.get(metric))))
                        .orElseGet(Collections::emptyMap));
    }
    // Wraps metadata into the JSON-friendly Meta view; null stays null.
    private Meta mapMeta(final Metadata value, final MetricID metricID) {
        return ofNullable(value).map(v -> new Meta(value, metricID, globalTags)).orElse(null);
    }
    // Counters and gauges serialize as their raw value; other metrics pass through to toJson().
    private Object map(final Metric metric) {
        if (Counter.class.isInstance(metric)) {
            return Counter.class.cast(metric).getCount();
        }
        if (Gauge.class.isInstance(metric)) {
            return Gauge.class.cast(metric).getValue();
        }
        return metric;
    }
    // JSON key: plain name, except counters/gauges which also carry the ";tag=value" suffix.
    private String getKey(final Map.Entry<MetricID, Metric> e) {
        if (Counter.class.isInstance(e.getValue()) || Gauge.class.isInstance(e.getValue())) {
            return e.getKey().getName() + formatTags(e.getKey());
        }
        return e.getKey().getName();
    }
    // https://github.com/eclipse/microprofile-metrics/issues/508
    // Expands compound metrics (timers, meters, histograms, concurrent gauges) into
    // the flat field-name -> value maps expected by the spec'd JSON format.
    private Object toJson(final Object metric, final String nameSuffix) {
        if (Timer.class.isInstance(metric)) {
            final Timer meter = Timer.class.cast(metric);
            final Map<Object, Object> map = new TreeMap<>();
            map.putAll(snapshot(meter.getSnapshot(), nameSuffix));
            map.putAll(meter(meter, nameSuffix));
            final Duration elapsedTime = meter.getElapsedTime();
            map.put("elapsedTime" + nameSuffix, elapsedTime == null ? 0 : elapsedTime.toNanos());
            return map;
        }
        if (SimpleTimer.class.isInstance(metric)) {
            final SimpleTimer simpleTimer = SimpleTimer.class.cast(metric);
            final Map<Object, Object> map = new TreeMap<>();
            map.put("count" + nameSuffix, simpleTimer.getCount());
            final Duration elapsedTime = simpleTimer.getElapsedTime();
            map.put("elapsedTime" + nameSuffix, elapsedTime == null ? 0 : elapsedTime.toNanos());
            final Duration minTimeDuration = simpleTimer.getMinTimeDuration();
            map.put("minTimeDuration" + nameSuffix, minTimeDuration == null ? JsonValue.NULL : minTimeDuration.toNanos());
            final Duration maxTimeDuration = simpleTimer.getMaxTimeDuration();
            map.put("maxTimeDuration" + nameSuffix, maxTimeDuration == null ? JsonValue.NULL : maxTimeDuration.toNanos());
            return map;
        }
        if (Meter.class.isInstance(metric)) {
            return meter(Meter.class.cast(metric), nameSuffix);
        }
        if (Histogram.class.isInstance(metric)) {
            final Histogram histogram = Histogram.class.cast(metric);
            final Map<Object, Object> map = new TreeMap<>();
            map.putAll(snapshot(histogram.getSnapshot(), nameSuffix));
            map.put("count" + nameSuffix, histogram.getCount());
            return map;
        }
        if (ConcurrentGauge.class.isInstance(metric)) {
            final ConcurrentGauge concurrentGauge = ConcurrentGauge.class.cast(metric);
            final Map<Object, Object> map = new TreeMap<>();
            map.put("min" + nameSuffix, concurrentGauge.getMin());
            map.put("current" + nameSuffix, concurrentGauge.getCount());
            map.put("max" + nameSuffix, concurrentGauge.getMax());
            return map;
        }
        // counters and gauges are unwrapped so skip it
        return metric;
    }
    // Shared metered-metric fields (count + rates), suffixed per the JSON key convention.
    private Map<String, Object> meter(final Metered metered, final String nameSuffix) {
        final Map<String, Object> map = new TreeMap<>();
        map.put("count" + nameSuffix, metered.getCount());
        map.put("meanRate" + nameSuffix, metered.getMeanRate());
        map.put("oneMinRate" + nameSuffix, metered.getOneMinuteRate());
        map.put("fiveMinRate" + nameSuffix, metered.getFiveMinuteRate());
        map.put("fifteenMinRate" + nameSuffix, metered.getFifteenMinuteRate());
        return map;
    }
    // Snapshot statistics (percentiles, min/mean/max/stddev) as suffixed JSON fields.
    private Map<String, Object> snapshot(final Snapshot snapshot, final String nameSuffix) {
        final Map<String, Object> map = new TreeMap<>();
        map.put("p50" + nameSuffix, snapshot.getMedian());
        map.put("p75" + nameSuffix, snapshot.get75thPercentile());
        map.put("p95" + nameSuffix, snapshot.get95thPercentile());
        map.put("p98" + nameSuffix, snapshot.get98thPercentile());
        map.put("p99" + nameSuffix, snapshot.get99thPercentile());
        map.put("p999" + nameSuffix, snapshot.get999thPercentile());
        map.put("min" + nameSuffix, snapshot.getMin());
        map.put("mean" + nameSuffix, snapshot.getMean());
        map.put("max" + nameSuffix, snapshot.getMax());
        map.put("stddev" + nameSuffix, snapshot.getStdDev());
        return map;
    }
    // Resolves a registry by (case-insensitive) name; 404 for unknown names.
    private MetricRegistry findRegistry(final String registry) {
        switch (Stream.of(MetricRegistry.Type.values())
                .filter(it -> it.getName().equalsIgnoreCase(registry)).findFirst()
                .orElseThrow(() -> new WebApplicationException(Response.Status.NOT_FOUND))) {
            case BASE:
                return baseRegistry;
            case VENDOR:
                return vendorRegistry;
            default:
                return applicationRegistry;
        }
    }
    // ";tag=value;tag=value" suffix combining metric + global tags ('' when there are none);
    // ';' inside tag values is rewritten to '_' since it is the separator.
    private String formatTags(final MetricID id) {
        return id.getTags().isEmpty() && globalTags.length == 0 ? "" : (';' +
                Stream.concat(id.getTagsAsList().stream(), Stream.of(globalTags))
                        .map(e -> e.getTagName() + "=" + semicolon.matcher(e.getTagValue()).replaceAll("_"))
                        .distinct()
                        .collect(joining(";")));
    }
    /** JSON view over a metric's metadata, exposing tags (metric + global) as a flat string. */
    public static class Meta {
        private final Metadata value;
        private final MetricID metricID;
        private final Tag[] globalTags;
        private Meta(final Metadata value, final MetricID metricID, final Tag[] globalTags) {
            this.value = value;
            this.metricID = metricID;
            this.globalTags = globalTags;
        }
        public String getName() {
            return value.getName();
        }
        public String getDisplayName() {
            return value.getDisplayName();
        }
        public String getDescription() {
            return value.getDescription();
        }
        public String getType() {
            return value.getType();
        }
        public String getTypeRaw() {
            return value.getTypeRaw().name();
        }
        public String getUnit() {
            return value.getUnit();
        }
        public String getTags() { // not sure why tck expect it, sounds worse than native getTags for clients (array of key/values)
            return Stream.concat(
                    metricID.getTags().entrySet().stream().map(e -> e.getKey() + '=' + e.getValue()),
                    Stream.of(globalTags).map(e -> e.getTagName() + '=' + e.getTagValue()))
                    .distinct()
                    .collect(joining(","));
        }
    }
}
| 8,531 |
0 | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common | Create_ds/geronimo-metrics/geronimo-metrics-common/src/main/java/org/apache/geronimo/microprofile/metrics/common/jaxrs/SecurityValidator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.microprofile.metrics.common.jaxrs;
import static java.util.Collections.singletonList;
import static java.util.Optional.ofNullable;
import static java.util.function.Function.identity;
import static java.util.stream.Collectors.toList;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Stream;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
import javax.ws.rs.core.UriInfo;
// default let it pass locally (127.*, localhost or 1::*),
// this matches prometheus use case in general
// WARNING: ensure you accept it is public or if you are behind a proxy that you get the right hostname!
public class SecurityValidator {
    // NOTE(review): "1::" looks intended to match the IPv6 loopback, but that address
    // is "::1" (and URI#getHost may render it as "[::1]") — as written the prefix
    // matches neither form; confirm the intent before relying on IPv6 local access.
    private static final Predicate<String> LOCAL_MATCHER = it ->
            it.startsWith("127.") || it.startsWith("1::") || "localhost".equals(it);
    // null acceptedRoles means "no role check"; acceptedHosts defaults to local-only
    private List<Predicate<String>> acceptedHosts;
    private List<String> acceptedRoles;
    // Reads the comma-separated host/role configuration; "<local>" expands to the
    // local-address matcher. Must be called before checkSecurity().
    public void init() {
        acceptedHosts = config("geronimo.metrics.jaxrs.acceptedHosts", value -> {
            if ("<local>".equals(value)) {
                return LOCAL_MATCHER;
            }
            return (Predicate<String>) value::equals;
        }).orElse(singletonList(LOCAL_MATCHER));
        acceptedRoles = config("geronimo.metrics.jaxrs.acceptedRoles", identity()).orElse(null);
    }
    // Rejects the request with 404 when the request host is not accepted, 401 when a
    // role is required but no principal is present, 403 when the principal lacks the role.
    public void checkSecurity(final SecurityContext securityContext, final UriInfo uriInfo) {
        if (acceptedHosts != null && uriInfo != null) {
            final String host = uriInfo.getRequestUri().getHost();
            if (host == null || acceptedHosts.stream().noneMatch(it -> it.test(host))) {
                throw new WebApplicationException(Response.Status.NOT_FOUND);
            }
        }
        if (!hasValidRole(securityContext)) {
            if (securityContext == null || securityContext.getUserPrincipal() == null) {
                throw new WebApplicationException(Response.Status.UNAUTHORIZED);
            }
            throw new WebApplicationException(Response.Status.FORBIDDEN);
        }
    }
    // True when no roles are configured, or the authenticated principal holds any accepted role.
    private boolean hasValidRole(final SecurityContext securityContext) {
        return acceptedRoles == null || (securityContext != null &&
                securityContext.getUserPrincipal() != null &&
                acceptedRoles.stream().anyMatch(securityContext::isUserInRole));
    }
    // Splits a comma-separated system property into mapped, trimmed, non-empty entries.
    private <T> Optional<List<T>> config(final String key, final Function<String, T> mapper) {
        return ofNullable(config(key))
                .map(value -> Stream.of(value.split(","))
                        .map(String::trim)
                        .filter(it -> !it.isEmpty())
                        .map(mapper)
                        .collect(toList()));
    }
    // Overridable configuration source; defaults to system properties.
    protected String config(final String key) {
        return System.getProperty(key);
    }
}
| 8,532 |
0 | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio/benchmarks/BufferCursorSeekBenchmark.java | /*
* Copyright (C) 2018 Square, Inc. and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okio.benchmarks;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import okio.Buffer;
import org.openjdk.jmh.Main;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.RunnerException;
/**
 * JMH benchmark measuring {@link Buffer.UnsafeCursor#seek(long)} latency for the
 * four interesting access patterns: first byte, last byte, one step forward and
 * one step backward. Field and method names are reflected by JMH and must not change.
 */
@Fork(1)
@Warmup(iterations = 5, time = 2)
@Measurement(iterations = 5, time = 2)
@State(Scope.Benchmark)
@BenchmarkMode(Mode.SampleTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public class BufferCursorSeekBenchmark {
  Buffer buffer;
  Buffer.UnsafeCursor cursor;

  @Param({ "2097152" })
  int bufferSize; // 2 MB = 256 Segments

  @Setup
  public void setup() throws IOException {
    // Fill the buffer with zeroed 8 KiB chunks until it reaches the configured size.
    final byte[] chunk = new byte[8192];
    buffer = new Buffer();
    while (buffer.size() < bufferSize) {
      buffer.write(chunk);
    }
    cursor = new Buffer.UnsafeCursor();
  }

  @Benchmark
  public void seekBeginning() {
    // readUnsafe() returns the cursor it attached, so it can drive try-with-resources.
    try (Buffer.UnsafeCursor attached = buffer.readUnsafe(cursor)) {
      attached.seek(0);
    }
  }

  @Benchmark
  public void seekEnd() {
    try (Buffer.UnsafeCursor attached = buffer.readUnsafe(cursor)) {
      attached.seek(buffer.size() - 1);
    }
  }

  @Benchmark
  public void seekForward() {
    try (Buffer.UnsafeCursor attached = buffer.readUnsafe(cursor)) {
      attached.seek(0);
      attached.seek(1);
    }
  }

  @Benchmark
  public void seekBackward() {
    try (Buffer.UnsafeCursor attached = buffer.readUnsafe(cursor)) {
      attached.seek(buffer.size() - 1);
      attached.seek(buffer.size() - 2);
    }
  }

  public static void main(String[] args) throws IOException, RunnerException {
    Main.main(new String[] { BufferCursorSeekBenchmark.class.getName() });
  }
}
| 8,533 |
0 | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio/benchmarks/BufferPerformanceBenchmark.java | /*
* Copyright (C) 2014 Square, Inc. and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okio.benchmarks;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Group;
import org.openjdk.jmh.annotations.GroupThreads;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Threads;
import org.openjdk.jmh.annotations.Warmup;
import okio.Buffer;
import okio.BufferedSource;
import okio.Okio;
import okio.Sink;
import okio.Timeout;
import static java.util.Objects.requireNonNull;
@Fork(1)
@Warmup(iterations = 10, time = 10)
@Measurement(iterations = 10, time = 10)
@State(Scope.Benchmark)
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public class BufferPerformanceBenchmark {
  // Byte source used to prime the request/response arrays. Defaults to
  // /dev/urandom; override via the okio.bench.origin.path system property
  // (e.g. on platforms that lack /dev/urandom).
  public static final File OriginPath =
      new File(System.getProperty("okio.bench.origin.path", "/dev/urandom"));
  /* Test Workload
   *
   * Each benchmark thread maintains three buffers; a receive buffer, a process buffer
   * and a send buffer. At every operation:
   *
   * - We fill up the receive buffer using the origin, write the request to the process
   * buffer, and consume the process buffer.
   * - We fill up the process buffer using the origin, write the response to the send
   * buffer, and consume the send buffer.
   *
   * We use an "origin" source that serves as a preexisting sequence of bytes we can read
   * from the file system. The request and response bytes are initialized in the beginning
   * and reused throughout the benchmark in order to eliminate GC effects.
   *
   * Typically, we simulate the usage of small reads and large writes. Requests and
   * responses are satisfied with precomputed buffers to eliminate GC effects on
   * results.
   *
   * There are two types of benchmark tests; hot tests are "pedal to the metal" and
   * use all CPU they can take. These are useful to magnify performance effects of
   * changes but are not realistic use cases that should drive optimization efforts.
   * Cold tests introduce think time between the receiving of the request and sending
   * of the response. They are more useful as a reasonably realistic workload where
   * buffers can be read from and written to during request/response handling but
   * may hide subtle effects of most changes on performance. Prefer to look at the cold
   * benchmarks first to decide if a bottleneck is worth pursuing, then use the hot
   * benchmarks to fine tune optimization efforts.
   *
   * Benchmark threads do not explicitly communicate between each other (except to sync
   * iterations as needed by JMH).
   *
   * We simulate think time for each benchmark thread by parking the thread for a
   * configurable number of microseconds (1000 by default).
   */
  // Hot benchmarks: identical work per call, scaled across thread counts so
  // contention effects on the segment pool become visible.
  @Benchmark
  @Threads(1)
  public void threads1hot(HotBuffers buffers) throws IOException {
    readWriteRecycle(buffers);
  }
  @Benchmark
  @Threads(2)
  public void threads2hot(HotBuffers buffers) throws IOException {
    readWriteRecycle(buffers);
  }
  @Benchmark
  @Threads(4)
  public void threads4hot(HotBuffers buffers) throws IOException {
    readWriteRecycle(buffers);
  }
  @Benchmark
  @Threads(8)
  public void threads8hot(HotBuffers buffers) throws IOException {
    readWriteRecycle(buffers);
  }
  @Benchmark
  @Threads(16)
  public void threads16hot(HotBuffers buffers) throws IOException {
    readWriteRecycle(buffers);
  }
  @Benchmark
  @Threads(32)
  public void threads32hot(HotBuffers buffers) throws IOException {
    readWriteRecycle(buffers);
  }
  // Cold benchmark group: one reader thread with no think time paired with
  // three writer threads that sleep before each invocation (see ColdBuffers.lag).
  @Benchmark
  @GroupThreads(1)
  @Group("cold")
  public void thinkReadHot(HotBuffers buffers) throws IOException {
    buffers.receive(requestBytes).readAll(NullSink);
  }
  @Benchmark
  @GroupThreads(3)
  @Group("cold")
  public void thinkWriteCold(ColdBuffers buffers) throws IOException {
    buffers.transmit(responseBytes).readAll(NullSink);
  }
  // One full request/response cycle; the shared body of all hot benchmarks.
  private void readWriteRecycle(HotBuffers buffers) throws IOException {
    buffers.receive(requestBytes).readAll(NullSink);
    buffers.transmit(responseBytes).readAll(NullSink);
  }
  // Think time per cold invocation, in microseconds.
  @Param({ "1000" })
  int maxThinkMicros = 1000;
  // Largest chunk written into the receive buffer per write call.
  @Param({ "1024" })
  int maxReadBytes = 1024;
  // Largest chunk written into the process buffer per write call.
  @Param({ "1024" })
  int maxWriteBytes = 1024;
  // Size of the precomputed request payload, in bytes.
  @Param({ "2048" })
  int requestSize = 2048;
  // Response payload size = requestSize * responseFactor.
  @Param({ "1" })
  int responseFactor = 1;
  byte[] requestBytes;
  byte[] responseBytes;
  /** Primes the request/response payloads once per trial from the origin file. */
  @Setup(Level.Trial)
  public void storeRequestResponseData() throws IOException {
    checkOrigin(OriginPath);
    requestBytes = storeSourceData(new byte[requestSize]);
    responseBytes = storeSourceData(new byte[requestSize * responseFactor]);
  }
  /** Fills {@code dest} completely with bytes read from the origin and returns it. */
  private byte[] storeSourceData(byte[] dest) throws IOException {
    requireNonNull(dest, "dest == null");
    try (BufferedSource source = Okio.buffer(Okio.source(OriginPath))) {
      source.readFully(dest);
    }
    return dest;
  }
  /** Fails fast when the origin file is missing, unreadable, or empty. */
  private void checkOrigin(File path) throws IOException {
    requireNonNull(path, "path == null");
    if (!path.canRead()) {
      throw new IllegalArgumentException("can not access: " + path);
    }
    try (InputStream in = new FileInputStream(path)) {
      int available = in.read();
      if (available < 0) {
        throw new IllegalArgumentException("can not read: " + path);
      }
    }
  }
  /*
   * The state class hierarchy is larger than it needs to be due to a JMH
   * issue where states inheriting setup methods depending on another state
   * do not get initialized correctly from benchmark methods making use
   * of groups. To work around, we leave the common setup and teardown code
   * in superclasses and move the setup method depending on the bench state
   * to subclasses. Without the workaround, it would have been enough for
   * `ColdBuffers` to inherit from `HotBuffers`.
   */
  @State(Scope.Thread)
  public static class ColdBuffers extends BufferSetup {
    @Setup(Level.Trial)
    public void setupBench(BufferPerformanceBenchmark bench) {
      super.bench = bench;
    }
    // Simulates think time by sleeping before every invocation.
    @Setup(Level.Invocation)
    public void lag() throws InterruptedException {
      TimeUnit.MICROSECONDS.sleep(bench.maxThinkMicros);
    }
  }
  @State(Scope.Thread)
  public static class HotBuffers extends BufferSetup {
    @Setup(Level.Trial)
    public void setupBench(BufferPerformanceBenchmark bench) {
      super.bench = bench;
    }
  }
  // Binds the per-thread buffers to the benchmark's chunk-size parameters.
  @State(Scope.Thread)
  public abstract static class BufferSetup extends BufferState {
    BufferPerformanceBenchmark bench;
    public BufferedSource receive(byte[] bytes) throws IOException {
      return super.receive(bytes, bench.maxReadBytes);
    }
    public BufferedSource transmit(byte[] bytes) throws IOException {
      return super.transmit(bytes, bench.maxWriteBytes);
    }
    @TearDown
    public void dispose() throws IOException {
      releaseBuffers();
    }
  }
  // Holds the three per-thread buffers and the chunked hand-off logic.
  public static class BufferState {
    @SuppressWarnings("resource")
    final Buffer received = new Buffer();
    @SuppressWarnings("resource")
    final Buffer sent = new Buffer();
    @SuppressWarnings("resource")
    final Buffer process = new Buffer();
    public void releaseBuffers() throws IOException {
      received.clear();
      sent.clear();
      process.clear();
    }
    /**
     * Fills up the receive buffer, hands off to process buffer and returns it for consuming.
     * Expects receive and process buffers to be empty. Leaves the receive buffer empty and
     * process buffer full.
     */
    protected Buffer receive(byte[] bytes, int maxChunkSize) throws IOException {
      writeChunked(received, bytes, maxChunkSize).readAll(process);
      return process;
    }
    /**
     * Fills up the process buffer, hands off to send buffer and returns it for consuming.
     * Expects process and sent buffers to be empty. Leaves the process buffer empty and
     * sent buffer full.
     */
    protected BufferedSource transmit(byte[] bytes, int maxChunkSize) throws IOException {
      writeChunked(process, bytes, maxChunkSize).readAll(sent);
      return sent;
    }
    // Writes `bytes` into `buffer` in chunks of at most `chunkSize` bytes,
    // mimicking a caller that performs many small writes.
    private BufferedSource writeChunked(Buffer buffer, byte[] bytes, final int chunkSize) {
      int remaining = bytes.length;
      int offset = 0;
      while (remaining > 0) {
        int bytesToWrite = Math.min(remaining, chunkSize);
        buffer.write(bytes, offset, bytesToWrite);
        remaining -= bytesToWrite;
        offset += bytesToWrite;
      }
      return buffer;
    }
  }
  // Sink that discards everything it is given; keeps consumption cost minimal
  // so the benchmark measures buffer mechanics rather than a real destination.
  @SuppressWarnings("resource")
  private static final Sink NullSink = new Sink() {
    @Override public void write(Buffer source, long byteCount) throws EOFException {
      source.skip(byteCount);
    }
    @Override public void flush() {
      // nothing
    }
    @Override public Timeout timeout() {
      return Timeout.NONE;
    }
    @Override public void close() {
      // nothing
    }
    @Override public String toString() {
      return "NullSink{}";
    }
  };
}
| 8,534 |
0 | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio/benchmarks/HashFunctionBenchmark.java | /*
* Copyright (C) 2020 Square, Inc. and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okio.benchmarks;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.Main;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
@Fork(1)
@Warmup(iterations = 5, time = 1)
@Measurement(iterations = 5, time = 1)
@State(Scope.Benchmark)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public class HashFunctionBenchmark {
  /** Digest instance supplied by the JVM's installed security providers. */
  MessageDigest jvm;

  /** Number of bytes hashed per operation. */
  @Param({ "100", "1048576" })
  public int messageSize;

  /** Algorithm name passed to {@link MessageDigest#getInstance(String)}. */
  @Param({ "SHA-1", "SHA-256", "SHA-512", "MD5" })
  public String algorithm;

  /** Zero-filled input of {@link #messageSize} bytes, allocated once in setup. */
  private byte[] message;

  @Setup
  public void setup() throws NoSuchAlgorithmException {
    jvm = MessageDigest.getInstance(algorithm);
    message = new byte[messageSize];
  }

  /** Measures one update-then-digest round trip over the whole message. */
  @Benchmark
  public void jvm() {
    jvm.update(message, 0, messageSize);
    jvm.digest();
  }

  public static void main(String[] args) throws IOException {
    Main.main(new String[] { HashFunctionBenchmark.class.getName() });
  }
}
| 8,535 |
0 | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio/benchmarks/WriteHexadecimalBenchmark.java | /*
* Copyright (C) 2019 Square, Inc. and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okio.benchmarks;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import okio.Buffer;
import org.openjdk.jmh.Main;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.RunnerException;
@Fork(1)
@Warmup(iterations = 5, time = 2)
@Measurement(iterations = 5, time = 2)
@State(Scope.Benchmark)
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public class WriteHexadecimalBenchmark {
  /** Target buffer; emptied after every invocation so it never grows. */
  Buffer buffer;

  // NOTE(review): the value written is 1L << width, which spans at most five
  // hex digits even at width 16. If the intent was a value `width` hex digits
  // wide, the shift would need to scale by 4 — confirm before relying on the
  // param name.
  @Param({"1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16"})
  int width;

  @Setup
  public void setup() {
    buffer = new Buffer();
  }

  @TearDown(Level.Invocation)
  public void teardown() {
    buffer.clear();
  }

  /** Writes the hexadecimal representation of a single power of two. */
  @Benchmark
  public void writeHex() {
    buffer.writeHexadecimalUnsignedLong(1L << width);
  }

  public static void main(String[] args) throws IOException, RunnerException {
    Main.main(new String[] { WriteHexadecimalBenchmark.class.getName() });
  }
}
| 8,536 |
0 | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio/benchmarks/SegmentedByteStringBenchmark.java | /*
* Copyright (C) 2018 Square, Inc. and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okio.benchmarks;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import okio.Buffer;
import okio.ByteString;
import org.openjdk.jmh.Main;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.RunnerException;
@Fork(1)
@Warmup(iterations = 5, time = 2)
@Measurement(iterations = 5, time = 2)
@State(Scope.Benchmark)
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public class SegmentedByteStringBenchmark {
  /** Needle that never occurs in the sample text: exercises a full scan. */
  private static final ByteString UNKNOWN = ByteString.encodeUtf8("UNKNOWN");

  /** Needle that occurs early in the sample text ("I'll tell you"). */
  private static final ByteString SEARCH = ByteString.encodeUtf8("tell");

  @Param({"20", "2000", "200000"})
  int length;

  /** Segmented snapshot of exactly {@link #length} bytes, built in setup. */
  private ByteString byteString;

  @Setup
  public void setup() {
    String part =
        "Um, I'll tell you the problem with the scientific power that you're using here, "
            + "it didn't require any discipline to attain it. You read what others had done and you "
            + "took the next step. You didn't earn the knowledge for yourselves, so you don't take any "
            + "responsibility for it. You stood on the shoulders of geniuses to accomplish something "
            + "as fast as you could, and before you even knew what you had, you patented it, and "
            + "packaged it, and slapped it on a plastic lunchbox, and now you're selling it, you wanna "
            + "sell it.";
    Buffer scratch = new Buffer();
    do {
      scratch.writeUtf8(part);
    } while (scratch.size() < length);
    byteString = scratch.snapshot(length);
  }

  /** Copies all but the first and last byte. */
  @Benchmark
  public ByteString substring() {
    return byteString.substring(1, byteString.size() - 1);
  }

  /** Hashes the whole segmented string. */
  @Benchmark
  public ByteString md5() {
    return byteString.md5();
  }

  @Benchmark
  public int indexOfUnknown() {
    return byteString.indexOf(UNKNOWN);
  }

  @Benchmark
  public int lastIndexOfUnknown() {
    return byteString.lastIndexOf(UNKNOWN);
  }

  @Benchmark
  public int indexOfEarly() {
    return byteString.indexOf(SEARCH);
  }

  @Benchmark
  public int lastIndexOfEarly() {
    return byteString.lastIndexOf(SEARCH);
  }

  public static void main(String[] args) throws IOException, RunnerException {
    Main.main(new String[] { SegmentedByteStringBenchmark.class.getName() });
  }
}
| 8,537 |
0 | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio/benchmarks/SelectBenchmark.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okio.benchmarks;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import okio.Buffer;
import okio.ByteString;
import okio.Options;
import org.openjdk.jmh.Main;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.RunnerException;
@Fork(1)
@Warmup(iterations = 5, time = 2)
@Measurement(iterations = 5, time = 2)
@State(Scope.Benchmark)
@BenchmarkMode(Mode.SampleTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public class SelectBenchmark {
  /** Representative sample field names as one might find in a JSON document. */
  List<String> sampleValues = Arrays.asList("id", "name", "description", "type", "sku_ids",
      "offers", "start_time", "end_time", "expires", "start_of_availability", "duration",
      "allow_recording", "thumbnail_id", "thumbnail_formats", "is_episode", "is_live", "channel_id",
      "genre_list", "provider_networks", "year", "video_flags", "is_repeat", "series_id",
      "series_name", "series_description", "original_air_date", "letter_box", "category",
      "child_protection_rating", "parental_control_minimum_age", "images", "episode_id",
      "season_number", "episode_number", "directors_list", "scriptwriters_list", "actors_list",
      "drm_rights", "is_location_chk_reqd", "is_catchup_enabled", "catchup_duration",
      "is_timeshift_enabled", "timeshift_duration", "is_startover_enabled", "is_recording_enabled",
      "suspension_time", "shared_ref_id", "linked_channel_number", "audio_lang", "subcategory",
      "metadata_root_id", "ref_id", "ref_type", "display_position", "thumbnail_format_list",
      "network", "external_url", "offer_type", "em_format", "em_artist_name", "assets",
      "media_class", "media_id", "channel_number");

  /** How many alternatives each select() call chooses between. */
  @Param({ "4", "8", "16", "32", "64" })
  int optionCount;

  /** How many select() calls each benchmark invocation performs. */
  @Param({ "2048" })
  int selectCount;

  Buffer buffer = new Buffer();
  Options options;

  /** Precomputed concatenation of selectCount randomly-chosen options. */
  ByteString sampleData;

  @Setup
  public void setup() throws IOException {
    ByteString[] candidates = new ByteString[optionCount];
    for (int i = 0; i < optionCount; i++) {
      candidates[i] = ByteString.encodeUtf8(sampleValues.get(i) + "\"");
    }
    options = Options.of(candidates);
    // Fixed seed so every trial consumes the identical byte sequence.
    Random random = new Random(0);
    Buffer scratch = new Buffer();
    for (int i = 0; i < selectCount; i++) {
      scratch.write(candidates[random.nextInt(optionCount)]);
    }
    sampleData = scratch.readByteString();
  }

  /** Loads the precomputed data and selects until the buffer is consumed. */
  @Benchmark
  public void select() throws IOException {
    buffer.write(sampleData);
    for (int remaining = selectCount; remaining > 0; remaining--) {
      buffer.select(options);
    }
    if (!buffer.exhausted()) {
      throw new AssertionError();
    }
  }

  public static void main(String[] args) throws IOException, RunnerException {
    Main.main(new String[] { SelectBenchmark.class.getName() });
  }
}
| 8,538 |
0 | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio/benchmarks/ReadByteStringBenchmark.java | /*
* Copyright (C) 2019 Square, Inc. and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okio.benchmarks;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import okio.Buffer;
import org.openjdk.jmh.Main;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.RunnerException;
@Fork(1)
@Warmup(iterations = 5, time = 2)
@Measurement(iterations = 5, time = 2)
@State(Scope.Benchmark)
@BenchmarkMode(Mode.SampleTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public class ReadByteStringBenchmark {
  /** Buffer primed with {@link #bufferSize} zero bytes; each benchmark reads a
   * byte string off the front and writes it back, keeping the size constant. */
  Buffer buffer;

  @Param({"32768"})
  int bufferSize;

  @Param({"8", "16", "32", "64", "128", "256", "512", "1024", "2048", "4096", "8192", "16384",
      "32768"})
  int byteStringSize;

  @Setup
  public void setup() {
    buffer = new Buffer().write(new byte[bufferSize]);
  }

  /** Measures taking a ByteString snapshot of the buffer's head. */
  @Benchmark
  public void readByteString() throws IOException {
    buffer.write(buffer.readByteString(byteStringSize));
  }

  /** As above, plus the cost of materializing the backing byte array. */
  @Benchmark
  public void readByteString_toByteArray() throws IOException {
    buffer.write(buffer.readByteString(byteStringSize).toByteArray());
  }

  public static void main(String[] args) throws IOException, RunnerException {
    Main.main(new String[] { ReadByteStringBenchmark.class.getName() });
  }
}
| 8,539 |
0 | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio/benchmarks/GetByteBenchmark.java | /*
* Copyright (C) 2018 Square, Inc. and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okio.benchmarks;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import okio.Buffer;
import org.openjdk.jmh.Main;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.RunnerException;
@Fork(1)
@Warmup(iterations = 5, time = 2)
@Measurement(iterations = 5, time = 2)
@State(Scope.Benchmark)
@BenchmarkMode(Mode.SampleTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public class GetByteBenchmark {
  /** Buffer under test, filled to {@link #bufferSize} bytes in {@link #setup()}. */
  Buffer buffer;

  @Param({ "2097152" })
  int bufferSize; // 2 MB = 256 Segments

  /**
   * Fills the buffer with zero bytes until it reaches {@link #bufferSize}.
   * The 8 KiB source array is loop-invariant, so it is allocated once and
   * reused for every write (previously it was re-allocated on each iteration;
   * this also matches the setup style of BufferCursorSeekBenchmark).
   */
  @Setup
  public void setup() throws IOException {
    byte[] source = new byte[8192];
    buffer = new Buffer();
    while (buffer.size() < bufferSize) {
      buffer.write(source);
    }
  }

  /** Random access at the first byte: the head segment matches immediately. */
  @Benchmark
  public void getByteBeginning() {
    buffer.getByte(0);
  }

  /** Random access at the last byte. */
  @Benchmark
  public void getByteEnd() {
    buffer.getByte(buffer.size() - 1);
  }

  /** Random access at the midpoint. */
  @Benchmark
  public void getByteMiddle() {
    buffer.getByte(buffer.size() / 2);
  }

  public static void main(String[] args) throws IOException, RunnerException {
    Main.main(new String[] {
        GetByteBenchmark.class.getName()
    });
  }
}
| 8,540 |
0 | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio/benchmarks/BufferUtf8Benchmark.java | /*
* Copyright (C) 2018 Square, Inc. and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okio.benchmarks;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.Main;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.RunnerException;
import okio.Buffer;
import okio.ByteString;
@Fork(1)
@Warmup(iterations = 5, time = 2)
@Measurement(iterations = 5, time = 2)
@State(Scope.Benchmark)
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public class BufferUtf8Benchmark {
  // Sample texts keyed by encoding profile. Each profile stresses a different
  // UTF-8 code path; setup() repeats/truncates the chosen text to `length`
  // chars so results are comparable across profiles.
  private static final Map<String, String> strings = new HashMap<>();
  static {
    // Pure one-byte code points: the fast path.
    strings.put(
        "ascii",
        "Um, I'll tell you the problem with the scientific power that you're using here, "
            + "it didn't require any discipline to attain it. You read what others had done and you "
            + "took the next step. You didn't earn the knowledge for yourselves, so you don't take any "
            + "responsibility for it. You stood on the shoulders of geniuses to accomplish something "
            + "as fast as you could, and before you even knew what you had, you patented it, and "
            + "packaged it, and slapped it on a plastic lunchbox, and now you're selling it, you wanna "
            + "sell it.");
    // Densely multi-byte text: mostly non-ASCII code points.
    strings.put(
        "utf8",
        "Սm, I'll 𝓽𝖾ll ᶌօ𝘂 ᴛℎ℮ 𝜚𝕣०bl𝖾m wі𝕥𝒽 𝘵𝘩𝐞 𝓼𝙘𝐢𝔢𝓷𝗍𝜄𝚏𝑖c 𝛠𝝾w𝚎𝑟 𝕥h⍺𝞃 𝛄𝓸𝘂'𝒓𝗲 υ𝖘𝓲𝗇ɡ 𝕙𝚎𝑟e, "
            + "𝛊𝓽 ⅆ𝕚𝐝𝝿'𝗍 𝔯𝙚𝙦ᴜ𝜾𝒓𝘦 𝔞𝘯𝐲 ԁ𝜄𝑠𝚌ι𝘱lι𝒏e 𝑡𝜎 𝕒𝚝𝖙𝓪і𝞹 𝔦𝚝. 𝒀ο𝗎 𝔯𝑒⍺𝖉 w𝐡𝝰𝔱 𝞂𝞽һ𝓮𝓇ƽ հ𝖺𝖉 ⅾ𝛐𝝅ⅇ 𝝰πԁ 𝔂ᴑᴜ 𝓉ﮨ၀𝚔 "
            + "т𝒽𝑒 𝗇𝕖ⅹ𝚝 𝔰𝒕е𝓅. 𝘠ⲟ𝖚 𝖉ⅰԁ𝝕'τ 𝙚𝚊r𝞹 𝘵Ꮒ𝖾 𝝒𝐧هwl𝑒𝖉ƍ𝙚 𝓯૦r 𝔂𝞼𝒖𝕣𝑠𝕖l𝙫𝖊𝓼, 𐑈о y𝘰𝒖 ⅆە𝗇't 𝜏α𝒌𝕖 𝛂𝟉ℽ "
            + "𝐫ⅇ𝗌ⲣ๐ϖ𝖘ꙇᖯ𝓲l𝓲𝒕𝘆 𝐟𝞼𝘳 𝚤𝑡. 𝛶𝛔𝔲 s𝕥σσ𝐝 ﮩ𝕟 𝒕𝗁𝔢 𝘴𝐡𝜎ᴜlⅾ𝓮𝔯𝚜 𝛐𝙛 ᶃ𝚎ᴨᎥս𝚜𝘦𝓈 𝓽𝞸 a𝒄𝚌𝞸mρl𝛊ꜱ𝐡 𝓈𝚘m𝚎𝞃𝔥⍳𝞹𝔤 𝐚𝗌 𝖋a𝐬𝒕 "
            + "αs γ𝛐𝕦 𝔠ﻫ𝛖lԁ, 𝚊π𝑑 Ь𝑒𝙛૦𝓇𝘦 𝓎٥𝖚 ⅇvℯ𝝅 𝜅ո𝒆w w𝗵𝒂𝘁 ᶌ੦𝗎 h𝐚𝗱, 𝜸ﮨ𝒖 𝓹𝝰𝔱𝖾𝗇𝓽𝔢ⅆ і𝕥, 𝚊𝜛𝓭 𝓹𝖺ⅽϰ𝘢ℊеᏧ 𝑖𝞃, "
            + "𝐚𝛑ꓒ 𝙨l𝔞р𝘱𝔢𝓭 ɩ𝗍 ہ𝛑 𝕒 pl𝛂ѕᴛ𝗂𐐒 l𝞄ℼ𝔠𝒽𝑏ﮪ⨯, 𝔞ϖ𝒹 n𝛔w 𝛾𝐨𝞄'𝗿𝔢 ꜱ℮ll𝙞nɡ ɩ𝘁, 𝙮𝕠𝛖 w𝑎ℼ𝚗𝛂 𝕤𝓮ll 𝙞𝓉.");
    // The first 't' is actually a '𝓽'
    // Mostly ASCII with a single multi-byte code point: breaks the fast path once.
    strings.put(
        "sparse",
        "Um, I'll 𝓽ell you the problem with the scientific power that you're using here, "
            + "it didn't require any discipline to attain it. You read what others had done and you "
            + "took the next step. You didn't earn the knowledge for yourselves, so you don't take any "
            + "responsibility for it. You stood on the shoulders of geniuses to accomplish something "
            + "as fast as you could, and before you even knew what you had, you patented it, and "
            + "packaged it, and slapped it on a plastic lunchbox, and now you're selling it, you wanna "
            + "sell it.");
    // Boundary code points for each UTF-8 encoded length.
    strings.put("2bytes", "\u0080\u07ff");
    strings.put("3bytes", "\u0800\ud7ff\ue000\uffff");
    strings.put("4bytes", "\ud835\udeca");
    // high surrogate, 'a', low surrogate, and 'a'
    strings.put("bad", "\ud800\u0061\udc00\u0061");
  }
  @Param({"20", "2000", "200000"})
  int length;
  @Param({"ascii", "utf8", "sparse", "2bytes", "3bytes", "4bytes", "bad"})
  String encoding;
  Buffer buffer;
  String encode;       // string input for the writeUtf8 benchmark
  ByteString decode;   // encoded bytes input for the readUtf8 benchmark
  @Setup
  public void setup() {
    String part = strings.get(encoding);
    // Make all the strings the same length for comparison
    StringBuilder builder = new StringBuilder(length + 1_000);
    while (builder.length() < length) {
      builder.append(part);
    }
    builder.setLength(length);
    // Prepare a string and ByteString for encoding and decoding
    buffer = new Buffer();
    encode = builder.toString();
    Buffer temp = new Buffer();
    temp.writeUtf8(encode);
    decode = temp.snapshot();
  }
  @Benchmark
  public void writeUtf8() {
    // Clears inside the measured method so the buffer never grows across calls.
    buffer.writeUtf8(encode);
    buffer.clear();
  }
  @Benchmark
  public String readUtf8() {
    buffer.write(decode);
    return buffer.readUtf8();
  }
  public static void main(String[] args) throws IOException, RunnerException {
    Main.main(new String[] {BufferUtf8Benchmark.class.getName()});
  }
}
| 8,541 |
0 | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio/benchmarks/Utf8Benchmark.java | /*
* Copyright (C) 2018 Square, Inc. and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okio.benchmarks;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.Main;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.RunnerException;
/**
 * Measures UTF-8 encode/decode throughput for Okio's implementation
 * ({@code BenchmarkUtils.encodeUtf8}/{@code decodeUtf8}) against the JDK's
 * built-in {@link String#getBytes}/{@code new String(byte[], Charset)} across
 * several input profiles and input lengths.
 */
@Fork(1)
@Warmup(iterations = 5, time = 2)
@Measurement(iterations = 5, time = 2)
@State(Scope.Benchmark)
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
public class Utf8Benchmark {
  private static final Charset utf8 = StandardCharsets.UTF_8;
  // Seed text for each encoding profile, keyed by the `encoding` @Param value.
  // setup() repeats the chosen seed until the target `length` is reached.
  private static final Map<String, String> strings = new HashMap<>();
  static {
    // Pure ASCII: every char encodes to a single UTF-8 byte.
    strings.put(
        "ascii",
        "Um, I'll tell you the problem with the scientific power that you're using here, "
            + "it didn't require any discipline to attain it. You read what others had done and you "
            + "took the next step. You didn't earn the knowledge for yourselves, so you don't take any "
            + "responsibility for it. You stood on the shoulders of geniuses to accomplish something "
            + "as fast as you could, and before you even knew what you had, you patented it, and "
            + "packaged it, and slapped it on a plastic lunchbox, and now you're selling it, you wanna "
            + "sell it.");
    // Densely non-ASCII: the same quote written with multi-byte look-alike characters.
    strings.put(
        "utf8",
        "Սm, I'll 𝓽𝖾ll ᶌօ𝘂 ᴛℎ℮ 𝜚𝕣०bl𝖾m wі𝕥𝒽 𝘵𝘩𝐞 𝓼𝙘𝐢𝔢𝓷𝗍𝜄𝚏𝑖c 𝛠𝝾w𝚎𝑟 𝕥h⍺𝞃 𝛄𝓸𝘂'𝒓𝗲 υ𝖘𝓲𝗇ɡ 𝕙𝚎𝑟e, "
            + "𝛊𝓽 ⅆ𝕚𝐝𝝿'𝗍 𝔯𝙚𝙦ᴜ𝜾𝒓𝘦 𝔞𝘯𝐲 ԁ𝜄𝑠𝚌ι𝘱lι𝒏e 𝑡𝜎 𝕒𝚝𝖙𝓪і𝞹 𝔦𝚝. 𝒀ο𝗎 𝔯𝑒⍺𝖉 w𝐡𝝰𝔱 𝞂𝞽һ𝓮𝓇ƽ հ𝖺𝖉 ⅾ𝛐𝝅ⅇ 𝝰πԁ 𝔂ᴑᴜ 𝓉ﮨ၀𝚔 "
            + "т𝒽𝑒 𝗇𝕖ⅹ𝚝 𝔰𝒕е𝓅. 𝘠ⲟ𝖚 𝖉ⅰԁ𝝕'τ 𝙚𝚊r𝞹 𝘵Ꮒ𝖾 𝝒𝐧هwl𝑒𝖉ƍ𝙚 𝓯૦r 𝔂𝞼𝒖𝕣𝑠𝕖l𝙫𝖊𝓼, 𐑈о y𝘰𝒖 ⅆە𝗇't 𝜏α𝒌𝕖 𝛂𝟉ℽ "
            + "𝐫ⅇ𝗌ⲣ๐ϖ𝖘ꙇᖯ𝓲l𝓲𝒕𝘆 𝐟𝞼𝘳 𝚤𝑡. 𝛶𝛔𝔲 s𝕥σσ𝐝 ﮩ𝕟 𝒕𝗁𝔢 𝘴𝐡𝜎ᴜlⅾ𝓮𝔯𝚜 𝛐𝙛 ᶃ𝚎ᴨᎥս𝚜𝘦𝓈 𝓽𝞸 a𝒄𝚌𝞸mρl𝛊ꜱ𝐡 𝓈𝚘m𝚎𝞃𝔥⍳𝞹𝔤 𝐚𝗌 𝖋a𝐬𝒕 "
            + "αs γ𝛐𝕦 𝔠ﻫ𝛖lԁ, 𝚊π𝑑 Ь𝑒𝙛૦𝓇𝘦 𝓎٥𝖚 ⅇvℯ𝝅 𝜅ո𝒆w w𝗵𝒂𝘁 ᶌ੦𝗎 h𝐚𝗱, 𝜸ﮨ𝒖 𝓹𝝰𝔱𝖾𝗇𝓽𝔢ⅆ і𝕥, 𝚊𝜛𝓭 𝓹𝖺ⅽϰ𝘢ℊеᏧ 𝑖𝞃, "
            + "𝐚𝛑ꓒ 𝙨l𝔞р𝘱𝔢𝓭 ɩ𝗍 ہ𝛑 𝕒 pl𝛂ѕᴛ𝗂𝐜 l𝞄ℼ𝔠𝒽𝑏ﮪ⨯, 𝔞ϖ𝒹 n𝛔w 𝛾𝐨𝞄'𝗿𝔢 ꜱ℮ll𝙞nɡ ɩ𝘁, 𝙮𝕠𝛖 w𝑎ℼ𝚗𝛂 𝕤𝓮ll 𝙞𝓉.");
    // Mostly ASCII with a single multi-byte character near the start.
    // The first 't' is actually a '𝓽'
    strings.put(
        "sparse",
        "Um, I'll 𝓽ell you the problem with the scientific power that you're using here, "
            + "it didn't require any discipline to attain it. You read what others had done and you "
            + "took the next step. You didn't earn the knowledge for yourselves, so you don't take any "
            + "responsibility for it. You stood on the shoulders of geniuses to accomplish something "
            + "as fast as you could, and before you even knew what you had, you patented it, and "
            + "packaged it, and slapped it on a plastic lunchbox, and now you're selling it, you wanna "
            + "sell it.");
    // Boundary code points for the 2-, 3-, and 4-byte UTF-8 encoded lengths.
    strings.put("2bytes", "\u0080\u07ff");
    strings.put("3bytes", "\u0800\ud7ff\ue000\uffff");
    strings.put("4bytes", "\ud835\udeca");
    // Malformed input: unpaired surrogates that any encoder must handle.
    // high surrogate, 'a', low surrogate, and 'a'
    strings.put("bad", "\ud800\u0061\udc00\u0061");
  }
  // Target character count of the string to encode/decode.
  @Param({"20", "2000", "200000"})
  int length;
  // Which seed profile from `strings` to repeat up to `length`.
  @Param({"ascii", "utf8", "sparse", "2bytes", "3bytes", "4bytes", "bad"})
  String encoding;
  // Input for the encode benchmarks.
  String encode;
  // Input for the decode benchmarks: `encode` pre-encoded as UTF-8 bytes.
  byte[] decodeArray;
  @Setup
  public void setup() {
    String part = strings.get(encoding);
    // Make all the strings the same length for comparison
    StringBuilder builder = new StringBuilder(length + 1_000);
    while (builder.length() < length) {
      builder.append(part);
    }
    builder.setLength(length);
    // Prepare a string and byte array for encoding and decoding
    encode = builder.toString();
    decodeArray = encode.getBytes(utf8);
  }
  @Benchmark
  public byte[] stringToBytesOkio() {
    return BenchmarkUtils.encodeUtf8(encode);
  }
  @Benchmark
  public byte[] stringToBytesJava() {
    return encode.getBytes(utf8);
  }
  @Benchmark
  public String bytesToStringOkio() {
    // For ASCII only decoding, this will never be faster than Java. Because
    // Java can trust the decoded char array and it will be the correct size for
    // ASCII, it is able to avoid the extra defensive copy Okio is forced to
    // make because it doesn't have access to String internals.
    return BenchmarkUtils.decodeUtf8(decodeArray);
  }
  @Benchmark
  public String bytesToStringJava() {
    return new String(decodeArray, utf8);
  }
  /** Convenience launcher so the benchmark can be run directly from an IDE. */
  public static void main(String[] args) throws IOException, RunnerException {
    Main.main(new String[] {Utf8Benchmark.class.getName()});
  }
}
| 8,542 |
0 | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio | Create_ds/okio/okio/jvm/jmh/src/jmh/java/com/squareup/okio/benchmarks/IndexOfElementBenchmark.java | /*
* Copyright (C) 2016 Square, Inc. and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okio.benchmarks;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import okio.Buffer;
import okio.ByteString;
import org.openjdk.jmh.Main;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.RunnerException;
/**
 * Measures three ways of locating bytes in a {@link Buffer}: a single-byte
 * search, a full {@link ByteString} search, and {@code indexOfElement} (the
 * first occurrence of any byte contained in the byte string).
 */
@Fork(1)
@Warmup(iterations = 5, time = 2)
@Measurement(iterations = 5, time = 2)
@State(Scope.Benchmark)
@BenchmarkMode(Mode.SampleTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public class IndexOfElementBenchmark {
  /** The needle, planted halfway through {@link #buffer} by {@link #setup()}. */
  ByteString byteString = ByteString.encodeUtf8("abcd");
  /** The haystack: zero-filled filler, the needle, then more zero filler. */
  Buffer buffer;
  @Param({ "32768" })
  int bufferSize;

  @Setup
  public void setup() throws IOException {
    // Every search therefore scans bufferSize / 2 bytes before its first hit.
    buffer = new Buffer()
        .write(new byte[bufferSize / 2])
        .write(byteString)
        .write(new byte[(bufferSize / 2) - byteString.size()]);
  }

  // Each benchmark returns the found index so the JMH harness consumes it.
  // A void benchmark whose computed value is unused risks having the measured
  // work dead-code-eliminated by the JIT, invalidating the measurement.
  @Benchmark
  public long indexOfByte() throws IOException {
    return buffer.indexOf((byte) 'b', 0L);
  }

  @Benchmark
  public long indexOfByteString() throws IOException {
    return buffer.indexOf(byteString, 0L);
  }

  @Benchmark
  public long indexOfElement() throws IOException {
    return buffer.indexOfElement(byteString, 0L);
  }

  /** Convenience launcher so the benchmark can be run directly from an IDE. */
  public static void main(String[] args) throws IOException, RunnerException {
    Main.main(new String[] {
        IndexOfElementBenchmark.class.getName()
    });
  }
}
| 8,543 |
0 | Create_ds/okio/samples/src/jvmTest/java/okio | Create_ds/okio/samples/src/jvmTest/java/okio/samples/SourceMarkerTest.java | /*
* Copyright (C) 2013 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.IOException;
import java.util.Arrays;
import okio.Buffer;
import okio.BufferedSource;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
/**
 * Exercises {@link SourceMarker}'s mark/reset contract: marked positions can be
 * revisited while their data is still retained, resets outside the retained
 * range fail with {@link IOException}, and operations on a closed source fail
 * fast with {@link IllegalStateException}.
 */
public final class SourceMarkerTest {
  /** Standard fixture contents: one byte per letter makes offsets easy to follow. */
  private static final String ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";

  @Test public void test() throws Exception {
    SourceMarker marker = alphabetMarker();
    BufferedSource source = marker.source();
    assertThat(source.readUtf8(3)).isEqualTo("ABC");
    long pos3 = marker.mark(7); // DEFGHIJ
    assertThat(source.readUtf8(4)).isEqualTo("DEFG");
    long pos7 = marker.mark(5); // HIJKL
    assertThat(source.readUtf8(4)).isEqualTo("HIJK");
    marker.reset(pos7); // Back to 'H'
    assertThat(source.readUtf8(3)).isEqualTo("HIJ");
    marker.reset(pos3); // Back to 'D'
    assertThat(source.readUtf8(7)).isEqualTo("DEFGHIJ");
    marker.reset(pos7); // Back to 'H' again.
    assertThat(source.readUtf8(6)).isEqualTo("HIJKLM");
    // Reading 'M' went past both marks' read limits, so neither is valid now.
    assertResetOutOfRange(marker, pos7);
    assertResetOutOfRange(marker, pos3);
  }

  @Test public void exceedLimitTest() throws Exception {
    SourceMarker marker = alphabetMarker();
    BufferedSource source = marker.source();
    assertThat(source.readUtf8(3)).isEqualTo("ABC");
    long pos3 = marker.mark(Long.MAX_VALUE); // D...
    assertThat(source.readUtf8(4)).isEqualTo("DEFG");
    long pos7 = marker.mark(5); // H...
    assertThat(source.readUtf8(4)).isEqualTo("HIJK");
    marker.reset(pos7); // Back to 'H'
    assertThat(source.readUtf8(3)).isEqualTo("HIJ");
    marker.reset(pos3); // Back to 'D'
    assertThat(source.readUtf8(7)).isEqualTo("DEFGHIJ");
    marker.reset(pos7); // Back to 'H' again.
    assertThat(source.readUtf8(6)).isEqualTo("HIJKLM");
    // The outer mark's Long.MAX_VALUE limit keeps both positions retained.
    marker.reset(pos7); // Back to 'H' again despite the original limit being exceeded
    assertThat(source.readUtf8(2)).isEqualTo("HI"); // confirm we're really back at H
    marker.reset(pos3); // Back to 'D' again despite the original limit being exceeded
    assertThat(source.readUtf8(2)).isEqualTo("DE"); // confirm that we're really back at D
  }

  @Test public void markAndLimitSmallerThanUserBuffer() throws Exception {
    SourceMarker marker = alphabetMarker();
    BufferedSource source = marker.source();
    // Load 5 bytes into the user buffer, then mark 0..3 and confirm that resetting from 4 fails.
    source.require(5);
    long pos0 = marker.mark(3);
    assertThat(source.readUtf8(3)).isEqualTo("ABC");
    marker.reset(pos0);
    assertThat(source.readUtf8(4)).isEqualTo("ABCD");
    assertResetOutOfRange(marker, pos0);
  }

  @Test public void resetTooLow() throws Exception {
    SourceMarker marker = alphabetMarker();
    BufferedSource source = marker.source();
    source.skip(3);
    marker.mark(3);
    source.skip(2);
    // Offset 2 precedes the mark, so it was never retained.
    assertResetOutOfRange(marker, 2);
  }

  @Test public void resetTooHigh() throws Exception {
    SourceMarker marker = alphabetMarker();
    BufferedSource source = marker.source();
    marker.mark(3);
    source.skip(6);
    // Offset 4 is beyond the mark's 3-byte read limit.
    assertResetOutOfRange(marker, 4);
  }

  @Test public void resetUnread() throws Exception {
    SourceMarker marker = alphabetMarker();
    marker.mark(3);
    // Nothing has been read yet, so offset 2 is not resettable.
    assertResetOutOfRange(marker, 2);
  }

  @Test public void markNothingBuffered() throws Exception {
    SourceMarker marker = alphabetMarker();
    BufferedSource source = marker.source();
    long pos0 = marker.mark(5);
    assertThat(source.readUtf8(4)).isEqualTo("ABCD");
    marker.reset(pos0);
    assertThat(source.readUtf8(6)).isEqualTo("ABCDEF");
  }

  @Test public void mark0() throws Exception {
    SourceMarker marker = alphabetMarker();
    BufferedSource source = marker.source();
    // A zero-length limit still permits an immediate reset.
    long pos0 = marker.mark(0);
    marker.reset(pos0);
    assertThat(source.readUtf8(3)).isEqualTo("ABC");
  }

  @Test public void markNegative() throws Exception {
    SourceMarker marker = alphabetMarker();
    try {
      marker.mark(-1L);
      fail();
    } catch (IllegalArgumentException expected) {
      assertThat(expected).hasMessage("readLimit < 0: -1");
    }
  }

  @Test public void resetAfterEof() throws Exception {
    SourceMarker marker = new SourceMarker(new Buffer().writeUtf8("ABCDE"));
    BufferedSource source = marker.source();
    long pos0 = marker.mark(5);
    assertThat(source.readUtf8()).isEqualTo("ABCDE");
    marker.reset(pos0);
    assertThat(source.readUtf8(3)).isEqualTo("ABC");
  }

  @Test public void closeThenMark() throws Exception {
    SourceMarker marker = alphabetMarker();
    BufferedSource source = marker.source();
    source.close();
    try {
      marker.mark(5);
      fail();
    } catch (IllegalStateException expected) {
      assertThat(expected).hasMessage("closed");
    }
  }

  @Test public void closeThenReset() throws Exception {
    SourceMarker marker = alphabetMarker();
    BufferedSource source = marker.source();
    long pos0 = marker.mark(5);
    source.close();
    try {
      marker.reset(pos0);
      fail();
    } catch (IllegalStateException expected) {
      assertThat(expected).hasMessage("closed");
    }
  }

  @Test public void closeThenRead() throws Exception {
    SourceMarker marker = alphabetMarker();
    BufferedSource source = marker.source();
    source.close();
    try {
      source.readUtf8(3);
      fail();
    } catch (IllegalStateException expected) {
      assertThat(expected).hasMessage("closed");
    }
  }

  @Test public void multipleSegments() throws Exception {
    // 10k of each letter forces the data to span several okio segments.
    String as = repeat('a', 10_000);
    String bs = repeat('b', 10_000);
    String cs = repeat('c', 10_000);
    String ds = repeat('d', 10_000);
    SourceMarker marker = new SourceMarker(new Buffer().writeUtf8(as + bs + cs + ds));
    BufferedSource source = marker.source();
    assertThat(source.readUtf8(10_000)).isEqualTo(as);
    long pos10k = marker.mark(15_000);
    assertThat(source.readUtf8(10_000)).isEqualTo(bs);
    long pos20k = marker.mark(15_000);
    assertThat(source.readUtf8(10_000)).isEqualTo(cs);
    marker.reset(pos20k);
    marker.reset(pos10k);
    assertThat(source.readUtf8(30_000)).isEqualTo(bs + cs + ds);
  }

  /** Returns a fresh marker over {@link #ALPHABET}. */
  private static SourceMarker alphabetMarker() {
    return new SourceMarker(new Buffer().writeUtf8(ALPHABET));
  }

  /**
   * Asserts that resetting {@code marker} to {@code userOffset} fails because
   * that offset is no longer (or was never) retained.
   */
  private static void assertResetOutOfRange(SourceMarker marker, long userOffset) {
    try {
      marker.reset(userOffset);
      fail();
    } catch (IOException expected) {
      assertThat(expected).hasMessage("cannot reset to " + userOffset + ": out of range");
    }
  }

  /** Returns {@code c} repeated {@code count} times. */
  private static String repeat(char c, int count) {
    char[] array = new char[count];
    Arrays.fill(array, c);
    return new String(array);
  }
}
| 8,544 |
0 | Create_ds/okio/samples/src/jvmTest/java/okio | Create_ds/okio/samples/src/jvmTest/java/okio/samples/ChannelsTest.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
import java.util.EnumSet;
import java.util.Set;
import okio.Buffer;
import okio.BufferedSource;
import okio.Okio;
import okio.Sink;
import okio.Source;
import okio.Timeout;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import static java.nio.file.StandardOpenOption.APPEND;
import static java.nio.file.StandardOpenOption.READ;
import static java.nio.file.StandardOpenOption.WRITE;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Exercises the sample NIO channel adapters ({@code ByteChannelSource},
 * {@code ByteChannelSink}, {@code FileChannelSource}, {@code FileChannelSink})
 * against in-memory buffers and real temporary files.
 */
public final class ChannelsTest {
  /** Test payload. The file tests rely on its length (317 chars, all ASCII). */
  private static final String quote =
      "John, the kind of control you're attempting simply is... it's not "
      + "possible. If there is one thing the history of evolution has "
      + "taught us it's that life will not be contained. Life breaks "
      + "free, it expands to new territories and crashes through "
      + "barriers, painfully, maybe even dangerously, but, uh... well, "
      + "there it is.";
  // Open-option sets for reading, (over)writing, and appending.
  private static final Set<StandardOpenOption> r = EnumSet.of(READ);
  private static final Set<StandardOpenOption> w = EnumSet.of(WRITE);
  private static final Set<StandardOpenOption> append = EnumSet.of(WRITE, APPEND);
  @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder();

  @Test public void testReadChannel() throws Exception {
    // A Buffer doubles as an in-memory ReadableByteChannel.
    ReadableByteChannel channel = new Buffer().writeUtf8(quote);
    Buffer buffer = new Buffer();
    Source source = new ByteChannelSource(channel, Timeout.NONE);
    source.read(buffer, 75);
    assertThat(buffer.readUtf8())
        .isEqualTo("John, the kind of control you're attempting simply is... it's not possible.");
  }

  @Test public void testReadChannelFully() throws Exception {
    ReadableByteChannel channel = new Buffer().writeUtf8(quote);
    BufferedSource source = Okio.buffer(new ByteChannelSource(channel, Timeout.NONE));
    assertThat(source.readUtf8())
        .isEqualTo(quote);
  }

  @Test public void testWriteChannel() throws Exception {
    Buffer channel = new Buffer();
    Sink sink = new ByteChannelSink(channel, Timeout.NONE);
    sink.write(new Buffer().writeUtf8(quote), 75);
    assertThat(channel.readUtf8())
        .isEqualTo("John, the kind of control you're attempting simply is... it's not possible.");
  }

  @Test public void testReadWriteFile() throws Exception {
    java.nio.file.Path path = temporaryFolder.newFile().toPath();
    // try-with-resources closes the FileChannel even if an assertion fails;
    // the original explicit close leaked the channel on failure, and the
    // read channel below was never closed at all.
    try (Sink sink = new FileChannelSink(FileChannel.open(path, w), Timeout.NONE)) {
      sink.write(new Buffer().writeUtf8(quote), 317);
    }
    assertTrue(Files.exists(path));
    assertEquals(quote.length(), Files.size(path));
    Buffer buffer = new Buffer();
    try (Source source = new FileChannelSource(FileChannel.open(path, r), Timeout.NONE)) {
      source.read(buffer, 44);
      assertThat(buffer.readUtf8())
          .isEqualTo("John, the kind of control you're attempting ");
      source.read(buffer, 31);
      assertThat(buffer.readUtf8())
          .isEqualTo("simply is... it's not possible.");
    }
  }

  @Test public void testAppend() throws Exception {
    java.nio.file.Path path = temporaryFolder.newFile().toPath();
    Buffer buffer = new Buffer().writeUtf8(quote);
    // Write the first 75 bytes of the quote, then verify them.
    try (Sink sink = new FileChannelSink(FileChannel.open(path, w), Timeout.NONE)) {
      sink.write(buffer, 75);
    }
    assertTrue(Files.exists(path));
    assertEquals(75, Files.size(path));
    try (BufferedSource source =
        Okio.buffer(new FileChannelSource(FileChannel.open(path, r), Timeout.NONE))) {
      assertThat(source.readUtf8())
          .isEqualTo("John, the kind of control you're attempting simply is... it's not possible.");
    }
    // Append the remainder and verify the whole quote round-tripped.
    try (Sink sink = new FileChannelSink(FileChannel.open(path, append), Timeout.NONE)) {
      sink.write(buffer, buffer.size());
    }
    assertTrue(Files.exists(path));
    assertEquals(quote.length(), Files.size(path));
    try (BufferedSource source =
        Okio.buffer(new FileChannelSource(FileChannel.open(path, r), Timeout.NONE))) {
      assertThat(source.readUtf8())
          .isEqualTo(quote);
    }
  }
}
| 8,545 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/Hashing.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.IOException;
import okio.Buffer;
import okio.BufferedSink;
import okio.BufferedSource;
import okio.ByteString;
import okio.FileSystem;
import okio.HashingSink;
import okio.HashingSource;
import okio.Okio;
import okio.Path;
import okio.Source;
/**
 * Demonstrates okio's hashing facilities: digesting a {@link ByteString} or
 * {@link Buffer} directly, streaming digests via {@link HashingSource} and
 * {@link HashingSink}, and keyed HMACs.
 */
public final class Hashing {
  public void run() throws Exception {
    Path readmePath = Path.get("../README.md");

    // Hash an immutable ByteString snapshot of the file.
    System.out.println("ByteString");
    ByteString fileBytes = readByteString(readmePath);
    System.out.println("       md5: " + fileBytes.md5().hex());
    System.out.println("      sha1: " + fileBytes.sha1().hex());
    System.out.println("    sha256: " + fileBytes.sha256().hex());
    System.out.println("    sha512: " + fileBytes.sha512().hex());
    System.out.println();

    // Hash a mutable Buffer holding the same bytes.
    System.out.println("Buffer");
    Buffer fileBuffer = readBuffer(readmePath);
    System.out.println("       md5: " + fileBuffer.md5().hex());
    System.out.println("      sha1: " + fileBuffer.sha1().hex());
    System.out.println("    sha256: " + fileBuffer.sha256().hex());
    System.out.println("    sha512: " + fileBuffer.sha512().hex());
    System.out.println();

    // Digest bytes as they stream through a source.
    System.out.println("HashingSource");
    try (HashingSource sha256Source = HashingSource.sha256(FileSystem.SYSTEM.source(readmePath));
        BufferedSource bufferedSource = Okio.buffer(sha256Source)) {
      bufferedSource.readAll(Okio.blackhole());
      System.out.println("    sha256: " + sha256Source.hash().hex());
    }
    System.out.println();

    // Digest bytes as they stream through a sink.
    System.out.println("HashingSink");
    try (HashingSink sha256Sink = HashingSink.sha256(Okio.blackhole());
        BufferedSink bufferedSink = Okio.buffer(sha256Sink);
        Source fileSource = FileSystem.SYSTEM.source(readmePath)) {
      bufferedSink.writeAll(fileSource);
      bufferedSink.close(); // Emit anything buffered.
      System.out.println("    sha256: " + sha256Sink.hash().hex());
    }
    System.out.println();

    // Keyed-hash message authentication code.
    System.out.println("HMAC");
    ByteString secret = ByteString.decodeHex("7065616e7574627574746572");
    System.out.println("hmacSha256: " + fileBytes.hmacSha256(secret).hex());
    System.out.println();
  }

  /** Reads the entire file at {@code path} into an immutable {@link ByteString}. */
  public ByteString readByteString(Path path) throws IOException {
    try (Source fileSource = FileSystem.SYSTEM.source(path);
        BufferedSource bufferedSource = Okio.buffer(fileSource)) {
      return bufferedSource.readByteString();
    }
  }

  /** Reads the entire file at {@code path} into a {@link Buffer}. */
  public Buffer readBuffer(Path path) throws IOException {
    Buffer result = new Buffer();
    try (Source fileSource = FileSystem.SYSTEM.source(path)) {
      result.writeAll(fileSource);
    }
    return result;
  }

  public static void main(String[] args) throws Exception {
    new Hashing().run();
  }
}
| 8,546 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/ReadJavaIoFileLineByLine.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.File;
import java.io.IOException;
import okio.BufferedSource;
import okio.Okio;
import okio.Source;
/** Streams a {@code java.io.File} line by line, printing lines that mention "square". */
public final class ReadJavaIoFileLineByLine {
  public void run() throws Exception {
    readLines(new File("../README.md"));
  }

  /** Prints every line of {@code file} containing the substring "square". */
  public void readLines(File file) throws IOException {
    // try-with-resources closes both the buffered wrapper and the raw source.
    try (Source rawSource = Okio.source(file);
        BufferedSource source = Okio.buffer(rawSource)) {
      // readUtf8Line() returns null at end of input.
      for (String line; (line = source.readUtf8Line()) != null; ) {
        if (line.contains("square")) {
          System.out.println(line);
        }
      }
    }
  }

  public static void main(String... args) throws Exception {
    new ReadJavaIoFileLineByLine().run();
  }
}
| 8,547 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/FileChannelSink.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import okio.Buffer;
import okio.Sink;
import okio.Timeout;
/**
* Special Sink for a FileChannel to take advantage of the
* {@link FileChannel#transferFrom(ReadableByteChannel, long, long) transfer} method available.
*/
/**
 * Special Sink for a FileChannel to take advantage of the
 * {@link FileChannel#transferFrom(ReadableByteChannel, long, long) transfer} method available.
 */
final class FileChannelSink implements Sink {
  private final FileChannel channel;
  private final Timeout timeout;
  /** Next write offset in the file; advanced manually after every transfer. */
  private long position;

  FileChannelSink(FileChannel channel, Timeout timeout) throws IOException {
    this.channel = channel;
    this.timeout = timeout;
    position = channel.position();
  }

  @Override public void write(Buffer source, long byteCount) throws IOException {
    if (!channel.isOpen()) throw new IllegalStateException("closed");
    if (byteCount == 0) return;
    // transferFrom may move fewer bytes than requested, so loop until done.
    for (long bytesLeft = byteCount; bytesLeft > 0; ) {
      long transferred = channel.transferFrom(source, position, bytesLeft);
      position += transferred;
      bytesLeft -= transferred;
    }
  }

  @Override public void flush() throws IOException {
    // Cannot alter meta data through this Sink
    channel.force(false);
  }

  @Override public Timeout timeout() {
    return timeout;
  }

  @Override public void close() throws IOException {
    channel.close();
  }
}
| 8,548 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/FileChannelSource.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.channels.WritableByteChannel;
import okio.Buffer;
import okio.Source;
import okio.Timeout;
/**
* Special Source for a FileChannel to take advantage of the
* {@link FileChannel#transferTo(long, long, WritableByteChannel) transfer} method available.
*/
/**
 * Special Source for a FileChannel to take advantage of the
 * {@link FileChannel#transferTo(long, long, WritableByteChannel) transfer} method available.
 */
final class FileChannelSource implements Source {
  private final FileChannel channel;
  private final Timeout timeout;
  /** Next read offset in the file; advanced manually after every transfer. */
  private long position;

  FileChannelSource(FileChannel channel, Timeout timeout) throws IOException {
    this.channel = channel;
    this.timeout = timeout;
    position = channel.position();
  }

  @Override public long read(Buffer sink, long byteCount) throws IOException {
    if (!channel.isOpen()) throw new IllegalStateException("closed");
    if (position == channel.size()) return -1L; // Exhausted.
    long transferred = channel.transferTo(position, byteCount, sink);
    position += transferred;
    return transferred;
  }

  @Override public Timeout timeout() {
    return timeout;
  }

  @Override public void close() throws IOException {
    channel.close();
  }
}
| 8,549 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/WriteFile.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.IOException;
import java.util.Map;
import okio.BufferedSink;
import okio.FileSystem;
import okio.Okio;
import okio.Path;
import okio.Sink;
/** Dumps the process environment to a file, one {@code NAME=value} pair per line. */
public final class WriteFile {
  public void run() throws Exception {
    writeEnv(Path.get("env.txt"));
  }

  /** Writes every environment variable to {@code path} as a {@code NAME=value} line. */
  public void writeEnv(Path path) throws IOException {
    try (Sink rawSink = FileSystem.SYSTEM.sink(path);
        BufferedSink sink = Okio.buffer(rawSink)) {
      for (Map.Entry<String, String> variable : System.getenv().entrySet()) {
        // writeUtf8 returns the sink, so the calls chain.
        sink.writeUtf8(variable.getKey())
            .writeUtf8("=")
            .writeUtf8(variable.getValue())
            .writeUtf8("\n");
      }
    }
  }

  public static void main(String... args) throws Exception {
    new WriteFile().run();
  }
}
| 8,550 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/Interceptors.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.IOException;
import java.util.Random;
import okio.Buffer;
import okio.ForwardingSink;
import okio.ForwardingSource;
import okio.Sink;
import okio.Source;
/**
* Demonstrates use of the {@link Buffer.UnsafeCursor} class. While other
* samples might demonstrate real use cases, this sample hopes to show the
* basics of using an {@link Buffer.UnsafeCursor}:
* <ul>
* <li>Efficient reuse of a single cursor instance.</li>
* <li>Guaranteed release of an attached cursor.</li>
* <li>Safe traversal of the data in a Buffer.</li>
* </ul>
*
* <p>This sample implements a
* <a href="https://en.wikipedia.org/wiki/Cipher_disk">circular cipher</a> by
* creating a Source which will intercept all bytes written to the wire and
* decrease their value by a specific amount. Then create a Sink which will
* intercept all bytes read from the wire and increase their value by that same
* specific amount. This creates an incredibly insecure way of encrypting data
* written to the wire but demonstrates the power of the
* {@link Buffer.UnsafeCursor} class for efficient operations on the bytes
* being written and read.
*/
public final class Interceptors {
  /**
   * Round-trips a message through an in-memory "wire": a Sink rotates every byte
   * down by {@code cipher} on write, and a Source rotates each byte back up by the
   * same amount on read.
   */
  public void run() throws Exception {
    // Shared rotation amount in [-128, 127]; both endpoints must agree on it.
    final byte cipher = (byte) (new Random().nextInt(256) - 128);
    System.out.println("Cipher : " + cipher);
    Buffer wire = new Buffer();
    // Create a Sink which will intercept and negatively rotate each byte by `cipher`
    Sink sink = new InterceptingSink(wire) {
      @Override
      protected void intercept(byte[] data, int offset, int length) {
        for (int i = offset, end = offset + length; i < end; i++) {
          data[i] -= cipher;
        }
      }
    };
    // Create a Source which will intercept and positively rotate each byte by `cipher`
    Source source = new InterceptingSource(wire) {
      @Override
      protected void intercept(byte[] data, int offset, int length) {
        for (int i = offset, end = offset + length; i < end; i++) {
          data[i] += cipher;
        }
      }
    };
    Buffer transmit = new Buffer();
    transmit.writeUtf8("This is not really a secure message");
    System.out.println("Transmit : " + transmit);
    sink.write(transmit, transmit.size());
    System.out.println("Wire : " + wire);
    Buffer receive = new Buffer();
    source.read(receive, Long.MAX_VALUE);
    System.out.println("Receive : " + receive);
  }

  /** A Source that lets subclasses mutate bytes in place as they are read through. */
  abstract class InterceptingSource extends ForwardingSource {
    // One reusable cursor per source; attached and released around each read.
    private final Buffer.UnsafeCursor cursor = new Buffer.UnsafeCursor();

    InterceptingSource(Source source) {
      super(source);
    }

    @Override
    public long read(Buffer sink, long byteCount) throws IOException {
      if (byteCount < 0) throw new IllegalArgumentException("byteCount < 0: " + byteCount);
      if (byteCount == 0) return 0;
      // Fill `sink` from the delegate first, then rewrite only the bytes just added.
      long result = super.read(sink, byteCount);
      if (result == -1L) return result;
      sink.readUnsafe(cursor);
      try {
        long remaining = result;
        // Seek to where the freshly-read bytes start; walk segment by segment.
        for (int length = cursor.seek(sink.size() - result);
            remaining > 0 && length > 0;
            length = cursor.next()) {
          int toIntercept = (int) Math.min(length, remaining);
          intercept(cursor.data, cursor.start, toIntercept);
          remaining -= toIntercept;
        }
      } finally {
        cursor.close(); // Guarantees the attached cursor is released.
      }
      return result;
    }

    /** Mutates {@code length} bytes of {@code data} starting at {@code offset}, in place. */
    protected abstract void intercept(byte[] data, int offset, int length) throws IOException;
  }

  /** A Sink that lets subclasses mutate bytes in place before forwarding them. */
  abstract class InterceptingSink extends ForwardingSink {
    private final Buffer.UnsafeCursor cursor = new Buffer.UnsafeCursor();

    InterceptingSink(Sink delegate) {
      super(delegate);
    }

    @Override
    public void write(Buffer source, long byteCount) throws IOException {
      if (byteCount < 0) throw new IllegalArgumentException("byteCount < 0: " + byteCount);
      if (source.size() < byteCount) {
        throw new IllegalArgumentException("size=" + source.size() + " byteCount=" + byteCount);
      }
      if (byteCount == 0) return;
      // Rewrite the first `byteCount` bytes of `source` in place, then forward them.
      source.readUnsafe(cursor);
      try {
        long remaining = byteCount;
        for (int length = cursor.seek(0);
            remaining > 0 && length > 0;
            length = cursor.next()) {
          int toIntercept = (int) Math.min(length, remaining);
          intercept(cursor.data, cursor.start, toIntercept);
          remaining -= toIntercept;
        }
      } finally {
        cursor.close();
      }
      super.write(source, byteCount);
    }

    /** Mutates {@code length} bytes of {@code data} starting at {@code offset}, in place. */
    protected abstract void intercept(byte[] data, int offset, int length) throws IOException;
  }

  public static void main(String... args) throws Exception {
    new Interceptors().run();
  }
}
| 8,551 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/ByteChannelSink.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;
import okio.Buffer;
import okio.Sink;
import okio.Timeout;
/**
* Creates a Sink around a WritableByteChannel and efficiently writes data using an UnsafeCursor.
*
* <p>This is a basic example showing another use for the UnsafeCursor. Using the
* {@link ByteBuffer#wrap(byte[], int, int) ByteBuffer.wrap()} along with access to Buffer segments,
* a WritableByteChannel can be given direct access to Buffer data without having to copy the data.
*/
final class ByteChannelSink implements Sink {
  private final WritableByteChannel channel;
  private final Timeout timeout;
  // Reused across writes; attached per loop pass via try-with-resources below.
  private final Buffer.UnsafeCursor cursor = new Buffer.UnsafeCursor();

  ByteChannelSink(WritableByteChannel channel, Timeout timeout) {
    this.channel = channel;
    this.timeout = timeout;
  }

  @Override public void write(Buffer source, long byteCount) throws IOException {
    if (!channel.isOpen()) throw new IllegalStateException("closed");
    if (byteCount == 0) return;
    long remaining = byteCount;
    while (remaining > 0) {
      timeout.throwIfReached();
      // Expose the buffer's head segment and hand it to the channel without copying.
      try (Buffer.UnsafeCursor ignored = source.readUnsafe(cursor)) {
        cursor.seek(0);
        // At most one segment per pass; the channel may accept even fewer bytes.
        int length = (int) Math.min(cursor.end - cursor.start, remaining);
        int written = channel.write(ByteBuffer.wrap(cursor.data, cursor.start, length));
        remaining -= written;
        // Consume what the channel took so the next seek(0) sees unwritten data.
        source.skip(written);
      }
    }
  }

  @Override public void flush() {}

  @Override public Timeout timeout() {
    return timeout;
  }

  @Override public void close() throws IOException {
    channel.close();
  }
}
| 8,552 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/WriteJavaIoFile.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import okio.BufferedSink;
import okio.Okio;
import okio.Sink;
/**
 * Writes every environment variable, one {@code KEY=VALUE} pair per line, to a
 * {@code java.io.File} through a buffered Okio sink.
 */
public final class WriteJavaIoFile {
  public void run() throws Exception {
    writeEnv(new File("env.txt"));
  }

  /**
   * Streams the current process environment into {@code file} as UTF-8 text.
   *
   * @param file destination; created or truncated by {@link Okio#sink(File)}.
   * @throws IOException if the file cannot be opened or written.
   */
  public void writeEnv(File file) throws IOException {
    // Both the raw sink and the buffer are closed (and flushed) by try-with-resources.
    try (Sink fileSink = Okio.sink(file);
        BufferedSink bufferedSink = Okio.buffer(fileSink)) {
      for (Map.Entry<String, String> entry : System.getenv().entrySet()) {
        bufferedSink.writeUtf8(entry.getKey());
        bufferedSink.writeUtf8("=");
        bufferedSink.writeUtf8(entry.getValue());
        bufferedSink.writeUtf8("\n");
      }
    }
  }

  public static void main(String... args) throws Exception {
    // Fix: previously instantiated the unrelated WriteFile sample, so running this
    // class executed a different demo. Run this class's own entry point.
    new WriteJavaIoFile().run();
  }
}
| 8,553 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/SocksProxyServer.java | /*
* Copyright (C) 2014 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.Closeable;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ProtocolException;
import java.net.Proxy;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.URL;
import java.net.URLConnection;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import okio.Buffer;
import okio.BufferedSink;
import okio.BufferedSource;
import okio.Okio;
import okio.Sink;
import okio.Source;
/**
* A partial implementation of SOCKS Protocol Version 5.
* See <a href="https://www.ietf.org/rfc/rfc1928.txt">RFC 1928</a>.
*/
public final class SocksProxyServer {
  private static final int VERSION_5 = 5;
  private static final int METHOD_NO_AUTHENTICATION_REQUIRED = 0;
  private static final int ADDRESS_TYPE_IPV4 = 1;
  private static final int ADDRESS_TYPE_DOMAIN_NAME = 3;
  private static final int COMMAND_CONNECT = 1;
  private static final int REPLY_SUCCEEDED = 0;

  private final ExecutorService executor = Executors.newCachedThreadPool();
  private ServerSocket serverSocket;
  // Every socket accepted or dialed; closed en masse when the accept loop exits.
  private final Set<Socket> openSockets =
      Collections.newSetFromMap(new ConcurrentHashMap<>());

  /** Binds an ephemeral local port and starts the accept loop on the pool. */
  public void start() throws IOException {
    serverSocket = new ServerSocket(0);
    executor.execute(this::acceptSockets);
  }

  /** Stops accepting; closing the server socket also unblocks {@code accept()}. */
  public void shutdown() throws IOException {
    serverSocket.close();
    executor.shutdown();
  }

  /** Returns a Proxy callers can pass to {@code URL.openConnection(Proxy)}. */
  public Proxy proxy() {
    return new Proxy(Proxy.Type.SOCKS,
        InetSocketAddress.createUnresolved("localhost", serverSocket.getLocalPort()));
  }

  /** Accept loop: runs until the server socket closes, handing each client to the pool. */
  private void acceptSockets() {
    try {
      while (true) {
        final Socket from = serverSocket.accept();
        openSockets.add(from);
        executor.execute(() -> handleSocket(from));
      }
    } catch (IOException e) {
      System.out.println("shutting down: " + e);
    } finally {
      for (Socket socket : openSockets) {
        closeQuietly(socket);
      }
    }
  }

  /**
   * Performs the SOCKS5 handshake on {@code fromSocket}, dials the requested
   * destination, then pumps bytes in both directions until either side closes.
   */
  private void handleSocket(final Socket fromSocket) {
    try {
      final BufferedSource fromSource = Okio.buffer(Okio.source(fromSocket));
      final BufferedSink fromSink = Okio.buffer(Okio.sink(fromSocket));
      // Read the hello: version byte, then a list of supported auth methods.
      int socksVersion = fromSource.readByte() & 0xff;
      if (socksVersion != VERSION_5) throw new ProtocolException();
      int methodCount = fromSource.readByte() & 0xff;
      boolean foundSupportedMethod = false;
      for (int i = 0; i < methodCount; i++) {
        int method = fromSource.readByte() & 0xff;
        foundSupportedMethod |= method == METHOD_NO_AUTHENTICATION_REQUIRED;
      }
      if (!foundSupportedMethod) throw new ProtocolException();
      // Respond to hello.
      fromSink.writeByte(VERSION_5)
          .writeByte(METHOD_NO_AUTHENTICATION_REQUIRED)
          .emit();
      // Read a command. Only CONNECT is supported.
      int version = fromSource.readByte() & 0xff;
      int command = fromSource.readByte() & 0xff;
      int reserved = fromSource.readByte() & 0xff;
      if (version != VERSION_5 || command != COMMAND_CONNECT || reserved != 0) {
        throw new ProtocolException();
      }
      // Read an address: 4 raw IPv4 bytes, or a length-prefixed domain name.
      int addressType = fromSource.readByte() & 0xff;
      InetAddress inetAddress;
      if (addressType == ADDRESS_TYPE_IPV4) {
        inetAddress = InetAddress.getByAddress(fromSource.readByteArray(4L));
      } else if (addressType == ADDRESS_TYPE_DOMAIN_NAME){
        int domainNameLength = fromSource.readByte() & 0xff;
        inetAddress = InetAddress.getByName(fromSource.readUtf8(domainNameLength));
      } else {
        throw new ProtocolException();
      }
      int port = fromSource.readShort() & 0xffff;
      // Connect to the caller's specified host.
      final Socket toSocket = new Socket(inetAddress, port);
      openSockets.add(toSocket);
      byte[] localAddress = toSocket.getLocalAddress().getAddress();
      if (localAddress.length != 4) throw new ProtocolException(); // IPv4 replies only.
      // Write the reply.
      fromSink.writeByte(VERSION_5)
          .writeByte(REPLY_SUCCEEDED)
          .writeByte(0)
          .writeByte(ADDRESS_TYPE_IPV4)
          .write(localAddress)
          .writeShort(toSocket.getLocalPort())
          .emit();
      // Connect sources to sinks in both directions, one pool task per direction.
      final Sink toSink = Okio.sink(toSocket);
      executor.execute(() -> transfer(fromSocket, fromSource, toSink));
      final Source toSource = Okio.source(toSocket);
      executor.execute(() -> transfer(toSocket, toSource, fromSink));
    } catch (IOException e) {
      closeQuietly(fromSocket);
      openSockets.remove(fromSocket);
      System.out.println("connect failed for " + fromSocket + ": " + e);
    }
  }

  /**
   * Read data from {@code source} and write it to {@code sink}. This doesn't use {@link
   * BufferedSink#writeAll} because that method doesn't flush aggressively and we need that.
   */
  private void transfer(Socket sourceSocket, Source source, Sink sink) {
    try {
      Buffer buffer = new Buffer();
      for (long byteCount; (byteCount = source.read(buffer, 8192L)) != -1; ) {
        sink.write(buffer, byteCount);
        sink.flush(); // Flush per chunk so the peer sees bytes promptly.
      }
    } catch (IOException e) {
      System.out.println("transfer failed from " + sourceSocket + ": " + e);
    } finally {
      closeQuietly(sink);
      closeQuietly(source);
      closeQuietly(sourceSocket);
      openSockets.remove(sourceSocket);
    }
  }

  /** Closes {@code c}, suppressing any IOException. */
  private void closeQuietly(Closeable c) {
    try {
      c.close();
    } catch (IOException ignored) {
    }
  }

  public static void main(String[] args) throws IOException {
    SocksProxyServer proxyServer = new SocksProxyServer();
    proxyServer.start();
    URL url = new URL("https://publicobject.com/helloworld.txt");
    URLConnection connection = url.openConnection(proxyServer.proxy());
    try (BufferedSource source = Okio.buffer(Okio.source(connection.getInputStream()))) {
      for (String line; (line = source.readUtf8Line()) != null; ) {
        System.out.println(line);
      }
    }
    proxyServer.shutdown();
  }
}
| 8,554 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/BitmapEncoder.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.IOException;
import okio.BufferedSink;
import okio.FileSystem;
import okio.Okio;
import okio.Path;
/** Encodes a trivial in-memory bitmap as an uncompressed 24-bit BMP file. */
public final class BitmapEncoder {
  /** Row-major image: one packed {@code 0xRRGGBB} int per pixel. */
  static final class Bitmap {
    private final int[][] pixels;
    Bitmap(int[][] pixels) {
      this.pixels = pixels;
    }
    int width() {
      return pixels[0].length;
    }
    int height() {
      return pixels.length;
    }
    int red(int x, int y) {
      return (pixels[y][x] & 0xff0000) >> 16;
    }
    int green(int x, int y) {
      return (pixels[y][x] & 0xff00) >> 8;
    }
    int blue(int x, int y) {
      return (pixels[y][x] & 0xff);
    }
  }

  /**
   * Returns a bitmap that lights up red subpixels at the bottom, green subpixels on the right, and
   * blue subpixels in bottom-right.
   */
  Bitmap generateGradient() {
    int[][] pixels = new int[1080][1920];
    for (int y = 0; y < 1080; y++) {
      for (int x = 0; x < 1920; x++) {
        int r = (int) (y / 1080f * 255);
        int g = (int) (x / 1920f * 255);
        int b = (int) ((Math.hypot(x, y) / Math.hypot(1080, 1920)) * 255);
        pixels[y][x] = r << 16 | g << 8 | b;
      }
    }
    return new Bitmap(pixels);
  }

  /** Writes {@code bitmap} to {@code path} as a BMP file. */
  void encode(Bitmap bitmap, FileSystem fileSystem, Path path) throws IOException {
    try (BufferedSink sink = Okio.buffer(fileSystem.sink(path))) {
      encode(bitmap, sink);
    }
  }

  /** https://en.wikipedia.org/wiki/BMP_file_format */
  void encode(Bitmap bitmap, BufferedSink sink) throws IOException {
    int height = bitmap.height();
    int width = bitmap.width();
    int bytesPerPixel = 3;
    int rowByteCountWithoutPadding = (bytesPerPixel * width);
    // Each pixel row is padded up to a 4-byte boundary.
    int rowByteCount = ((rowByteCountWithoutPadding + 3) / 4) * 4;
    int pixelDataSize = rowByteCount * height;
    int bmpHeaderSize = 14;
    int dibHeaderSize = 40;
    // BMP Header
    sink.writeUtf8("BM"); // ID.
    sink.writeIntLe(bmpHeaderSize + dibHeaderSize + pixelDataSize); // File size.
    sink.writeShortLe(0); // Unused.
    sink.writeShortLe(0); // Unused.
    sink.writeIntLe(bmpHeaderSize + dibHeaderSize); // Offset of pixel data.
    // DIB Header (BITMAPINFOHEADER)
    sink.writeIntLe(dibHeaderSize);
    sink.writeIntLe(width);
    sink.writeIntLe(height);
    sink.writeShortLe(1); // Color plane count.
    sink.writeShortLe(bytesPerPixel * Byte.SIZE);
    sink.writeIntLe(0); // No compression.
    // Fix: this field (biSizeImage) was hard-coded to 16. Per the BMP format it is
    // the size of the padded pixel array (0 is also permitted for uncompressed
    // BI_RGB images, but an arbitrary value like 16 is not).
    sink.writeIntLe(pixelDataSize); // Size of bitmap data including padding.
    sink.writeIntLe(2835); // Horizontal print resolution in pixels/meter. (72 dpi).
    sink.writeIntLe(2835); // Vertical print resolution in pixels/meter. (72 dpi).
    sink.writeIntLe(0); // Palette color count.
    sink.writeIntLe(0); // 0 important colors.
    // Pixel data: rows bottom-up, bytes in BGR order.
    for (int y = height - 1; y >= 0; y--) {
      for (int x = 0; x < width; x++) {
        sink.writeByte(bitmap.blue(x, y));
        sink.writeByte(bitmap.green(x, y));
        sink.writeByte(bitmap.red(x, y));
      }
      // Padding for 4-byte alignment.
      for (int p = rowByteCountWithoutPadding; p < rowByteCount; p++) {
        sink.writeByte(0);
      }
    }
  }

  public static void main(String[] args) throws Exception {
    BitmapEncoder encoder = new BitmapEncoder();
    Bitmap bitmap = encoder.generateGradient();
    encoder.encode(bitmap, FileSystem.SYSTEM, Path.get("gradient.bmp"));
  }
}
| 8,555 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/ReadFileLineByLine.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.IOException;
import okio.BufferedSource;
import okio.FileSystem;
import okio.Okio;
import okio.Path;
import okio.Source;
/** Streams a text file line by line, printing only the lines that mention "square". */
public final class ReadFileLineByLine {
  public void run() throws Exception {
    readLines(Path.get("../README.md"));
  }

  /** Reads {@code path} as UTF-8 and prints each line containing "square". */
  public void readLines(Path path) throws IOException {
    try (Source raw = FileSystem.SYSTEM.source(path);
        BufferedSource buffered = Okio.buffer(raw)) {
      // readUtf8Line() yields null at end of stream, terminating the loop.
      for (String line; (line = buffered.readUtf8Line()) != null; ) {
        if (line.contains("square")) {
          System.out.println(line);
        }
      }
    }
  }

  public static void main(String... args) throws Exception {
    new ReadFileLineByLine().run();
  }
}
| 8,556 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/ExploreCharsets.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.IOException;
import okio.ByteString;
import okio.Utf8;
public final class ExploreCharsets {
  public void run() throws Exception {
    // NOTE(review): per the trailing comments these two literals should differ in
    // Unicode normalization (precomposed é vs e + combining U+0301); they render
    // identically here, so confirm the second literal's code points survived
    // copy/paste before relying on this demo's output.
    dumpStringData("Café \uD83C\uDF69"); // NFC: é is one code point.
    dumpStringData("Café \uD83C\uDF69"); // NFD: e is one code point, its accent is another.
  }

  /**
   * Prints four views of {@code s}: the string itself, its UTF-16 length, its
   * Unicode code point count, and its UTF-8 byte size plus the hex of those bytes.
   */
  public void dumpStringData(String s) throws IOException {
    System.out.println(" " + s);
    System.out.println(" String.length: " + s.length());
    System.out.println("String.codePointCount: " + s.codePointCount(0, s.length()));
    System.out.println(" Utf8.size: " + Utf8.size(s));
    System.out.println(" UTF-8 bytes: " + ByteString.encodeUtf8(s).hex());
    System.out.println();
  }

  public static void main(String... args) throws Exception {
    new ExploreCharsets().run();
  }
}
| 8,557 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/Randoms.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.IOException;
import java.util.Random;
import okio.Buffer;
import okio.BufferedSource;
import okio.Okio;
import okio.Source;
import okio.Timeout;
/** Demonstrates a Source whose bytes are produced by a seeded {@link Random}. */
public final class Randoms {
  public void run() throws IOException, InterruptedException {
    Random random = new Random(3782615686L);
    BufferedSource source = Okio.buffer(new RandomSource(random, 5));
    System.out.println(source.readUtf8());
  }

  /** A bounded Source that serves pseudo-random bytes until {@code bytesLeft} is exhausted. */
  static final class RandomSource implements Source {
    private final Random random;
    private long bytesLeft; // Set to -1 once closed.

    RandomSource(Random random, long bytesLeft) {
      this.random = random;
      this.bytesLeft = bytesLeft;
    }

    @Override public long read(Buffer sink, long byteCount) throws IOException {
      if (bytesLeft == -1L) throw new IllegalStateException("closed");
      if (bytesLeft == 0L) return -1L;

      // Clamp the request to an int-sized chunk, then to what remains.
      long toProduce = Math.min(byteCount, Integer.MAX_VALUE);
      toProduce = Math.min(toProduce, bytesLeft);

      // Random is most efficient when computing 32 bits of randomness. Start with that.
      int wholeInts = (int) (toProduce / 4);
      int emitted = 0;
      while (emitted < wholeInts) {
        sink.writeInt(random.nextInt());
        emitted++;
      }

      // For the 1-3 trailing bytes draw one more word and peel off its low bytes,
      // discarding the 24, 16 or 8 unused random bits.
      int trailing = (int) (toProduce - wholeInts * 4);
      if (trailing > 0) {
        int word = random.nextInt();
        for (int i = 0; i < trailing; i++) {
          sink.writeByte(word & 0xff);
          word >>>= 8;
        }
      }

      bytesLeft -= toProduce;
      return toProduce;
    }

    @Override public Timeout timeout() {
      return Timeout.NONE;
    }

    @Override public void close() throws IOException {
      bytesLeft = -1L;
    }
  }

  public static void main(String... args) throws Exception {
    new Randoms().run();
  }
}
| 8,558 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/SourceMarker.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.IOException;
import okio.Buffer;
import okio.BufferedSource;
import okio.ForwardingSource;
import okio.Okio;
import okio.Source;
/**
* Builds a buffered source that can rewind to a marked position earlier in the stream.
*
* <p>Mark potential positions to rewind back to with {@link #mark}; rewind back to these positions
* with {@link #reset}. Both operations apply to the position in the {@linkplain #source() buffered
* source}; resetting will impact the buffer.
*
* <p>When marking it is necessary to specify how much data to retain. Once you advance above this
* limit, the mark is discarded and resetting is not permitted. This may be used to lookahead a
* fixed number of bytes without loading an entire stream into memory. To reset an arbitrary
* number of bytes use {@code mark(Long#MAX_VALUE)}.
*/
public final class SourceMarker {
  /*
   * This class wraps the underlying source in a MarkSource to support mark and reset. It creates a
   * BufferedSource for the caller so that it can track its offsets and manipulate its buffer.
   */
  /**
   * The offset into the underlying source. To compute the user's offset start with this and
   * subtract userBuffer.size().
   */
  long offset;
  /** The offset of the earliest mark, or -1 for no mark. */
  long mark = -1L;
  /** The offset of the latest readLimit, or -1 for no mark. */
  long limit = -1L;
  /** True once the user-facing source (and thus the MarkSource) has been closed. */
  boolean closed;
  final MarkSource markSource;
  final BufferedSource userSource;
  /** A copy of the underlying source's data beginning at {@code mark}. */
  final Buffer markBuffer;
  /** Just the userSource's buffer. */
  final Buffer userBuffer;
  public SourceMarker(Source source) {
    this.markSource = new MarkSource(source);
    this.markBuffer = new Buffer();
    this.userSource = Okio.buffer(markSource);
    this.userBuffer = userSource.getBuffer();
  }
  public BufferedSource source() {
    return userSource;
  }
  /**
   * Marks the current position in the stream as one to potentially return back to. Returns the
   * offset of this position. Call {@link #reset(long)} with this position to return to it later. It
   * is an error to call {@link #reset(long)} after consuming more than {@code readLimit} bytes from
   * {@linkplain #source() the source}.
   */
  public long mark(long readLimit) throws IOException {
    if (readLimit < 0L) {
      throw new IllegalArgumentException("readLimit < 0: " + readLimit);
    }
    if (closed) {
      throw new IllegalStateException("closed");
    }
    // Mark the current position in the buffered source.
    long userOffset = offset - userBuffer.size();
    // If this is a new mark promote userBuffer data into the markBuffer, and rewind
    // `offset` so future reads replay from the mark.
    if (mark == -1L) {
      markBuffer.writeAll(userBuffer);
      mark = userOffset;
      offset = userOffset;
    }
    // Grow the limit if necessary (marks may be stacked; keep the furthest limit).
    long newMarkBufferLimit = userOffset + readLimit;
    if (newMarkBufferLimit < 0) newMarkBufferLimit = Long.MAX_VALUE; // Long overflow!
    limit = Math.max(limit, newMarkBufferLimit);
    return userOffset;
  }
  /** Resets {@linkplain #source() the source} to {@code userOffset}. */
  public void reset(long userOffset) throws IOException {
    if (closed) {
      throw new IllegalStateException("closed");
    }
    if (userOffset < mark // userOffset is before mark.
        || userOffset > limit // userOffset is beyond limit.
        || userOffset > mark + markBuffer.size() // userOffset is in the future.
        || offset - userBuffer.size() > limit) { // Stream advanced beyond limit.
      throw new IOException("cannot reset to " + userOffset + ": out of range");
    }
    // Clear userBuffer to cause data at 'offset' to be returned by the next read.
    offset = userOffset;
    userBuffer.clear();
  }
  /** Forwarding source that tees fresh reads into {@code markBuffer} while a mark is active. */
  final class MarkSource extends ForwardingSource {
    MarkSource(Source source) {
      super(source);
    }
    @Override public long read(Buffer sink, long byteCount) throws IOException {
      if (closed) {
        throw new IllegalStateException("closed");
      }
      // If there's no mark, go to the underlying source.
      if (mark == -1L) {
        long result = super.read(sink, byteCount);
        if (result == -1L) return -1L;
        offset += result;
        return result;
      }
      // If we can read from markBuffer, do that (replaying bytes after a reset).
      if (offset < mark + markBuffer.size()) {
        long posInBuffer = offset - mark;
        long result = Math.min(byteCount, markBuffer.size() - posInBuffer);
        markBuffer.copyTo(sink, posInBuffer, result);
        offset += result;
        return result;
      }
      // If we can write to markBuffer, do that (recording fresh bytes up to the limit).
      if (offset < limit) {
        long byteCountBeforeLimit = limit - (mark + markBuffer.size());
        long result = super.read(markBuffer, Math.min(byteCount, byteCountBeforeLimit));
        if (result == -1L) return -1L;
        markBuffer.copyTo(sink, markBuffer.size() - result, result);
        offset += result;
        return result;
      }
      // Attempt to read past the limit. Data will not be saved.
      long result = super.read(sink, byteCount);
      if (result == -1L) return -1L;
      // We read past the limit. Discard marked data.
      markBuffer.clear();
      mark = -1L;
      limit = -1L;
      return result;
    }
    @Override public void close() throws IOException {
      if (closed) return;
      closed = true;
      markBuffer.clear();
      super.close();
    }
  }
}
| 8,559 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/GoldenValue.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import okio.Buffer;
import okio.ByteString;
/**
 * Golden-value test for Java serialization: serializes a {@link Point}, prints its
 * base64 form, then checks that a previously captured encoding still decodes to an
 * equal Point.
 */
public final class GoldenValue {
  public void run() throws Exception {
    Point point = new Point(8.0, 15.0);

    ByteString pointBytes = serialize(point);
    System.out.println(pointBytes.base64());

    ByteString goldenBytes = ByteString.decodeBase64("rO0ABXNyAB5va2lvLnNhbXBsZ"
        + "XMuR29sZGVuVmFsdWUkUG9pbnTdUW8rMji1IwIAAkQAAXhEAAF5eHBAIAAAAAAAAEAuA"
        + "AAAAAAA");
    Point decoded = (Point) deserialize(goldenBytes);
    assertEquals(point, decoded);
  }

  /** Java-serializes {@code o} into a ByteString via an in-memory Buffer. */
  private ByteString serialize(Object o) throws IOException {
    Buffer sink = new Buffer();
    try (ObjectOutputStream out = new ObjectOutputStream(sink.outputStream())) {
      out.writeObject(o);
    }
    return sink.readByteString();
  }

  /** Deserializes exactly one object from {@code byteString}, rejecting trailing bytes. */
  private Object deserialize(ByteString byteString) throws IOException, ClassNotFoundException {
    Buffer buffered = new Buffer();
    buffered.write(byteString);
    try (ObjectInputStream in = new ObjectInputStream(buffered.inputStream())) {
      Object decoded = in.readObject();
      if (in.read() != -1) throw new IOException("Unconsumed bytes in stream");
      return decoded;
    }
  }

  static final class Point implements Serializable {
    double x;
    double y;

    Point(double x, double y) {
      this.x = x;
      this.y = y;
    }
  }

  /** Fails with AssertionError unless both coordinates match exactly. */
  private void assertEquals(Point a, Point b) {
    boolean sameCoordinates = a.x == b.x && a.y == b.y;
    if (!sameCoordinates) throw new AssertionError();
  }

  public static void main(String... args) throws Exception {
    new GoldenValue().run();
  }
}
| 8,560 |
0 | Create_ds/okio/samples/src/jvmMain/java/okio | Create_ds/okio/samples/src/jvmMain/java/okio/samples/ByteChannelSource.java | /*
* Copyright (C) 2018 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio.samples;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import okio.Buffer;
import okio.Source;
import okio.Timeout;
/**
* Creates a Source around a ReadableByteChannel and efficiently reads data using an UnsafeCursor.
*
* <p>This is a basic example showing another use for the UnsafeCursor. Using the
* {@link ByteBuffer#wrap(byte[], int, int) ByteBuffer.wrap()} along with access to Buffer segments,
* a ReadableByteChannel can be given direct access to Buffer data without having to copy the data.
*/
final class ByteChannelSource implements Source {
  private final ReadableByteChannel channel;
  private final Timeout timeout;
  // Reused across reads; attached per call via try-with-resources below.
  private final Buffer.UnsafeCursor cursor = new Buffer.UnsafeCursor();

  ByteChannelSource(ReadableByteChannel channel, Timeout timeout) {
    this.channel = channel;
    this.timeout = timeout;
  }

  @Override public long read(Buffer sink, long byteCount) throws IOException {
    if (!channel.isOpen()) throw new IllegalStateException("closed");
    try (Buffer.UnsafeCursor ignored = sink.readAndWriteUnsafe(cursor)) {
      timeout.throwIfReached();
      long oldSize = sink.size();
      // Speculatively grow the buffer by up to 8 KiB and let the channel fill the
      // new segment directly, avoiding an intermediate copy.
      int length = (int) Math.min(8192, byteCount);
      cursor.expandBuffer(length);
      int read = channel.read(ByteBuffer.wrap(cursor.data, cursor.start, length));
      if (read == -1) {
        // Channel exhausted: undo the speculative expansion and signal EOF.
        cursor.resizeBuffer(oldSize);
        return -1;
      } else {
        // Trim the expansion down to the bytes actually read.
        cursor.resizeBuffer(oldSize + read);
        return read;
      }
    }
  }

  @Override public Timeout timeout() {
    return timeout;
  }

  @Override public void close() throws IOException {
    channel.close();
  }
}
| 8,561 |
0 | Create_ds/airavata-sandbox/gsoc2022/smilesdb/Server/src/test/java/com | Create_ds/airavata-sandbox/gsoc2022/smilesdb/Server/src/test/java/com/smiles/SpringServerApplicationTests.java | package com.smiles;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class SpringServerApplicationTests {
    /**
     * Smoke test: passes when the Spring application context can be created and
     * wired without throwing. Failures indicate broken configuration or beans.
     */
    @Test
    void contextLoads() {
    }
}
| 8,562 |
0 | Create_ds/airavata-sandbox/gsoc2022/smilesdb/Server/src/main/java/com | Create_ds/airavata-sandbox/gsoc2022/smilesdb/Server/src/main/java/com/smiles/CalcInfoImpl.java | package com.smiles;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.Filters;
import io.grpc.Status;
import io.grpc.stub.StreamObserver;
import org.bson.Document;
import org.bson.types.ObjectId;
public class CalcInfoImpl extends CalcInfoServiceGrpc.CalcInfoServiceImplBase {
/* >>>>>>>> Connecting to MongoDB (the BSON document) >>>>>>>>>>>
* MongoDB Hierarchy: Database -> Collection -> Record
* Connecting port: localhost:27017 (default port)
* Database name: SMILESDataModels
* Collection name: InfoDataModel
* >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>*/
private final MongoClient mongoClient = MongoClients.create("mongodb://localhost:27017");
private final MongoDatabase mongoDatabase = mongoClient.getDatabase("smilesDB");
private final MongoCollection<org.bson.Document> mongoCollection = mongoDatabase.getCollection("calcInfo");
/* <<<<<<<<<<< End of MongoDB connection parameters <<<<<<<<<<<<<*/
/* Writing CRUD operation to retrieve the data from Database
* Emoji representation: ✅ Built Success, 🔅 On progress/Failed, ❌ Not started.
* ✅ CREATE/PUT operation.
* ✅ READ/GET operation, show the data by ID provided.
* ❌ UPDATE operation.
* ❌ DELETE operation.
* ✅ LIST operation, show the available data in collection, iteration.
* NOTE: Only the LIST method is essential for displaying the data from record.*/
// Implementing the CREATE operation method
// ✅ Built Success
@Override
public void createCalcinfo(CreateInfoRequest request, StreamObserver<CreateInfoResponse> responseObserver) {
System.out.println(" \n\n ----------- INSERT OPERATION ---------- ");
System.out.println("Received request for indexing a data to MongoDB");
CalcInfo calcInfo = request.getCalcInfo();
System.out.println("Running INSERT operation");
// calcinfo_id is not passed in CREATE method
Document document = new Document("calcinfo_id", calcInfo.getCalcinfoId())
.append("smiles", calcInfo.getSmiles())
.append("nbasis", calcInfo.getNbasis())
.append("nmo", calcInfo.getNmo())
.append("nalpha", calcInfo.getNalpha())
.append("nbeta", calcInfo.getNbeta())
.append("natom", calcInfo.getNatom())
.append("energy", calcInfo.getEnergy());
// Command to insert a document into a database -> collection
mongoCollection.insertOne(document);
System.out.println("Inserted an entity to mongoDB record");
// Get the ID from MongoDb
String id = document.getObjectId("_id").toString();
System.out.println("Entity ID: " + id);
System.out.println("----------------------------------------");
// Create MongoResponse
CreateInfoResponse infoResponse = CreateInfoResponse.newBuilder().setCalcInfo(
// CalcInfo.newBuilder()
// .setCalcinfoId(calcInfo.getCalcinfoId())
// .setSmiles(calcInfo.getSmiles())
// .setNbasis(calcInfo.getNbasis())
// .setNmo(calcInfo.getNmo())
// .setNalpha(calcInfo.getNalpha())
// .setNbeta(calcInfo.getNbeta())
// .setNatom(calcInfo.getNatom())
// .setEnergy(calcInfo.getEnergy())
calcInfo.toBuilder().setCalcinfoId(id).build()
)
.build();
responseObserver.onNext(infoResponse);
responseObserver.onCompleted();
}
// Implementing the READ/GET operation method
// ✅ Built Success
@Override
public void readCalcInfo(ReadInfoRequest request, StreamObserver<ReadInfoResponse> responseObserver) {
// System.out.println(" \n\n ----------- GET/POST OPERATION ---------- ");
// System.out.println("Received READ CalcInfo Request");
// // Get CalcInfo ID
// String infoId = request.getId();
// Document document = null;
// try {
// // Find collection: Implement Filters.eq()
// document = (Document) mongoCollection.find(eq("_id", new ObjectId(infoId))).first();
// // from the LIST, fetch the first one
// }catch (Exception e){
// // e.printStackTrace();
// responseObserver.onError(Status.NOT_FOUND
// .withDescription("CalcInfo is not found for ID: " + infoId)
// .augmentDescription(e.getLocalizedMessage())
// .asRuntimeException());
// }
// System.out.println("Searching for document");
// if (document == null){
// // Since document is not available
// System.out.println("Info Data is not found");
// responseObserver.onError(Status.NOT_FOUND
// .withDescription("Info is not found for ID: " + infoId)
// .asRuntimeException());
// }else {
// System.out.println("Info Data is found");
// CalcInfo calcInfo = documentToCalcInfo(document);
// responseObserver.onNext(
// ReadInfoResponse.newBuilder()
// .setCalcInfo(calcInfo).build());
// System.out.println("Sent the Response");
// responseObserver.onCompleted();
// System.out.println("Server Job Done");
// System.out.println("----------------------------------------");
// }
System.out.println("Received request for Fetching a Blog from MongoDB.");
Document fetchedDocFromMongo =
mongoCollection.find(Filters.eq("_id", new ObjectId(request.getId()))).first();
if (fetchedDocFromMongo == null) {
responseObserver
.onError(Status.NOT_FOUND.withDescription("No blog exists with this Id.").asRuntimeException());
} else {
ReadInfoResponse fetchedBlogResponse = ReadInfoResponse.newBuilder()
.setCalcInfo(CalcInfo.newBuilder()
.setSmiles(fetchedDocFromMongo.getString("smiles"))
.setCalcinfoId(String.valueOf(fetchedDocFromMongo.getObjectId("_id")))
.setNbasis(Long.parseLong(String.valueOf(fetchedDocFromMongo.getLong("nbasis"))))
.setNmo(Long.parseLong(String.valueOf(fetchedDocFromMongo.getLong("nmo"))))
.setNalpha(fetchedDocFromMongo.getLong("nalpha"))
.setNbeta(fetchedDocFromMongo.getLong("nbeta"))
.setNatom(fetchedDocFromMongo.getLong("natom"))
.setEnergy(fetchedDocFromMongo.getDouble("energy"))
.build())
.build();
responseObserver.onNext(fetchedBlogResponse);
}
responseObserver.onCompleted();
}
private CalcInfo documentToCalcInfo(Document document) {
return CalcInfo.newBuilder()
.setSmiles(document.getString("smiles"))
.setCalcinfoId(String.valueOf(document.getObjectId("_id")))
.setNbasis(Long.parseLong(String.valueOf(document.getString("nbasis"))==""?"0":""))
// .setNbasis(Long.parseLong(String.valueOf(document.getString("nbasis").equals("")?document.getString("nbasis"):"0")))
// .setNbasis(document.getLong("nbasis")==null?-344:document.getLong("nbasis"))
.setNmo(Long.parseLong(String.valueOf(document.getLong("nmo"))))
.setNalpha(document.getLong("nalpha"))
.setNbeta(document.getLong("nbeta"))
.setNatom(document.getLong("natom"))
.setEnergy(document.getDouble("energy"))
.build();
}
@Override
public void updateCalcInfo(UpdateInfoRequest request, StreamObserver<UpdateInfoResponse> responseObserver) {
super.updateCalcInfo(request, responseObserver);
}
@Override
public void deleteCalcInfo(DeleteInfoRequest request, StreamObserver<DeleteInfoResponse> responseObserver) {
super.deleteCalcInfo(request, responseObserver);
}
// Implementing the LIST operation method
// ✅ Built Success
@Override
public void listCalcInfo(ListInfoRequest request, StreamObserver<ListInfoResponse> responseObserver) {
System.out.println(" \n\n ----------- LIST OPERATION ---------- ");
System.out.println("Received a request to LIST the Info data.");
// Searching for the data from mongo collection (iteration: document by document)
mongoCollection.find().iterator().forEachRemaining(document -> responseObserver.onNext(
ListInfoResponse.newBuilder()
.setCalcInfo(documentToCalcInfo(document)).build()
));
System.out.println("Successfully displayed the Info Data from Mongo Collection");
System.out.println("----------------------------------------");
responseObserver.onCompleted();
}
} | 8,563 |
0 | Create_ds/airavata-sandbox/gsoc2022/smilesdb/Server/src/main/java/com | Create_ds/airavata-sandbox/gsoc2022/smilesdb/Server/src/main/java/com/smiles/SpringServerApplication.java | package com.smiles;
import io.grpc.Server;
import io.grpc.ServerBuilder;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import java.io.IOException;
@SpringBootApplication
@SpringBootApplication
public class SpringServerApplication {

    /**
     * Boots the Spring context, then starts a gRPC server on port 50051
     * hosting the CalcInfo and Molecule services, and blocks until the
     * server terminates. A shutdown hook stops the server on JVM exit.
     */
    public static void main(String[] args) throws IOException, InterruptedException {
        int localPort = 50051;
        SpringApplication.run(SpringServerApplication.class, args);
        System.out.println("TomCat is Running Successfully");
        System.out.println("Starting the gRPC services for SMILES Data Models");
        Server server = ServerBuilder.forPort(localPort)
                .addService(new CalcInfoImpl())
                .addService(new MoleculeImpl())
                .build();
        server.start();
        System.out.println("Started gRPC services at port: localhost: " + localPort);
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            System.out.println("Received Shutdown Request");
            // `server` is already a Server; the previous (Server) cast was redundant.
            server.shutdown();
            System.out.println("Successfully stopped the server");
        }));
        server.awaitTermination();
    }
}
| 8,564 |
0 | Create_ds/airavata-sandbox/gsoc2022/smilesdb/Server/src/main/java/com | Create_ds/airavata-sandbox/gsoc2022/smilesdb/Server/src/main/java/com/smiles/MoleculeImpl.java | package com.smiles;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import io.grpc.Status;
import io.grpc.stub.StreamObserver;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.jetbrains.annotations.NotNull;
import static com.mongodb.client.model.Filters.eq;
public class MoleculeImpl extends MoleculeServiceGrpc.MoleculeServiceImplBase {
/* >>>>>>>> Connecting to MongoDB (the BSON document) >>>>>>>>>>>
* MongoDB Hierarchy: Database -> Collection -> Record
* Connecting port: localhost:27017 (default port)
* Database name: SMILESDataModels
* Collection name: InfoDataModel
* >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>*/
private final MongoClient mongoClient = MongoClients.create("mongodb://localhost:27017");
private final MongoDatabase mongoDatabase = mongoClient.getDatabase("smilesDB");
// private final MongoDatabase mongoDatabase = mongoClient.getDatabase("smilestest1");
private final MongoCollection<org.bson.Document> mongoCollection = mongoDatabase.getCollection("molecule");
// private final MongoCollection<org.bson.Document> mongoCollection = mongoDatabase.getCollection("molecule");
/* <<<<<<<<<<< End of MongoDB connection parameters <<<<<<<<<<<<<*/
/* Writing CRUD operation to retrieve the data from Database
* Emoji representation: ✅ Built Success, 🔅 On progress/Failed, ❌ Not started.
* ✅ CREATE/PUT operation.
* ✅ READ/GET operation, show the data by ID provided.
* ❌ UPDATE operation.
* ❌ DELETE operation.
* ✅ LIST operation, show the available data in collection, iteration.
* NOTE: Only the LIST method is essential for displaying the data from record.*/
// Implementing the CREATE operation method
// ✅ Built Success
@Override
public void createMolecule(CreateMoleculeRequest request, StreamObserver<CreateMoleculeResponse> responseObserver) {
System.out.println(" \n\n ----------- INSERT OPERATION ---------- ");
System.out.println("Received request for indexing a data to MongoDB");
Molecule molecule = request.getMolecule();
System.out.println("Running INSERT operation");
Document document = new Document("mol_id", molecule.getMolId())
.append("cas_nr", molecule.getCasNr())
.append("smiles", molecule.getSmiles())
.append("smiles_stereo", molecule.getSmilesStereo())
.append("inchi", molecule.getInchi())
.append("molfile_blob_source", molecule.getMolfileBlobSource())
.append("emp_formula", molecule.getEmpFormula())
.append("emp_formula_sort", molecule.getEmpFormulaSort())
.append("emp_formula_source", molecule.getEmpFormulaSource())
.append("mw", molecule.getMw())
.append("mw_monoiso", molecule.getMwMonoiso())
.append("rdb", molecule.getRdb())
.append("mw_source", molecule.getMwSource())
.append("validated_by", molecule.getValidatedBy())
.append("journal", molecule.getJournal())
.append("auth_of_intr", molecule.getAuthOfIntr())
.append("jour_cit", molecule.getJourCit())
.append("year_publ", molecule.getYearPubl())
.append("doi_link", molecule.getDoiLink())
.append("comp_class", molecule.getCompClass())
.append("cuniq", molecule.getCuniq())
.append("calc_perf", molecule.getCalcPerf())
.append("org_met", molecule.getOrgMet())
.append("mol_chrg", molecule.getMolChrg())
.append("state_ofmat", molecule.getStateOfmat())
.append("color_white", molecule.getColorWhite())
.append("color_uv", molecule.getColorUv())
.append("absorb_max", molecule.getAbsorbMax())
.append("solvent_ae", molecule.getSolventAe())
.append("absorb", molecule.getAbsorb())
.append("conc", molecule.getConc())
.append("extinc", molecule.getExtinc())
.append("emis_max", molecule.getEmisMax())
.append("temp_abs", molecule.getTempAbs())
.append("emis_qy", molecule.getEmisQy())
.append("temp_ems", molecule.getTempEms())
.append("lifetime", molecule.getLifetime())
.append("temp_cv", molecule.getTempCv())
.append("reduc_pot", molecule.getReducPot())
.append("hw_or_pk_rp", molecule.getHwOrPkRp())
.append("oxid_pot", molecule.getOxidPot())
.append("hw_or_pk_op", molecule.getHwOrPkOp())
.append("solvent_cv", molecule.getSolventCv())
.append("electrolyte", molecule.getElectrolyte())
.append("ref_electrd", molecule.getRefElectrd())
.append("inter_thngs", molecule.getInterThngs())
.append("density_20", molecule.getDensity20())
.append("density_20_source", molecule.getDensity20Source())
.append("default_warn_level", molecule.getDefaultWarnLevel())
.append("n_20", molecule.getN20())
.append("n_20_source", molecule.getN20Source())
.append("mp_low", molecule.getMpLow())
.append("mp_high", molecule.getMpHigh())
.append("mp_source", molecule.getMpSource())
.append("bp_low", molecule.getBpLow())
.append("bp_high", molecule.getBpHigh())
.append("bp_press", molecule.getBpPress())
.append("press_unit", molecule.getPressUnit())
.append("bp_source", molecule.getBpSource())
.append("safety_r", molecule.getSafetyR())
.append("safety_h", molecule.getSafetyH())
.append("safety_s", molecule.getSafetyS())
.append("safety_p", molecule.getSafetyP())
.append("safety_text", molecule.getSafetyText())
.append("safety_sym", molecule.getSafetySym())
.append("safety_sym_ghs", molecule.getSafetySymGhs())
.append("safety_source", molecule.getSafetySource())
.append("comment_mol", molecule.getCommentMol());
// Command to insert a document into a database -> collection
mongoCollection.insertOne(document);
System.out.println("Inserted an entity to MongoDB record");
//Get the ID from MongoDB
String id = document.getObjectId("_id").toString();
System.out.println("Entity ID:" + id);
System.out.println("----------------------------------------");
// Create Mongo Response
CreateMoleculeResponse moleculeResponse = CreateMoleculeResponse.newBuilder().setMolecule(
molecule.toBuilder().setMolId(id).build()
).build();
responseObserver.onNext(moleculeResponse);
responseObserver.onCompleted();
}
// Implementing the READ/GET operation method
// ✅ Built Success
@Override
public void readMolecule(ReadMoleculeRequest request, StreamObserver<ReadMoleculeResponse> responseObserver) {
// System.out.println(" \n\n ----------- GET/POST OPERATION ---------- ");
// System.out.println("Received READ Molecule Request");
//
// // Get Molecule ID
// String molId = request.getId();
//
// Document document = null;
// try{
// document = (Document) mongoCollection.find(eq("_id", new ObjectId(molId))).first();
// }catch (Exception e){
// responseObserver.onError(Status.NOT_FOUND
// .withDescription("Molecule is not found for ID: " + molId)
// .augmentDescription(e.getLocalizedMessage())
// .asRuntimeException());
// }
// System.out.println("Searching for document");
// if (document == null){
// System.out.println("Molecule is not found");
// responseObserver.onError(Status.NOT_FOUND
// .withDescription("Molecule is not found for ID: " + molId)
// .asRuntimeException());
// }else {
// System.out.println("Molecule is found");
//
// Molecule molecule = documentToMolecule(document);
// responseObserver.onNext(
// ReadMoleculeResponse.newBuilder()
// .setMolecule(molecule).build());
// System.out.println("Sent the Response");
//
// responseObserver.onCompleted();
// System.out.println("Server Job Done");
// System.out.println("----------------------------------------");
// }
System.out.println("Received request for Fetching a Blog from MongoDB.");
Document fetchedDocFromMongo =
mongoCollection.find(eq("_id", new ObjectId(request.getId()))).first();
if (fetchedDocFromMongo == null) {
responseObserver
.onError(Status.NOT_FOUND.withDescription("No blog exists with this Id.").asRuntimeException());
} else {
System.out.println("Found the Data");
ReadMoleculeResponse fetchedBlogResponse = ReadMoleculeResponse.newBuilder()
.setMolecule(Molecule.newBuilder()
.setMolId(String.valueOf(fetchedDocFromMongo.getObjectId("_id")))
.setCasNr(fetchedDocFromMongo.getString("cas_nr"))
.setSmiles(fetchedDocFromMongo.getString("smiles"))
.setSmilesStereo(fetchedDocFromMongo.getString("smiles_stereo"))
.setInchi(fetchedDocFromMongo.getString("inchi"))
.setMolfileBlobSource(fetchedDocFromMongo.getString("molfile_blob_source"))
.setEmpFormula(fetchedDocFromMongo.getString("emp_formula"))
.setEmpFormulaSort(fetchedDocFromMongo.getString("emp_formula_sort"))
.setEmpFormulaSource(fetchedDocFromMongo.getString("emp_formula_source"))
.setMw(fetchedDocFromMongo.getDouble("mw") == null ? -99 : fetchedDocFromMongo.getDouble("mw"))
.setMwMonoiso(fetchedDocFromMongo.getDouble("mw_monoiso") == null ? -99 : fetchedDocFromMongo.getDouble("mw_monoiso"))
.setRdb(fetchedDocFromMongo.getDouble("rdb") == null ? -99 : fetchedDocFromMongo.getDouble("rdb"))
.setRdb(fetchedDocFromMongo.getDouble("rdb") == null ? -99 : fetchedDocFromMongo.getDouble("rdb"))
.setMwSource(fetchedDocFromMongo.getString("mw_source"))
.setValidatedBy(fetchedDocFromMongo.getString("validated_by"))
.setJournal(fetchedDocFromMongo.getString("journal"))
.setAuthOfIntr(fetchedDocFromMongo.getString("auth_of_intr"))
.setJourCit(fetchedDocFromMongo.getString("jour_cit"))
.setYearPubl(fetchedDocFromMongo.getString("year_publ"))
.setDoiLink(fetchedDocFromMongo.getString("doi_link"))
.setCompClass(fetchedDocFromMongo.getString("comp_class"))
.setCuniq(fetchedDocFromMongo.getString("cuniq"))
.setCalcPerf(fetchedDocFromMongo.getString("calc_perf"))
.setOrgMet(fetchedDocFromMongo.getString("org_met"))
.setMolChrg(fetchedDocFromMongo.getLong("mol_chrg") == null ? -99 : fetchedDocFromMongo.getLong("mol_chrg"))
.setStateOfmat(fetchedDocFromMongo.getString("state_ofmat"))
.setColorWhite(fetchedDocFromMongo.getString("color_white"))
.setColorUv(fetchedDocFromMongo.getString("color_uv"))
.setAbsorbMax(fetchedDocFromMongo.getDouble("absorb_max") == null ? -99 : fetchedDocFromMongo.getDouble("absorb_max"))
.setSolventAe(fetchedDocFromMongo.getString("solvent_ae"))
.setAbsorb(fetchedDocFromMongo.getDouble("absorb") == null ? -99 : fetchedDocFromMongo.getDouble("absorb"))
.setConc(fetchedDocFromMongo.getDouble("conc") == null ? -99 : fetchedDocFromMongo.getDouble("conc"))
.setExtinc(fetchedDocFromMongo.getDouble("extinc") == null ? -99 : fetchedDocFromMongo.getDouble("extinc"))
.setEmisMax(fetchedDocFromMongo.getDouble("emis_max") == null ? -99 : fetchedDocFromMongo.getDouble("emis_max"))
.setTempAbs(fetchedDocFromMongo.getDouble("temp_abs") == null ? -99 : fetchedDocFromMongo.getDouble("temp_abs"))
.setEmisQy(fetchedDocFromMongo.getDouble("emis_qy") == null ? -99 : fetchedDocFromMongo.getDouble("emis_qy"))
.setTempEms(fetchedDocFromMongo.getDouble("temp_ems") == null ? -99 : fetchedDocFromMongo.getDouble("temp_ems"))
.setLifetime(fetchedDocFromMongo.getDouble("lifetime") == null ? -99 : fetchedDocFromMongo.getDouble("lifetime"))
.setTempCv(fetchedDocFromMongo.getDouble("temp_cv") == null ? -99 : fetchedDocFromMongo.getDouble("temp_cv"))
.setReducPot(fetchedDocFromMongo.getDouble("reduc_pot") == null ? -99 : fetchedDocFromMongo.getDouble("reduc_pot"))
.setHwOrPkRp(fetchedDocFromMongo.getString("hw_or_pk_rp"))
.setOxidPot(fetchedDocFromMongo.getDouble("oxid_pot") == null ? -99 : fetchedDocFromMongo.getDouble("oxid_pot"))
.setHwOrPkOp(fetchedDocFromMongo.getString("hw_or_pk_op"))
.setSolventCv(fetchedDocFromMongo.getString("solvent_cv"))
.setElectrolyte(fetchedDocFromMongo.getString("electrolyte"))
.setRefElectrd(fetchedDocFromMongo.getString("ref_electrd"))
.setInterThngs(fetchedDocFromMongo.getString("inter_thngs"))
.setDensity20(fetchedDocFromMongo.getDouble("density_20") == null ? -99 : fetchedDocFromMongo.getDouble("density_20"))
.setDensity20Source(fetchedDocFromMongo.getString("density_20_source"))
.setDefaultWarnLevel(fetchedDocFromMongo.getDouble("default_warn_level") == null ? -99 : fetchedDocFromMongo.getDouble("default_warn_level"))
.setN20(fetchedDocFromMongo.getDouble("n_20") == null ? -99 : fetchedDocFromMongo.getDouble("n_20"))
.setN20Source(fetchedDocFromMongo.getString("n_20_source"))
.setMpLow(fetchedDocFromMongo.getDouble("mp_low") == null ? -99 : fetchedDocFromMongo.getDouble("mp_low"))
.setMpHigh(fetchedDocFromMongo.getDouble("mp_high") == null ? -99 : fetchedDocFromMongo.getDouble("mp_high"))
.setMpSource(fetchedDocFromMongo.getString("mp_source"))
.setBpLow(fetchedDocFromMongo.getDouble("bp_low") == null ? -99 : fetchedDocFromMongo.getDouble("bp_low"))
.setBpHigh(fetchedDocFromMongo.getDouble("bp_high") == null ? -99 : fetchedDocFromMongo.getDouble("bp_high"))
.setBpPress(fetchedDocFromMongo.getDouble("bp_press") == null ? -99 : fetchedDocFromMongo.getDouble("bp_press"))
.setPressUnit(fetchedDocFromMongo.getString("press_unit"))
.setBpSource(fetchedDocFromMongo.getString("bp_source"))
.setSafetyR(fetchedDocFromMongo.getString("safety_r"))
.setSafetyH(fetchedDocFromMongo.getString("safety_h"))
.setSafetyS(fetchedDocFromMongo.getString("safety_s"))
.setSafetyP(fetchedDocFromMongo.getString("safety_p"))
.setSafetyText(fetchedDocFromMongo.getString("safety_text"))
.setSafetySym(fetchedDocFromMongo.getString("safety_sym"))
.setSafetySymGhs(fetchedDocFromMongo.getString("safety_sym_ghs"))
.setSafetySource(fetchedDocFromMongo.getString("safety_source"))
.setCommentMol(fetchedDocFromMongo.getString("comment_mol"))
.build())
.build();
responseObserver.onNext(fetchedBlogResponse);
}
responseObserver.onCompleted();
}
private Molecule documentToMolecule(Document document) {
return Molecule.newBuilder()
.setMolId(String.valueOf(document.getObjectId("_id")))
.setCasNr(document.getString("cas_nr"))
.setSmiles(document.getString("smiles"))
.setSmilesStereo(document.getString("smiles_stereo"))
.setInchi(document.getString("inchi"))
.setMolfileBlobSource(document.getString("molfile_blob_source"))
.setEmpFormula(document.getString("emp_formula"))
.setEmpFormulaSort(document.getString("emp_formula_sort"))
.setEmpFormulaSource(document.getString("emp_formula_source"))
.setMw(document.getDouble("mw"))
.setMwMonoiso(document.getDouble("mw_monoiso"))
.setRdb(document.getDouble("rdb"))
.setMwSource(document.getString("mw_source"))
.setValidatedBy(document.getString("validated_by"))
.setJournal(document.getString("journal"))
.setAuthOfIntr(document.getString("auth_of_intr"))
.setJourCit(document.getString("jour_cit"))
.setYearPubl(document.getString("year_publ"))
.setDoiLink(document.getString("doi_link"))
.setCompClass(document.getString("comp_class"))
.setCuniq(document.getString("cuniq"))
.setCalcPerf(document.getString("calc_perf"))
.setOrgMet(document.getString("org_met"))
// .setMolChrg(Long.parseLong(document.getString("mol_chrg").equals("")?"9999999":document.getString("mol_chrg")))
.setStateOfmat(document.getString("state_ofmat"))
.setColorWhite(document.getString("color_white"))
.setColorUv(document.getString("color_uv"))
.setAbsorbMax(document.getDouble("absorb_max"))
.setSolventAe(document.getString("solvent_ae"))
.setAbsorb(document.getDouble("absorb"))
.setConc(document.getDouble("conc"))
.setExtinc(document.getDouble("extinc"))
.setEmisMax(document.getDouble("emis_max"))
.setTempAbs(document.getDouble("temp_abs"))
.setEmisQy(document.getDouble("emis_qy"))
.setTempEms(document.getDouble("temp_ems"))
.setLifetime(document.getDouble("lifetime"))
.setTempCv(document.getDouble("temp_cv"))
.setReducPot(document.getDouble("reduc_pot"))
.setHwOrPkRp(document.getString("hw_or_pk_rp"))
.setOxidPot(document.getDouble("oxid_pot"))
.setHwOrPkOp(document.getString("hw_or_pk_op"))
.setSolventCv(document.getString("solvent_cv"))
.setElectrolyte(document.getString("electrolyte"))
.setRefElectrd(document.getString("ref_electrd"))
.setInterThngs(document.getString("inter_thngs"))
.setDensity20(document.getDouble("density_20"))
.setDensity20Source(document.getString("density_20_source"))
.setDefaultWarnLevel(document.getDouble("default_warn_level"))
.setN20(document.getDouble("n_20"))
.setN20Source(document.getString("n_20_source"))
.setMpLow(document.getDouble("mp_low"))
.setMpHigh(document.getDouble("mp_high"))
.setMpSource(document.getString("mp_source"))
.setBpLow(document.getDouble("bp_low"))
.setBpHigh(document.getDouble("bp_high"))
.setBpPress(document.getDouble("bp_press"))
.setPressUnit(document.getString("press_unit"))
.setBpSource(document.getString("bp_source"))
.setSafetyR(document.getString("safety_r"))
.setSafetyH(document.getString("safety_h"))
.setSafetyS(document.getString("safety_s"))
.setSafetyP(document.getString("safety_p"))
.setSafetyText(document.getString("safety_text"))
.setSafetySym(document.getString("safety_sym"))
.setSafetySymGhs(document.getString("safety_sym_ghs"))
.setSafetySource(document.getString("safety_source"))
.setCommentMol(document.getString("comment_mol"))
.build();
}
@Override
public void updateMolecule(UpdateMoleculeRequest request, StreamObserver<UpdateMoleculeResponse> responseObserver) {
super.updateMolecule(request, responseObserver);
}
@Override
public void deleteMolecule(DeleteMoleculeRequest request, StreamObserver<DeleteMoleculeResponse> responseObserver) {
super.deleteMolecule(request, responseObserver);
}
@Override
public void listMolecule(ListMoleculeRequest request, StreamObserver<ListMoleculeResponse> responseObserver) {
System.out.println(" \n\n ----------- LIST OPERATION ---------- ");
System.out.println("Received a request to LIST Molecule data.");
// Searching for the data from mongo collection(iteration:document by document)
mongoCollection.find().iterator().forEachRemaining(document -> responseObserver.onNext(
ListMoleculeResponse.newBuilder()
.setMolecule(documentToMolecule(document)).build()
));
System.out.println("Successfully retrieved the Info Data from Mongo Collection");
System.out.println("----------------------------------------");
responseObserver.onCompleted();
}
@Override
public int hashCode() {
return super.hashCode();
}
@Override
public String toString() {
return super.toString();
}
}
| 8,565 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/WorkflowEngine.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine;
import java.util.List;
import org.apache.airavata.workflow.engine.component.ComponentExecutionModel;
import org.apache.airavata.workflow.engine.component.ComponentExecutionInput;
import org.apache.airavata.workflow.engine.component.ComponentModel;
public class WorkflowEngine {
private ComponentExecutionModel model;
private List<ComponentExecutionInput> inputs;
public WorkflowEngine(ComponentExecutionModel model, List<ComponentExecutionInput> inputs) {
this.model=model;
this.inputs=inputs;
}
public ComponentModel getComponentModel(){
return model;
}
public void start(){
model.addInput(inputs.toArray(new ComponentExecutionInput[]{}));
model.activate(null);
}
public void stop(){}
public void pause(){}
} | 8,566 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/FlowManager.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine;
/**
 * Placeholder for flow-management logic; intentionally empty for now.
 * TODO: implement or remove once the workflow engine design settles.
 */
public class FlowManager {
}
| 8,567 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/LaunchFlowController.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine;
/**
 * Skeleton controller outlining the lifecycle of launching a workflow.
 * Every stage is currently a no-op stub awaiting implementation.
 */
public class LaunchFlowController {
    /** Stub: wire up the controllers required for the launch. */
    public void setupControllers(){

    }

    /** Stub: prepare the workflow/component model to execute. */
    public void setupModel(){

    }

    /** Stub: bind the execution inputs to the model. */
    public void setupInput(){

    }

    /** Stub: run the configured workflow. */
    public void execute(){

    }

    /** Stub: collect outputs after execution finishes. */
    public void retrieveOutput(){

    }
}
| 8,568 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/ComponentExecutionInput.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component;
import org.apache.airavata.workflow.model.wf.InvalidDataFormatException;
/**
 * A single named input value for a component execution, optionally marked
 * as optional. Name/type/value storage is inherited from
 * ComponentExecutionData.
 */
public class ComponentExecutionInput extends ComponentExecutionData {
	// Whether this input may be omitted at execution time; defaults to required.
	private boolean optional=false;

	public boolean isOptional() {
		return optional;
	}

	// Private on purpose: optionality is fixed at construction time.
	private void setOptional(boolean optional) {
		this.optional = optional;
	}

	/**
	 * @param name input name
	 * @param type data type label (e.g. "String")
	 * @param defaultValue NOTE(review): this parameter is accepted but never
	 *        used — presumably it should be stored via the base class; confirm
	 *        against ComponentExecutionData's API.
	 * @param value the actual input value
	 * @param optional whether the input may be omitted
	 * @throws InvalidDataFormatException if the value does not match the type
	 */
	public ComponentExecutionInput(String name, String type, Object defaultValue,
			Object value, boolean optional) throws InvalidDataFormatException {
		setName(name);
		setType(type);
		setValue(value);
		setOptional(optional);
	}

	/** Convenience: a required String-typed input with no default. */
	public ComponentExecutionInput(String name, Object value)
			throws InvalidDataFormatException {
		this(name,"String",null,value,false);
	}
}
| 8,569 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/ComponentExecutionListener.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component;
/**
 * Callback contract for observing the lifecycle of a component execution.
 * Implementations may throw to signal listener-side failures to the caller.
 */
public interface ComponentExecutionListener {
    /** Invoked when the execution's state changes. */
    public void stateChanged(ComponentExecutionModel executionModel) throws Exception;
    /** Invoked when the execution fails. */
    public void failed(ComponentExecutionModel executionModel) throws Exception;
    /** Invoked after this listener is registered on the execution model. */
    public void registered(ComponentExecutionModel executionModel) throws Exception;
    /** Invoked after this listener is removed from the execution model. */
    public void unregistered(ComponentExecutionModel executionModel) throws Exception;
}
| 8,570 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/ComponentRuntimeData.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component;
/**
 * Marker interface for data produced or consumed by a component at runtime.
 * Carries no methods; used only for type tagging.
 */
public interface ComponentRuntimeData {
}
| 8,571 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/ComponentExecutionModelFactory.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component;
import java.util.List;
import org.apache.airavata.workflow.engine.component.exception.ComponentExecutionModelProviderException;
import org.apache.airavata.workflow.model.graph.Node;
/**
 * Factory that resolves a {@link ComponentExecutionModel} for a workflow graph
 * node by delegating to the registered provider whose component name matches
 * the node's component name.
 */
public class ComponentExecutionModelFactory {
    // BUG FIX: this list was never initialized, so every lookup threw a
    // NullPointerException before a provider could even be consulted.
    private static List<ComponentExecutionModelProvider> providers =
            new java.util.ArrayList<ComponentExecutionModelProvider>();
    /**
     * Registers a provider so it can serve future
     * {@link #getComponentExecutionModel(Node)} calls.
     */
    public static void registerProvider(ComponentExecutionModelProvider provider) {
        providers.add(provider);
    }
    /**
     * Creates the execution model for the given node.
     *
     * @throws ComponentExecutionModelProviderException if no registered
     *         provider matches the node's component name
     */
    public static ComponentExecutionModel getComponentExecutionModel(Node node) throws ComponentExecutionModelProviderException{
        String componentType = node.getComponent().getName();
        for (ComponentExecutionModelProvider provider : providers) {
            if (provider.getComponentName().equals(componentType)){
                return provider.createComponentExecutionModel(node);
            }
        }
        throw new ComponentExecutionModelProviderException("Provider for '"+componentType+"' was not found!!!");
    }
}
| 8,572 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/ComponentExecutionModel.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component;
import java.util.List;
/**
 * Runtime contract for an executing workflow component: input/output port
 * access, lifecycle control (activate/execute/pause/cancel), dependency
 * lookup, state inspection, and execution-listener management.
 */
public interface ComponentExecutionModel extends ComponentModel {
    /** Supplies one or more input values to this execution. */
    public void addInput(ComponentExecutionInput...componentInputs);
    /** Wires this execution into the workflow execution graph. */
    public void setWorkflowExecutionGraph();
    /**
     * Attempts self execution once requirements are fulfilled
     */
    public void activate(ComponentExecutionModel parent);
    /** Returns true if {@link #activate} has enabled self execution. */
    public boolean isActivated();
    /** Returns true once all requirements for execution are fulfilled. */
    public boolean isReady();
    /** Prepares this execution, recording its parent execution model. */
    public void initialize(ComponentExecutionModel parent);
    /** Runs the component. */
    public void execute();
    /** Aborts a running execution. */
    public void cancelExecution();
    /** Suspends a running execution. */
    public void pauseExecution();
    /**
     * Disables self execution
     */
    public void deactivate();
    /***----------------------IO Data-----------------------***/
    /** Returns true if a value has been supplied for the named input port. */
    public boolean isInputPortSet(String portName);
    /** Returns the input for the named port, or null if not set. */
    public ComponentExecutionInput getInputPort(String portName);
    /** Returns all currently supplied inputs. */
    public List<ComponentExecutionInput> getInputPorts();
    /** Returns true if the named output port has produced a value. */
    public boolean isOutputPortSet(String portName);
    /** Returns the output for the named port, or null if not produced. */
    public ComponentExecutionOutput getOutputPort(String portName);
    /** Returns all produced outputs. */
    public List<ComponentExecutionOutput> getOutputPorts();
    /**
     * List of component executions this component execution is dependent on
     * @return
     */
    public List<ComponentExecutionModel> getDependents();
    /** Returns the current lifecycle state of this execution. */
    public ComponentExecutionState getComponentExecutionState();
    /** Registers a listener for lifecycle events. */
    public void registerExecutionListener(ComponentExecutionListener listener);
    /** Removes a previously registered listener. */
    public void removeExecutionListener(ComponentExecutionListener listener);
    /** Returns the currently registered listeners. */
    public ComponentExecutionListener[] getExecutionListeners();
}
| 8,573 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/ComponentExecutionModelProvider.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component;
import org.apache.airavata.common.utils.Version;
import org.apache.airavata.workflow.engine.component.exception.ComponentExecutionModelProviderException;
import org.apache.airavata.workflow.model.graph.Node;
/**
 * SPI for plugging component-specific execution models into the factory.
 * A provider serves exactly one component name (see {@link #getComponentName()}).
 */
public interface ComponentExecutionModelProvider {
    /** Creates an execution model for the given workflow graph node. */
    public ComponentExecutionModel createComponentExecutionModel(Node node) throws ComponentExecutionModelProviderException;
    /** Returns the component name this provider is responsible for. */
    public String getComponentName();
    /** Returns the version of this provider implementation. */
    public Version getVersion();
}
| 8,574 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/ComponentExecutionState.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component;
import java.util.Calendar;
import java.util.Date;
/**
 * Lifecycle states of a component execution, each carrying the timestamp at
 * which the state value was last updated.
 *
 * NOTE(review): enum constants are JVM-wide singletons, so {@code statusTime}
 * is shared by every execution that uses the same state value — confirm this
 * is intended rather than per-execution status tracking.
 */
public enum ComponentExecutionState {
    PENDING_INIT,//waiting for data or condition(s) to occur
    PENDING_EXECUTION,
    EXECUTING,
    PENDING_INTERACTION,//when paused due to synchronous controllers being triggered
    PAUSED,
    FINISHED,
    FAILED,
    CANCELLED;
    // Timestamp of the most recent update for this state value.
    private Date statusTime=Calendar.getInstance().getTime();
    /** Returns the time this state value was last updated. */
    public Date getStatusTime(){
        return statusTime;
    }
    /** Records the current time as this state's update time. */
    public void updateStatusTime(){
        this.statusTime=Calendar.getInstance().getTime();
    }
    /** Records the given time as this state's update time. */
    public void updateStatusTime(Date statusTime){
        this.statusTime=statusTime;
    }
    /** @deprecated misspelled; use {@link #updateStatusTime()} instead. */
    @Deprecated
    public void udpateStatusTime(){
        updateStatusTime();
    }
    /** @deprecated misspelled; use {@link #updateStatusTime(Date)} instead. */
    @Deprecated
    public void udpateStatusTime(Date statusTime){
        updateStatusTime(statusTime);
    }
}
| 8,575 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/ComponentModel.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component;
/**
 * Base contract for a workflow component model: exposes the model mode
 * (design vs. runtime) and the component's configuration.
 */
public interface ComponentModel {
    /** Returns whether this model is in design or runtime mode. */
    public ModelMode getMode();
    /** Returns the component configuration, or null if none has been set. */
    public ComponentConfiguration getComponentConfiguration();
    /** Sets the component configuration. */
    public void setComponentConfiguration(ComponentConfiguration configuration);
}
| 8,576 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/ModelMode.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component;
/**
 * Mode a component model can be in: editing a workflow design versus
 * executing it at runtime.
 */
public enum ModelMode {
    DESIGN_MODE,
    RUNTIME_MODE
}
| 8,577 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/ComponentConfiguration.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component;
/**
 * Marker interface for component configuration objects. Carries no methods;
 * concrete configurations define their own properties.
 */
public interface ComponentConfiguration {
}
| 8,578 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/ComponentExecutionData.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component;
/**
 * Mutable holder for a single piece of component-execution IO data:
 * a logical name, a declared type label, and the runtime value itself.
 * No validation is performed here; the type label is informational only.
 */
public class ComponentExecutionData {
    // Logical name of the port or parameter this value belongs to.
    private String name;
    // Declared type label (e.g. "String"); not enforced by this class.
    private String type;
    // The actual runtime value; unconstrained.
    private Object value;

    /** Creates an empty data holder; all properties start as null. */
    public ComponentExecutionData() {
    }

    /** Returns the logical name, or null if unset. */
    public String getName() {
        return name;
    }

    /** Returns the declared type label, or null if unset. */
    public String getType() {
        return type;
    }

    /** Returns the runtime value, or null if unset. */
    public Object getValue() {
        return value;
    }

    /** Sets the logical name. */
    public void setName(String name) {
        this.name = name;
    }

    /** Sets the declared type label. */
    public void setType(String type) {
        this.type = type;
    }

    /** Sets the runtime value. */
    public void setValue(Object value) {
        this.value = value;
    }
}
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/ComponentExecutionOutput.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component;
/**
 * Placeholder for an output value produced by a component execution.
 * Currently empty; output properties are not yet modeled.
 */
public class ComponentExecutionOutput {
}
| 8,580 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/impl/WorkflowExecutionModel.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component.impl;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.airavata.workflow.engine.component.ComponentExecutionInput;
import org.apache.airavata.workflow.engine.component.ComponentExecutionListener;
import org.apache.airavata.workflow.engine.component.ComponentExecutionModel;
import org.apache.airavata.workflow.engine.component.ComponentExecutionOutput;
import org.apache.airavata.workflow.engine.component.ComponentExecutionState;
/**
 * Runtime execution model for a workflow. Holds the supplied workflow inputs
 * (keyed by input name) and tracks activation and lifecycle state. Output
 * tracking, dependency tracking, and listener support are still stubs.
 */
public class WorkflowExecutionModel extends WorkflowModel implements ComponentExecutionModel{
    // Supplied inputs keyed by input name; lazily created on first access.
    private Map<String,ComponentExecutionInput> workflowInputs;
    // NOTE(review): starts out active before activate() is ever called —
    // confirm a freshly created model should be considered activated.
    private boolean active=true;
    private ComponentExecutionState state=ComponentExecutionState.PENDING_INIT;
    // Parent execution recorded by initialize(); not otherwise used yet.
    private ComponentExecutionModel parent;
    /** Stores each input under its name, replacing any previous value. */
    @Override
    public void addInput(ComponentExecutionInput... componentInputs) {
        for (ComponentExecutionInput executionInput : componentInputs) {
            getWorkflowInputs().put(executionInput.getName(), executionInput);
        }
    }
    // Lazily creates the input map on first access.
    private Map<String,ComponentExecutionInput> getWorkflowInputs(){
        if (workflowInputs==null){
            workflowInputs=new HashMap<String,ComponentExecutionInput>();
        }
        return workflowInputs;
    }
    @Override
    public void setWorkflowExecutionGraph() {
        // TODO: graph wiring not yet implemented
    }
    /** Marks the model active and immediately attempts execution. */
    @Override
    public void activate(ComponentExecutionModel parent) {
        active=true;
        execute();
    }
    @Override
    public boolean isActivated() {
        return active;
    }
    @Override
    public boolean isReady() {
        // TODO: readiness checks not yet implemented
        return false;
    }
    @Override
    public void execute() {
        // TODO: execution not yet implemented
    }
    @Override
    public void cancelExecution() {
        // TODO: cancellation not yet implemented
    }
    @Override
    public void pauseExecution() {
        // TODO: pausing not yet implemented
    }
    @Override
    public void deactivate() {
        active=false;
    }
    // BUG FIX: the three input-port accessors below previously ignored the
    // inputs stored by addInput() and always returned false/null.
    @Override
    public boolean isInputPortSet(String portName) {
        return getWorkflowInputs().containsKey(portName);
    }
    @Override
    public ComponentExecutionInput getInputPort(String portName) {
        return getWorkflowInputs().get(portName);
    }
    @Override
    public List<ComponentExecutionInput> getInputPorts() {
        return new java.util.ArrayList<ComponentExecutionInput>(getWorkflowInputs().values());
    }
    @Override
    public boolean isOutputPortSet(String portName) {
        // TODO: outputs not yet tracked
        return false;
    }
    @Override
    public ComponentExecutionOutput getOutputPort(String portName) {
        // TODO: outputs not yet tracked
        return null;
    }
    @Override
    public List<ComponentExecutionOutput> getOutputPorts() {
        // TODO: outputs not yet tracked
        return null;
    }
    @Override
    public List<ComponentExecutionModel> getDependents() {
        // TODO: dependency tracking not yet implemented
        return null;
    }
    @Override
    public ComponentExecutionState getComponentExecutionState() {
        return state;
    }
    @Override
    public void registerExecutionListener(ComponentExecutionListener listener) {
        // TODO: listeners not yet supported
    }
    @Override
    public void removeExecutionListener(ComponentExecutionListener listener) {
        // TODO: listeners not yet supported
    }
    @Override
    public ComponentExecutionListener[] getExecutionListeners() {
        // TODO: listeners not yet supported
        return null;
    }
    /** Records the parent execution model for later use. */
    @Override
    public void initialize(ComponentExecutionModel parent) {
        this.parent=parent;
    }
}
| 8,581 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/impl/WorkflowComponentConfiguration.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component.impl;
import org.apache.airavata.workflow.engine.component.ComponentConfiguration;
/**
 * Placeholder configuration for a workflow component. Currently carries no
 * properties; exists to satisfy the {@link ComponentConfiguration} contract.
 */
public class WorkflowComponentConfiguration implements ComponentConfiguration {
}
| 8,582 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/impl/WorkflowModel.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component.impl;
import org.apache.airavata.workflow.engine.component.ComponentConfiguration;
import org.apache.airavata.workflow.engine.component.ComponentModel;
import org.apache.airavata.workflow.engine.component.ModelMode;
/**
 * Base workflow component model: carries the model mode (design vs. runtime)
 * and the component's configuration.
 */
public class WorkflowModel implements ComponentModel {
    private ModelMode mode;
    // BUG FIX: setComponentConfiguration previously discarded its argument and
    // getComponentConfiguration always returned null; the pair now round-trips.
    private ComponentConfiguration configuration;
    @Override
    public ModelMode getMode() {
        return mode;
    }
    /** Sets the model mode; restricted to subclasses. */
    protected void setMode(ModelMode mode) {
        this.mode=mode;
    }
    @Override
    public ComponentConfiguration getComponentConfiguration() {
        return configuration;
    }
    @Override
    public void setComponentConfiguration(ComponentConfiguration configuration) {
        this.configuration = configuration;
    }
}
| 8,583 |
0 | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component | Create_ds/airavata-sandbox/workflow-engine/src/main/java/org/apache/airavata/workflow/engine/component/exception/ComponentExecutionModelProviderException.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.component.exception;
/**
 * Thrown when a component execution model provider cannot be found for a
 * component, or when a provider fails to create an execution model.
 */
public class ComponentExecutionModelProviderException extends Exception {
    private static final long serialVersionUID = -9175160066483238910L;
    /** Creates the exception with a descriptive message. */
    public ComponentExecutionModelProviderException(String message) {
        super(message);
    }
    /** Creates the exception with a message and the underlying cause. */
    public ComponentExecutionModelProviderException(String message, Throwable e) {
        super(message, e);
    }
}
| 8,584 |
0 | Create_ds/airavata-sandbox/airavata-mock-multiplexed-api/mock-airavata-api-server/src/main/java/org/apache/airavata/api | Create_ds/airavata-sandbox/airavata-mock-multiplexed-api/mock-airavata-api-server/src/main/java/org/apache/airavata/api/server/MockAiravataAPIServer.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.api.server;
import org.apache.airavata.api.credentials.CredentialManagementService;
import org.apache.airavata.api.gateway.management.GatewayManagementService;
import org.apache.airavata.api.handlers.CredentialManagementHandler;
import org.apache.airavata.api.handlers.GatewayManagementHandler;
import org.apache.thrift.TMultiplexedProcessor;
import org.apache.thrift.server.TServer;
import org.apache.thrift.server.TThreadPoolServer;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TServerTransport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Standalone mock Airavata API server. Exposes CredentialManagementService and
 * GatewayManagementService over a single multiplexed Thrift socket on port
 * 9190; {@link #main} blocks serving requests until the process is stopped.
 */
public class MockAiravataAPIServer {
    private final static Logger logger = LoggerFactory.getLogger(MockAiravataAPIServer.class);
    public static CredentialManagementHandler credentialManagementHandler;
    public static CredentialManagementService.Processor credentialManagementProcessor;
    public static GatewayManagementHandler gatewayManagementHandler;
    public static GatewayManagementService.Processor gatewayManagementProcessor;
    public static void main(String [] args) {
        try {
            credentialManagementHandler = new CredentialManagementHandler();
            credentialManagementProcessor = new CredentialManagementService.Processor(credentialManagementHandler);
            gatewayManagementHandler = new GatewayManagementHandler();
            gatewayManagementProcessor = new GatewayManagementService.Processor(gatewayManagementHandler);
            // Multiplex both services over one transport; clients must connect
            // with TMultiplexedProtocol using these exact service names.
            TMultiplexedProcessor airavataServerProcessor = new TMultiplexedProcessor();
            airavataServerProcessor.registerProcessor("CredentialManagementService",credentialManagementProcessor);
            airavataServerProcessor.registerProcessor("GatewayManagementService",gatewayManagementProcessor);
            TServerTransport serverTransport = new TServerSocket(9190);
            TServer server = new TThreadPoolServer(new TThreadPoolServer.Args(serverTransport).processor(airavataServerProcessor));
            logger.info("Starting Mock Airavata API server...");
            server.serve(); // blocks until the server is stopped
        } catch (Exception e) {
            // FIX: report through the declared (previously unused) SLF4J logger
            // instead of printStackTrace().
            logger.error("Mock Airavata API server failed", e);
        }
    }
}
| 8,585 |
0 | Create_ds/airavata-sandbox/airavata-mock-multiplexed-api/mock-airavata-api-server/src/main/java/org/apache/airavata/api | Create_ds/airavata-sandbox/airavata-mock-multiplexed-api/mock-airavata-api-server/src/main/java/org/apache/airavata/api/handlers/CredentialManagementHandler.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.api.handlers;
import org.apache.airavata.api.credentials.CredentialManagementService;
import org.apache.thrift.TException;
import java.util.Map;
/**
 * Mock implementation of the credential-management Thrift service. Returns
 * canned test values; no real credential store is consulted.
 */
public class CredentialManagementHandler implements CredentialManagementService.Iface {
    /** Returns a fixed test token; no keys are actually generated. */
    @Override
    public String generateAndRegisterSSHKeys(String gatewayId, String userName) throws TException {
        return "testSSHKeyToken";
    }
    /** Returns a fixed test public key regardless of the token. */
    @Override
    public String getSSHPubKey(String airavataCredStoreToken) throws TException {
        return "testsshpubkey";
    }
    /** Always returns null in this mock; no per-user keys are tracked. */
    @Override
    public Map<String, String> getAllUserSSHPubKeys(String userName) throws TException {
        return null;
    }
}
| 8,586 |
0 | Create_ds/airavata-sandbox/airavata-mock-multiplexed-api/mock-airavata-api-server/src/main/java/org/apache/airavata/api | Create_ds/airavata-sandbox/airavata-mock-multiplexed-api/mock-airavata-api-server/src/main/java/org/apache/airavata/api/handlers/GatewayManagementHandler.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.api.handlers;
import org.apache.airavata.api.gateway.management.GatewayManagementService;
import org.apache.thrift.TException;
/**
 * Mock implementation of the gateway-management Thrift service. Returns
 * canned test values; no gateway registry is consulted.
 */
public class GatewayManagementHandler implements GatewayManagementService.Iface {
    /** Returns a fixed test gateway id regardless of the requested name. */
    @Override
    public String registerGateway(String gatewayName) throws TException {
        return "testGatewayId";
    }
    /** Returns a fixed test gateway name regardless of the id. */
    @Override
    public String getGatewayName(String gatewayId) throws TException {
        return "TestGateway";
    }
}
| 8,587 |
0 | Create_ds/airavata-sandbox/airavata-mock-multiplexed-api/mock-airavata-api-java-stubs/src/main/java/org/apache/airavata/api | Create_ds/airavata-sandbox/airavata-mock-multiplexed-api/mock-airavata-api-java-stubs/src/main/java/org/apache/airavata/api/client/MockAiravataClientFactory.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.api.client;
import org.apache.airavata.api.credentials.CredentialManagementService;
import org.apache.airavata.api.gateway.management.GatewayManagementService;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TMultiplexedProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Factory for Thrift clients of the mock multiplexed Airavata API server.
 * <p>
 * Each service is exposed on the same socket behind a
 * {@link TMultiplexedProtocol} service name, so every factory method opens a
 * socket transport, wraps it in a binary protocol, and selects the target
 * service by name.
 */
public class MockAiravataClientFactory {

    private final static Logger logger = LoggerFactory.getLogger(MockAiravataClientFactory.class);

    /**
     * Opens a connection and returns a client for the credential service.
     *
     * @param serverHost host the mock API server listens on
     * @param serverPort port the mock API server listens on
     * @return a connected {@link CredentialManagementService.Client}
     * @throws Exception if the transport cannot be opened
     */
    public static CredentialManagementService.Client createCredentialManagementClient(String serverHost, int serverPort) throws Exception{
        return new CredentialManagementService.Client(
                openMultiplexedProtocol(serverHost, serverPort, "CredentialManagementService"));
    }

    /**
     * Opens a connection and returns a client for the gateway service.
     *
     * @param serverHost host the mock API server listens on
     * @param serverPort port the mock API server listens on
     * @return a connected {@link GatewayManagementService.Client}
     * @throws Exception if the transport cannot be opened
     */
    public static GatewayManagementService.Client createGatewayManagementClient(String serverHost, int serverPort) throws Exception{
        return new GatewayManagementService.Client(
                openMultiplexedProtocol(serverHost, serverPort, "GatewayManagementService"));
    }

    /**
     * Shared connect sequence: open a socket transport, wrap it in a binary
     * protocol, and select the requested multiplexed service.
     *
     * @param serviceName name registered with the server-side multiplexed processor
     */
    private static TMultiplexedProtocol openMultiplexedProtocol(String serverHost, int serverPort, String serviceName) throws Exception {
        try {
            TTransport transport = new TSocket(serverHost, serverPort);
            transport.open();
            TProtocol protocol = new TBinaryProtocol(transport);
            return new TMultiplexedProtocol(protocol, serviceName);
        } catch (TTransportException e) {
            // Log with context and preserve the cause instead of printStackTrace().
            logger.error("Could not connect to API Server at {}:{}", serverHost, serverPort, e);
            throw new Exception("Could not connect to API Server", e);
        }
    }
} | 8,588 |
0 | Create_ds/airavata-sandbox/airavata-mock-multiplexed-api/mock-airavata-api-java-stubs/src/main/java/org/apache/airavata/api/gateway | Create_ds/airavata-sandbox/airavata-mock-multiplexed-api/mock-airavata-api-java-stubs/src/main/java/org/apache/airavata/api/gateway/management/GatewayManagementService.java | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.3)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.airavata.api.gateway.management;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2016-09-08")
public class GatewayManagementService {
/**
 * Synchronous service contract (generated from the Thrift IDL).
 */
public interface Iface {
  /** Registers a gateway under {@code gatewayName}; returns a gateway id (the mock handler returns a fixed one). */
  public String registerGateway(String gatewayName) throws org.apache.thrift.TException;
  /** Looks up the name of the gateway identified by {@code gatewayId}. */
  public String getGatewayName(String gatewayId) throws org.apache.thrift.TException;
}
/**
 * Asynchronous service contract; results are delivered to the supplied
 * {@code resultHandler} callback instead of being returned.
 */
public interface AsyncIface {
  /** Async form of {@code registerGateway}; completion/failure is reported via {@code resultHandler}. */
  public void registerGateway(String gatewayName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  /** Async form of {@code getGatewayName}; completion/failure is reported via {@code resultHandler}. */
  public void getGatewayName(String gatewayId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
}
/**
 * Blocking Thrift client. Each operation serializes its arguments
 * ({@code send_*}) and then blocks reading the framed reply ({@code recv_*}).
 * Generated by the Thrift compiler; do not hand-edit.
 */
public static class Client extends org.apache.thrift.TServiceClient implements Iface {
  /** Creates {@link Client} instances for a given protocol (pair). */
  public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {
    public Factory() {}
    public Client getClient(org.apache.thrift.protocol.TProtocol prot) {
      return new Client(prot);
    }
    public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
      return new Client(iprot, oprot);
    }
  }
  /** Uses the same protocol for both input and output. */
  public Client(org.apache.thrift.protocol.TProtocol prot)
  {
    super(prot, prot);
  }
  public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
    super(iprot, oprot);
  }
  public String registerGateway(String gatewayName) throws org.apache.thrift.TException
  {
    send_registerGateway(gatewayName);
    return recv_registerGateway();
  }
  /** Writes the registerGateway request frame; does not wait for the reply. */
  public void send_registerGateway(String gatewayName) throws org.apache.thrift.TException
  {
    registerGateway_args args = new registerGateway_args();
    args.setGatewayName(gatewayName);
    sendBase("registerGateway", args);
  }
  /** Reads the registerGateway reply frame and unwraps its result value. */
  public String recv_registerGateway() throws org.apache.thrift.TException
  {
    registerGateway_result result = new registerGateway_result();
    receiveBase(result, "registerGateway");
    if (result.isSetSuccess()) {
      return result.success;
    }
    // A reply frame without a success value is a protocol-level failure.
    throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "registerGateway failed: unknown result");
  }
  public String getGatewayName(String gatewayId) throws org.apache.thrift.TException
  {
    send_getGatewayName(gatewayId);
    return recv_getGatewayName();
  }
  /** Writes the getGatewayName request frame; does not wait for the reply. */
  public void send_getGatewayName(String gatewayId) throws org.apache.thrift.TException
  {
    getGatewayName_args args = new getGatewayName_args();
    args.setGatewayId(gatewayId);
    sendBase("getGatewayName", args);
  }
  /** Reads the getGatewayName reply frame and unwraps its result value. */
  public String recv_getGatewayName() throws org.apache.thrift.TException
  {
    getGatewayName_result result = new getGatewayName_result();
    receiveBase(result, "getGatewayName");
    if (result.isSetSuccess()) {
      return result.success;
    }
    // A reply frame without a success value is a protocol-level failure.
    throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getGatewayName failed: unknown result");
  }
}
/**
 * Non-blocking Thrift client. Each operation builds a {@code *_call} object,
 * registers it as the current method, and hands it to the async client
 * manager; results are decoded in {@code getResult()} from the frame buffer.
 * Generated by the Thrift compiler; do not hand-edit.
 */
public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
  /** Creates {@link AsyncClient} instances bound to a client manager and protocol factory. */
  public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
    private org.apache.thrift.async.TAsyncClientManager clientManager;
    private org.apache.thrift.protocol.TProtocolFactory protocolFactory;
    public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {
      this.clientManager = clientManager;
      this.protocolFactory = protocolFactory;
    }
    public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {
      return new AsyncClient(protocolFactory, clientManager, transport);
    }
  }
  public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {
    super(protocolFactory, clientManager, transport);
  }
  public void registerGateway(String gatewayName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
    checkReady();
    registerGateway_call method_call = new registerGateway_call(gatewayName, resultHandler, this, ___protocolFactory, ___transport);
    this.___currentMethod = method_call;
    ___manager.call(method_call);
  }
  /** Captures the registerGateway arguments until the manager writes/reads the frame. */
  public static class registerGateway_call extends org.apache.thrift.async.TAsyncMethodCall {
    private String gatewayName;
    public registerGateway_call(String gatewayName, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
      super(client, protocolFactory, transport, resultHandler, false);
      this.gatewayName = gatewayName;
    }
    public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
      prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("registerGateway", org.apache.thrift.protocol.TMessageType.CALL, 0));
      registerGateway_args args = new registerGateway_args();
      args.setGatewayName(gatewayName);
      args.write(prot);
      prot.writeMessageEnd();
    }
    /** Decodes the buffered response frame; only valid once the call has finished. */
    public String getResult() throws org.apache.thrift.TException {
      if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
        throw new IllegalStateException("Method call not finished!");
      }
      org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
      org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
      // Reuse the synchronous client's reply decoding over the in-memory frame.
      return (new Client(prot)).recv_registerGateway();
    }
  }
  public void getGatewayName(String gatewayId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
    checkReady();
    getGatewayName_call method_call = new getGatewayName_call(gatewayId, resultHandler, this, ___protocolFactory, ___transport);
    this.___currentMethod = method_call;
    ___manager.call(method_call);
  }
  /** Captures the getGatewayName arguments until the manager writes/reads the frame. */
  public static class getGatewayName_call extends org.apache.thrift.async.TAsyncMethodCall {
    private String gatewayId;
    public getGatewayName_call(String gatewayId, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
      super(client, protocolFactory, transport, resultHandler, false);
      this.gatewayId = gatewayId;
    }
    public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
      prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getGatewayName", org.apache.thrift.protocol.TMessageType.CALL, 0));
      getGatewayName_args args = new getGatewayName_args();
      args.setGatewayId(gatewayId);
      args.write(prot);
      prot.writeMessageEnd();
    }
    /** Decodes the buffered response frame; only valid once the call has finished. */
    public String getResult() throws org.apache.thrift.TException {
      if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
        throw new IllegalStateException("Method call not finished!");
      }
      org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
      org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
      // Reuse the synchronous client's reply decoding over the in-memory frame.
      return (new Client(prot)).recv_getGatewayName();
    }
  }
}
/**
 * Synchronous server-side processor: maps incoming method names to
 * {@code ProcessFunction}s that decode the args, invoke the handler
 * ({@code Iface}), and encode the result.
 * Generated by the Thrift compiler; do not hand-edit.
 */
public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {
  private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
  public Processor(I iface) {
    super(iface, getProcessMap(new HashMap<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>>()));
  }
  protected Processor(I iface, Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
    super(iface, getProcessMap(processMap));
  }
  /** Registers one dispatch entry per service method. */
  private static <I extends Iface> Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> getProcessMap(Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
    processMap.put("registerGateway", new registerGateway());
    processMap.put("getGatewayName", new getGatewayName());
    return processMap;
  }
  /** Dispatch entry for the registerGateway method. */
  public static class registerGateway<I extends Iface> extends org.apache.thrift.ProcessFunction<I, registerGateway_args> {
    public registerGateway() {
      super("registerGateway");
    }
    public registerGateway_args getEmptyArgsInstance() {
      return new registerGateway_args();
    }
    protected boolean isOneway() {
      return false;
    }
    public registerGateway_result getResult(I iface, registerGateway_args args) throws org.apache.thrift.TException {
      registerGateway_result result = new registerGateway_result();
      result.success = iface.registerGateway(args.gatewayName);
      return result;
    }
  }
  /** Dispatch entry for the getGatewayName method. */
  public static class getGatewayName<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getGatewayName_args> {
    public getGatewayName() {
      super("getGatewayName");
    }
    public getGatewayName_args getEmptyArgsInstance() {
      return new getGatewayName_args();
    }
    protected boolean isOneway() {
      return false;
    }
    public getGatewayName_result getResult(I iface, getGatewayName_args args) throws org.apache.thrift.TException {
      getGatewayName_result result = new getGatewayName_result();
      result.success = iface.getGatewayName(args.gatewayId);
      return result;
    }
  }
}
/**
 * Asynchronous server-side processor: maps incoming method names to
 * {@code AsyncProcessFunction}s; each builds a callback that writes the
 * reply (or an exception frame) back through the frame buffer when the
 * handler completes.
 * Generated by the Thrift compiler; do not hand-edit.
 */
public static class AsyncProcessor<I extends AsyncIface> extends org.apache.thrift.TBaseAsyncProcessor<I> {
  private static final Logger LOGGER = LoggerFactory.getLogger(AsyncProcessor.class.getName());
  public AsyncProcessor(I iface) {
    super(iface, getProcessMap(new HashMap<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>>()));
  }
  protected AsyncProcessor(I iface, Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
    super(iface, getProcessMap(processMap));
  }
  /** Registers one dispatch entry per service method. */
  private static <I extends AsyncIface> Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase,?>> getProcessMap(Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
    processMap.put("registerGateway", new registerGateway());
    processMap.put("getGatewayName", new getGatewayName());
    return processMap;
  }
  /** Async dispatch entry for the registerGateway method. */
  public static class registerGateway<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, registerGateway_args, String> {
    public registerGateway() {
      super("registerGateway");
    }
    public registerGateway_args getEmptyArgsInstance() {
      return new registerGateway_args();
    }
    /** Callback that serializes the handler's outcome back onto the wire. */
    public AsyncMethodCallback<String> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
      final org.apache.thrift.AsyncProcessFunction fcall = this;
      return new AsyncMethodCallback<String>() {
        public void onComplete(String o) {
          registerGateway_result result = new registerGateway_result();
          result.success = o;
          try {
            fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
            return;
          } catch (Exception e) {
            LOGGER.error("Exception writing to internal frame buffer", e);
          }
          // Writing the reply failed; drop the connection's frame buffer.
          fb.close();
        }
        public void onError(Exception e) {
          byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
          org.apache.thrift.TBase msg;
          registerGateway_result result = new registerGateway_result();
          {
            // Handler failures are reported as a TApplicationException frame.
            msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
            msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
          }
          try {
            fcall.sendResponse(fb,msg,msgType,seqid);
            return;
          } catch (Exception ex) {
            LOGGER.error("Exception writing to internal frame buffer", ex);
          }
          fb.close();
        }
      };
    }
    protected boolean isOneway() {
      return false;
    }
    public void start(I iface, registerGateway_args args, org.apache.thrift.async.AsyncMethodCallback<String> resultHandler) throws TException {
      iface.registerGateway(args.gatewayName,resultHandler);
    }
  }
  /** Async dispatch entry for the getGatewayName method. */
  public static class getGatewayName<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getGatewayName_args, String> {
    public getGatewayName() {
      super("getGatewayName");
    }
    public getGatewayName_args getEmptyArgsInstance() {
      return new getGatewayName_args();
    }
    /** Callback that serializes the handler's outcome back onto the wire. */
    public AsyncMethodCallback<String> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
      final org.apache.thrift.AsyncProcessFunction fcall = this;
      return new AsyncMethodCallback<String>() {
        public void onComplete(String o) {
          getGatewayName_result result = new getGatewayName_result();
          result.success = o;
          try {
            fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
            return;
          } catch (Exception e) {
            LOGGER.error("Exception writing to internal frame buffer", e);
          }
          // Writing the reply failed; drop the connection's frame buffer.
          fb.close();
        }
        public void onError(Exception e) {
          byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
          org.apache.thrift.TBase msg;
          getGatewayName_result result = new getGatewayName_result();
          {
            // Handler failures are reported as a TApplicationException frame.
            msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
            msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
          }
          try {
            fcall.sendResponse(fb,msg,msgType,seqid);
            return;
          } catch (Exception ex) {
            LOGGER.error("Exception writing to internal frame buffer", ex);
          }
          fb.close();
        }
      };
    }
    protected boolean isOneway() {
      return false;
    }
    public void start(I iface, getGatewayName_args args, org.apache.thrift.async.AsyncMethodCallback<String> resultHandler) throws TException {
      iface.getGatewayName(args.gatewayId,resultHandler);
    }
  }
}
/**
 * Wire struct for the registerGateway call arguments (one required string
 * field, {@code gatewayName}, thrift field id 1). Carries its own standard
 * and tuple serialization schemes.
 * Generated by the Thrift compiler; do not hand-edit.
 */
public static class registerGateway_args implements org.apache.thrift.TBase<registerGateway_args, registerGateway_args._Fields>, java.io.Serializable, Cloneable, Comparable<registerGateway_args> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("registerGateway_args");
  private static final org.apache.thrift.protocol.TField GATEWAY_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("gatewayName", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new registerGateway_argsStandardSchemeFactory());
    schemes.put(TupleScheme.class, new registerGateway_argsTupleSchemeFactory());
  }
  public String gatewayName; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    GATEWAY_NAME((short)1, "gatewayName");
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // GATEWAY_NAME
          return GATEWAY_NAME;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.GATEWAY_NAME, new org.apache.thrift.meta_data.FieldMetaData("gatewayName", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(registerGateway_args.class, metaDataMap);
  }
  public registerGateway_args() {
  }
  public registerGateway_args(
    String gatewayName)
  {
    this();
    this.gatewayName = gatewayName;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public registerGateway_args(registerGateway_args other) {
    if (other.isSetGatewayName()) {
      this.gatewayName = other.gatewayName;
    }
  }
  public registerGateway_args deepCopy() {
    return new registerGateway_args(this);
  }
  @Override
  public void clear() {
    this.gatewayName = null;
  }
  public String getGatewayName() {
    return this.gatewayName;
  }
  /** Fluent setter: returns this struct for chaining. */
  public registerGateway_args setGatewayName(String gatewayName) {
    this.gatewayName = gatewayName;
    return this;
  }
  public void unsetGatewayName() {
    this.gatewayName = null;
  }
  /** Returns true if field gatewayName is set (has been assigned a value) and false otherwise */
  public boolean isSetGatewayName() {
    return this.gatewayName != null;
  }
  public void setGatewayNameIsSet(boolean value) {
    if (!value) {
      this.gatewayName = null;
    }
  }
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case GATEWAY_NAME:
      if (value == null) {
        unsetGatewayName();
      } else {
        setGatewayName((String)value);
      }
      break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case GATEWAY_NAME:
      return getGatewayName();
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case GATEWAY_NAME:
      return isSetGatewayName();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof registerGateway_args)
      return this.equals((registerGateway_args)that);
    return false;
  }
  public boolean equals(registerGateway_args that) {
    if (that == null)
      return false;
    boolean this_present_gatewayName = true && this.isSetGatewayName();
    boolean that_present_gatewayName = true && that.isSetGatewayName();
    if (this_present_gatewayName || that_present_gatewayName) {
      if (!(this_present_gatewayName && that_present_gatewayName))
        return false;
      if (!this.gatewayName.equals(that.gatewayName))
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    List<Object> list = new ArrayList<Object>();
    boolean present_gatewayName = true && (isSetGatewayName());
    list.add(present_gatewayName);
    if (present_gatewayName)
      list.add(gatewayName);
    return list.hashCode();
  }
  @Override
  public int compareTo(registerGateway_args other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    // Order first on presence of the field, then on its value.
    lastComparison = Boolean.valueOf(isSetGatewayName()).compareTo(other.isSetGatewayName());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetGatewayName()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.gatewayName, other.gatewayName);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("registerGateway_args(");
    boolean first = true;
    sb.append("gatewayName:");
    if (this.gatewayName == null) {
      sb.append("null");
    } else {
      sb.append(this.gatewayName);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    if (gatewayName == null) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'gatewayName' was not present! Struct: " + toString());
    }
    // check for sub-struct validity
  }
  // Java serialization is delegated to the thrift compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class registerGateway_argsStandardSchemeFactory implements SchemeFactory {
    public registerGateway_argsStandardScheme getScheme() {
      return new registerGateway_argsStandardScheme();
    }
  }
  /** Field-id/type tagged encoding used by the standard binary/compact protocols. */
  private static class registerGateway_argsStandardScheme extends StandardScheme<registerGateway_args> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, registerGateway_args struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // GATEWAY_NAME
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.gatewayName = iprot.readString();
              struct.setGatewayNameIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, registerGateway_args struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.gatewayName != null) {
        oprot.writeFieldBegin(GATEWAY_NAME_FIELD_DESC);
        oprot.writeString(struct.gatewayName);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class registerGateway_argsTupleSchemeFactory implements SchemeFactory {
    public registerGateway_argsTupleScheme getScheme() {
      return new registerGateway_argsTupleScheme();
    }
  }
  /** Compact positional encoding used by the tuple protocol (required field only). */
  private static class registerGateway_argsTupleScheme extends TupleScheme<registerGateway_args> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, registerGateway_args struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      oprot.writeString(struct.gatewayName);
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, registerGateway_args struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      struct.gatewayName = iprot.readString();
      struct.setGatewayNameIsSet(true);
    }
  }
}
public static class registerGateway_result implements org.apache.thrift.TBase<registerGateway_result, registerGateway_result._Fields>, java.io.Serializable, Cloneable, Comparable<registerGateway_result> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("registerGateway_result");
private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRING, (short)0);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new registerGateway_resultStandardSchemeFactory());
schemes.put(TupleScheme.class, new registerGateway_resultTupleSchemeFactory());
}
public String success; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
SUCCESS((short)0, "success");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 0: // SUCCESS
return SUCCESS;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(registerGateway_result.class, metaDataMap);
}
public registerGateway_result() {
}
public registerGateway_result(
String success)
{
this();
this.success = success;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public registerGateway_result(registerGateway_result other) {
if (other.isSetSuccess()) {
this.success = other.success;
}
}
public registerGateway_result deepCopy() {
return new registerGateway_result(this);
}
@Override
public void clear() {
this.success = null;
}
public String getSuccess() {
return this.success;
}
public registerGateway_result setSuccess(String success) {
this.success = success;
return this;
}
public void unsetSuccess() {
this.success = null;
}
/** Returns true if field success is set (has been assigned a value) and false otherwise */
public boolean isSetSuccess() {
return this.success != null;
}
public void setSuccessIsSet(boolean value) {
if (!value) {
this.success = null;
}
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case SUCCESS:
if (value == null) {
unsetSuccess();
} else {
setSuccess((String)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case SUCCESS:
return getSuccess();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case SUCCESS:
return isSetSuccess();
}
throw new IllegalStateException();
}
/** Type-checks and dispatches to the field-wise overload below. */
@Override
public boolean equals(Object that) {
  // instanceof is false for null, covering the original null guard.
  return (that instanceof registerGateway_result) && this.equals((registerGateway_result) that);
}

/**
 * Field-wise equality: equal when both have {@code success} unset, or both
 * set to equal values.
 */
public boolean equals(registerGateway_result that) {
  if (that == null) {
    return false;
  }
  boolean mineSet = this.isSetSuccess();
  boolean theirsSet = that.isSetSuccess();
  if (mineSet || theirsSet) {
    // Exactly one side set, or values differ -> not equal.
    if (!(mineSet && theirsSet)) {
      return false;
    }
    if (!this.success.equals(that.success)) {
      return false;
    }
  }
  return true;
}

/** Hash consistent with equals: folds the presence flag and, when set, the value. */
@Override
public int hashCode() {
  List<Object> parts = new ArrayList<Object>();
  boolean hasSuccess = isSetSuccess();
  parts.add(hasSuccess);
  if (hasSuccess) {
    parts.add(success);
  }
  return parts.hashCode();
}
/** Orders first by "is set" flag (unset sorts first), then by field value. */
@Override
public int compareTo(registerGateway_result other) {
  // Different struct classes order by class name (generated convention).
  if (!getClass().equals(other.getClass())) {
    return getClass().getName().compareTo(other.getClass().getName());
  }
  int lastComparison = 0;
  lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (isSetSuccess()) {
    lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  return 0;
}
/** Looks up the _Fields constant for a wire-level field id, or null if unknown. */
public _Fields fieldForId(int fieldId) {
  return _Fields.findByThriftId(fieldId);
}
/** Deserializes this struct from {@code iprot} using the protocol's preferred scheme. */
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
  schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
/** Serializes this struct to {@code oprot} using the protocol's preferred scheme. */
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
  schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
/** Renders {@code registerGateway_result(success:<value>)} for logging/diagnostics. */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder("registerGateway_result(");
  boolean first = true;
  sb.append("success:");
  if (this.success == null) {
    sb.append("null");
  } else {
    sb.append(this.success);
  }
  first = false; // generated bookkeeping; unused with a single field
  sb.append(")");
  return sb.toString();
}
/** No required fields and no sub-structs, so nothing to check. */
public void validate() throws org.apache.thrift.TException {
  // check for required fields
  // check for sub-struct validity
}
// Java serialization hook: delegates to Thrift compact-protocol encoding.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
  try {
    write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}
// Java deserialization hook: delegates to Thrift compact-protocol decoding.
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
  try {
    read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}
/** Supplies the field-tagged (standard) serialization scheme. */
private static class registerGateway_resultStandardSchemeFactory implements SchemeFactory {
  public registerGateway_resultStandardScheme getScheme() {
    return new registerGateway_resultStandardScheme();
  }
}
/** Field-tagged (standard) wire encoding for registerGateway_result. */
private static class registerGateway_resultStandardScheme extends StandardScheme<registerGateway_result> {
  // Reads fields in any order until STOP, skipping unknown ids/types for forward compatibility.
  public void read(org.apache.thrift.protocol.TProtocol iprot, registerGateway_result struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TField schemeField;
    iprot.readStructBegin();
    while (true)
    {
      schemeField = iprot.readFieldBegin();
      if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
        break;
      }
      switch (schemeField.id) {
        case 0: // SUCCESS
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.success = iprot.readString();
            struct.setSuccessIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        default:
          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
      }
      iprot.readFieldEnd();
    }
    iprot.readStructEnd();
    // check for required fields of primitive type, which can't be checked in the validate method
    struct.validate();
  }
  // An unset (null) success field is simply omitted from the output.
  public void write(org.apache.thrift.protocol.TProtocol oprot, registerGateway_result struct) throws org.apache.thrift.TException {
    struct.validate();
    oprot.writeStructBegin(STRUCT_DESC);
    if (struct.success != null) {
      oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
      oprot.writeString(struct.success);
      oprot.writeFieldEnd();
    }
    oprot.writeFieldStop();
    oprot.writeStructEnd();
  }
}
/** Supplies the compact tuple serialization scheme. */
private static class registerGateway_resultTupleSchemeFactory implements SchemeFactory {
  public registerGateway_resultTupleScheme getScheme() {
    return new registerGateway_resultTupleScheme();
  }
}
/** Compact tuple wire encoding: a presence bitset followed by the set fields in order. */
private static class registerGateway_resultTupleScheme extends TupleScheme<registerGateway_result> {
  @Override
  public void write(org.apache.thrift.protocol.TProtocol prot, registerGateway_result struct) throws org.apache.thrift.TException {
    TTupleProtocol oprot = (TTupleProtocol) prot;
    BitSet optionals = new BitSet();
    if (struct.isSetSuccess()) {
      optionals.set(0);
    }
    oprot.writeBitSet(optionals, 1); // one optional field -> 1-bit presence map
    if (struct.isSetSuccess()) {
      oprot.writeString(struct.success);
    }
  }
  @Override
  public void read(org.apache.thrift.protocol.TProtocol prot, registerGateway_result struct) throws org.apache.thrift.TException {
    TTupleProtocol iprot = (TTupleProtocol) prot;
    BitSet incoming = iprot.readBitSet(1);
    if (incoming.get(0)) {
      struct.success = iprot.readString();
      struct.setSuccessIsSet(true);
    }
  }
}
}
/**
 * Generated Thrift argument struct for the getGatewayName RPC; carries the
 * single required {@code gatewayId} parameter across the wire.
 */
public static class getGatewayName_args implements org.apache.thrift.TBase<getGatewayName_args, getGatewayName_args._Fields>, java.io.Serializable, Cloneable, Comparable<getGatewayName_args> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getGatewayName_args");
  private static final org.apache.thrift.protocol.TField GATEWAY_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("gatewayId", org.apache.thrift.protocol.TType.STRING, (short)1);
  // Two interchangeable wire encodings: field-tagged (standard) and compact (tuple).
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new getGatewayName_argsStandardSchemeFactory());
    schemes.put(TupleScheme.class, new getGatewayName_argsTupleSchemeFactory());
  }
  public String gatewayId; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    GATEWAY_ID((short)1, "gatewayId");
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // GATEWAY_ID
          return GATEWAY_ID;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  /** Field metadata, registered with Thrift's global struct registry. */
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.GATEWAY_ID, new org.apache.thrift.meta_data.FieldMetaData("gatewayId", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getGatewayName_args.class, metaDataMap);
  }
  /** Default constructor; leaves {@code gatewayId} unset (null). */
  public getGatewayName_args() {
  }
  /** Convenience constructor assigning the single argument field. */
  public getGatewayName_args(
    String gatewayId)
  {
    this();
    this.gatewayId = gatewayId;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public getGatewayName_args(getGatewayName_args other) {
    if (other.isSetGatewayId()) {
      this.gatewayId = other.gatewayId;
    }
  }
  public getGatewayName_args deepCopy() {
    return new getGatewayName_args(this);
  }
  @Override
  public void clear() {
    this.gatewayId = null;
  }
  public String getGatewayId() {
    return this.gatewayId;
  }
  /** Fluent setter; returns {@code this} to allow call chaining. */
  public getGatewayName_args setGatewayId(String gatewayId) {
    this.gatewayId = gatewayId;
    return this;
  }
  public void unsetGatewayId() {
    this.gatewayId = null;
  }
  /** Returns true if field gatewayId is set (has been assigned a value) and false otherwise */
  public boolean isSetGatewayId() {
    return this.gatewayId != null;
  }
  // "set" is tracked by nullness for reference types, so only the false case clears.
  public void setGatewayIdIsSet(boolean value) {
    if (!value) {
      this.gatewayId = null;
    }
  }
  /** Reflective setter used by generic Thrift tooling; null unsets the field. */
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case GATEWAY_ID:
      if (value == null) {
        unsetGatewayId();
      } else {
        setGatewayId((String)value);
      }
      break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case GATEWAY_ID:
      return getGatewayId();
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case GATEWAY_ID:
      return isSetGatewayId();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof getGatewayName_args)
      return this.equals((getGatewayName_args)that);
    return false;
  }
  /** Equal when both have gatewayId unset, or both set to equal values. */
  public boolean equals(getGatewayName_args that) {
    if (that == null)
      return false;
    boolean this_present_gatewayId = true && this.isSetGatewayId();
    boolean that_present_gatewayId = true && that.isSetGatewayId();
    if (this_present_gatewayId || that_present_gatewayId) {
      if (!(this_present_gatewayId && that_present_gatewayId))
        return false;
      if (!this.gatewayId.equals(that.gatewayId))
        return false;
    }
    return true;
  }
  /** Hash consistent with equals: presence flag plus (when set) the value. */
  @Override
  public int hashCode() {
    List<Object> list = new ArrayList<Object>();
    boolean present_gatewayId = true && (isSetGatewayId());
    list.add(present_gatewayId);
    if (present_gatewayId)
      list.add(gatewayId);
    return list.hashCode();
  }
  /** Orders by "is set" flag (unset first), then by field value. */
  @Override
  public int compareTo(getGatewayName_args other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = Boolean.valueOf(isSetGatewayId()).compareTo(other.isSetGatewayId());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetGatewayId()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.gatewayId, other.gatewayId);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  /** Deserializes via the protocol's preferred scheme. */
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  /** Serializes via the protocol's preferred scheme. */
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("getGatewayName_args(");
    boolean first = true;
    sb.append("gatewayId:");
    if (this.gatewayId == null) {
      sb.append("null");
    } else {
      sb.append(this.gatewayId);
    }
    first = false; // generated bookkeeping; unused with a single field
    sb.append(")");
    return sb.toString();
  }
  /** @throws org.apache.thrift.TException if the required gatewayId field is missing */
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    if (gatewayId == null) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'gatewayId' was not present! Struct: " + toString());
    }
    // check for sub-struct validity
  }
  // Java serialization hooks delegate to Thrift compact-protocol encoding.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class getGatewayName_argsStandardSchemeFactory implements SchemeFactory {
    public getGatewayName_argsStandardScheme getScheme() {
      return new getGatewayName_argsStandardScheme();
    }
  }
  /** Field-tagged wire encoding; skips unknown fields for forward compatibility. */
  private static class getGatewayName_argsStandardScheme extends StandardScheme<getGatewayName_args> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, getGatewayName_args struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // GATEWAY_ID
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.gatewayId = iprot.readString();
              struct.setGatewayIdIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, getGatewayName_args struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.gatewayId != null) {
        oprot.writeFieldBegin(GATEWAY_ID_FIELD_DESC);
        oprot.writeString(struct.gatewayId);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class getGatewayName_argsTupleSchemeFactory implements SchemeFactory {
    public getGatewayName_argsTupleScheme getScheme() {
      return new getGatewayName_argsTupleScheme();
    }
  }
  /** Compact tuple encoding: the required field is written bare, with no presence bitset. */
  private static class getGatewayName_argsTupleScheme extends TupleScheme<getGatewayName_args> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, getGatewayName_args struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      oprot.writeString(struct.gatewayId);
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, getGatewayName_args struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      struct.gatewayId = iprot.readString();
      struct.setGatewayIdIsSet(true);
    }
  }
}
/**
 * Generated Thrift result struct for the getGatewayName RPC; carries the
 * optional {@code success} return value (field id 0).
 */
public static class getGatewayName_result implements org.apache.thrift.TBase<getGatewayName_result, getGatewayName_result._Fields>, java.io.Serializable, Cloneable, Comparable<getGatewayName_result> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getGatewayName_result");
  private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRING, (short)0);
  // Two interchangeable wire encodings: field-tagged (standard) and compact (tuple).
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new getGatewayName_resultStandardSchemeFactory());
    schemes.put(TupleScheme.class, new getGatewayName_resultTupleSchemeFactory());
  }
  public String success; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    SUCCESS((short)0, "success");
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 0: // SUCCESS
          return SUCCESS;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  /** Field metadata, registered with Thrift's global struct registry. */
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getGatewayName_result.class, metaDataMap);
  }
  /** Default constructor; leaves {@code success} unset (null). */
  public getGatewayName_result() {
  }
  /** Convenience constructor assigning the result field. */
  public getGatewayName_result(
    String success)
  {
    this();
    this.success = success;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public getGatewayName_result(getGatewayName_result other) {
    if (other.isSetSuccess()) {
      this.success = other.success;
    }
  }
  public getGatewayName_result deepCopy() {
    return new getGatewayName_result(this);
  }
  @Override
  public void clear() {
    this.success = null;
  }
  public String getSuccess() {
    return this.success;
  }
  /** Fluent setter; returns {@code this} to allow call chaining. */
  public getGatewayName_result setSuccess(String success) {
    this.success = success;
    return this;
  }
  public void unsetSuccess() {
    this.success = null;
  }
  /** Returns true if field success is set (has been assigned a value) and false otherwise */
  public boolean isSetSuccess() {
    return this.success != null;
  }
  // "set" is tracked by nullness for reference types, so only the false case clears.
  public void setSuccessIsSet(boolean value) {
    if (!value) {
      this.success = null;
    }
  }
  /** Reflective setter used by generic Thrift tooling; null unsets the field. */
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case SUCCESS:
      if (value == null) {
        unsetSuccess();
      } else {
        setSuccess((String)value);
      }
      break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case SUCCESS:
      return getSuccess();
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case SUCCESS:
      return isSetSuccess();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof getGatewayName_result)
      return this.equals((getGatewayName_result)that);
    return false;
  }
  /** Equal when both have success unset, or both set to equal values. */
  public boolean equals(getGatewayName_result that) {
    if (that == null)
      return false;
    boolean this_present_success = true && this.isSetSuccess();
    boolean that_present_success = true && that.isSetSuccess();
    if (this_present_success || that_present_success) {
      if (!(this_present_success && that_present_success))
        return false;
      if (!this.success.equals(that.success))
        return false;
    }
    return true;
  }
  /** Hash consistent with equals: presence flag plus (when set) the value. */
  @Override
  public int hashCode() {
    List<Object> list = new ArrayList<Object>();
    boolean present_success = true && (isSetSuccess());
    list.add(present_success);
    if (present_success)
      list.add(success);
    return list.hashCode();
  }
  /** Orders by "is set" flag (unset first), then by field value. */
  @Override
  public int compareTo(getGatewayName_result other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetSuccess()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  /** Deserializes via the protocol's preferred scheme. */
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  /** Serializes via the protocol's preferred scheme. */
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("getGatewayName_result(");
    boolean first = true;
    sb.append("success:");
    if (this.success == null) {
      sb.append("null");
    } else {
      sb.append(this.success);
    }
    first = false; // generated bookkeeping; unused with a single field
    sb.append(")");
    return sb.toString();
  }
  /** No required fields and no sub-structs, so nothing to check. */
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization hooks delegate to Thrift compact-protocol encoding.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class getGatewayName_resultStandardSchemeFactory implements SchemeFactory {
    public getGatewayName_resultStandardScheme getScheme() {
      return new getGatewayName_resultStandardScheme();
    }
  }
  /** Field-tagged wire encoding; skips unknown fields for forward compatibility. */
  private static class getGatewayName_resultStandardScheme extends StandardScheme<getGatewayName_result> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, getGatewayName_result struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 0: // SUCCESS
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.success = iprot.readString();
              struct.setSuccessIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, getGatewayName_result struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.success != null) {
        oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
        oprot.writeString(struct.success);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class getGatewayName_resultTupleSchemeFactory implements SchemeFactory {
    public getGatewayName_resultTupleScheme getScheme() {
      return new getGatewayName_resultTupleScheme();
    }
  }
  /** Compact tuple encoding: a 1-bit presence map, then the success value when set. */
  private static class getGatewayName_resultTupleScheme extends TupleScheme<getGatewayName_result> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, getGatewayName_result struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetSuccess()) {
        optionals.set(0);
      }
      oprot.writeBitSet(optionals, 1);
      if (struct.isSetSuccess()) {
        oprot.writeString(struct.success);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, getGatewayName_result struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(1);
      if (incoming.get(0)) {
        struct.success = iprot.readString();
        struct.setSuccessIsSet(true);
      }
    }
  }
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.3)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.airavata.api.credentials;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2016-09-08")
public class CredentialManagementService {
/** Synchronous contract of the CredentialManagementService. */
public interface Iface {
  /**
   * Generate and Register SSH Key Pair with Airavata Credential Store.
   *
   * @param gatewayId
   *   The identifier for the requested gateway.
   *
   * @param userName
   *   The User for which the credential should be registered. For community accounts, this user is the name of the
   *   community user name. For computational resources, this user name need not be the same user name on resoruces.
   *
   * @return airavataCredStoreToken
   *   An SSH Key pair is generated and stored in the credential store and associated with users or community account
   *   belonging to a gateway.
   */
  public String generateAndRegisterSSHKeys(String gatewayId, String userName) throws org.apache.thrift.TException;
  /** Looks up the SSH public key for the given credential-store token (see service IDL for details). */
  public String getSSHPubKey(String airavataCredStoreToken) throws org.apache.thrift.TException;
  /** Returns the stored SSH public keys for the given user (see service IDL for map-key semantics). */
  public Map<String,String> getAllUserSSHPubKeys(String userName) throws org.apache.thrift.TException;
}
/** Asynchronous mirror of {@link Iface}; results are delivered to the supplied callback. */
public interface AsyncIface {
  public void generateAndRegisterSSHKeys(String gatewayId, String userName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void getSSHPubKey(String airavataCredStoreToken, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  public void getAllUserSSHPubKeys(String userName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
}
/**
 * Synchronous RPC client: each operation sends a *_args struct and blocks
 * until the matching *_result struct arrives; a missing result raises
 * MISSING_RESULT.
 */
public static class Client extends org.apache.thrift.TServiceClient implements Iface {
  /** Creates {@link Client} instances over given protocols. */
  public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {
    public Factory() {}
    public Client getClient(org.apache.thrift.protocol.TProtocol prot) {
      return new Client(prot);
    }
    public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
      return new Client(iprot, oprot);
    }
  }
  /** Uses the same protocol for both input and output. */
  public Client(org.apache.thrift.protocol.TProtocol prot)
  {
    super(prot, prot);
  }
  public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
    super(iprot, oprot);
  }
  /** Blocking call: send args, then wait for and unwrap the result. */
  public String generateAndRegisterSSHKeys(String gatewayId, String userName) throws org.apache.thrift.TException
  {
    send_generateAndRegisterSSHKeys(gatewayId, userName);
    return recv_generateAndRegisterSSHKeys();
  }
  public void send_generateAndRegisterSSHKeys(String gatewayId, String userName) throws org.apache.thrift.TException
  {
    generateAndRegisterSSHKeys_args args = new generateAndRegisterSSHKeys_args();
    args.setGatewayId(gatewayId);
    args.setUserName(userName);
    sendBase("generateAndRegisterSSHKeys", args);
  }
  public String recv_generateAndRegisterSSHKeys() throws org.apache.thrift.TException
  {
    generateAndRegisterSSHKeys_result result = new generateAndRegisterSSHKeys_result();
    receiveBase(result, "generateAndRegisterSSHKeys");
    if (result.isSetSuccess()) {
      return result.success;
    }
    throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "generateAndRegisterSSHKeys failed: unknown result");
  }
  /** Blocking call: send args, then wait for and unwrap the result. */
  public String getSSHPubKey(String airavataCredStoreToken) throws org.apache.thrift.TException
  {
    send_getSSHPubKey(airavataCredStoreToken);
    return recv_getSSHPubKey();
  }
  public void send_getSSHPubKey(String airavataCredStoreToken) throws org.apache.thrift.TException
  {
    getSSHPubKey_args args = new getSSHPubKey_args();
    args.setAiravataCredStoreToken(airavataCredStoreToken);
    sendBase("getSSHPubKey", args);
  }
  public String recv_getSSHPubKey() throws org.apache.thrift.TException
  {
    getSSHPubKey_result result = new getSSHPubKey_result();
    receiveBase(result, "getSSHPubKey");
    if (result.isSetSuccess()) {
      return result.success;
    }
    throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getSSHPubKey failed: unknown result");
  }
  /** Blocking call: send args, then wait for and unwrap the result. */
  public Map<String,String> getAllUserSSHPubKeys(String userName) throws org.apache.thrift.TException
  {
    send_getAllUserSSHPubKeys(userName);
    return recv_getAllUserSSHPubKeys();
  }
  public void send_getAllUserSSHPubKeys(String userName) throws org.apache.thrift.TException
  {
    getAllUserSSHPubKeys_args args = new getAllUserSSHPubKeys_args();
    args.setUserName(userName);
    sendBase("getAllUserSSHPubKeys", args);
  }
  public Map<String,String> recv_getAllUserSSHPubKeys() throws org.apache.thrift.TException
  {
    getAllUserSSHPubKeys_result result = new getAllUserSSHPubKeys_result();
    receiveBase(result, "getAllUserSSHPubKeys");
    if (result.isSetSuccess()) {
      return result.success;
    }
    throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getAllUserSSHPubKeys failed: unknown result");
  }
}
public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
/** Creates {@link AsyncClient}s sharing one client manager and protocol factory. */
public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
  private org.apache.thrift.async.TAsyncClientManager clientManager;
  private org.apache.thrift.protocol.TProtocolFactory protocolFactory;
  public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {
    this.clientManager = clientManager;
    this.protocolFactory = protocolFactory;
  }
  public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {
    return new AsyncClient(protocolFactory, clientManager, transport);
  }
}
public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {
super(protocolFactory, clientManager, transport);
}
public void generateAndRegisterSSHKeys(String gatewayId, String userName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
generateAndRegisterSSHKeys_call method_call = new generateAndRegisterSSHKeys_call(gatewayId, userName, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class generateAndRegisterSSHKeys_call extends org.apache.thrift.async.TAsyncMethodCall {
private String gatewayId;
private String userName;
public generateAndRegisterSSHKeys_call(String gatewayId, String userName, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.gatewayId = gatewayId;
this.userName = userName;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("generateAndRegisterSSHKeys", org.apache.thrift.protocol.TMessageType.CALL, 0));
generateAndRegisterSSHKeys_args args = new generateAndRegisterSSHKeys_args();
args.setGatewayId(gatewayId);
args.setUserName(userName);
args.write(prot);
prot.writeMessageEnd();
}
public String getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_generateAndRegisterSSHKeys();
}
}
public void getSSHPubKey(String airavataCredStoreToken, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
getSSHPubKey_call method_call = new getSSHPubKey_call(airavataCredStoreToken, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class getSSHPubKey_call extends org.apache.thrift.async.TAsyncMethodCall {
private String airavataCredStoreToken;
public getSSHPubKey_call(String airavataCredStoreToken, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.airavataCredStoreToken = airavataCredStoreToken;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getSSHPubKey", org.apache.thrift.protocol.TMessageType.CALL, 0));
getSSHPubKey_args args = new getSSHPubKey_args();
args.setAiravataCredStoreToken(airavataCredStoreToken);
args.write(prot);
prot.writeMessageEnd();
}
public String getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_getSSHPubKey();
}
}
public void getAllUserSSHPubKeys(String userName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
getAllUserSSHPubKeys_call method_call = new getAllUserSSHPubKeys_call(userName, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class getAllUserSSHPubKeys_call extends org.apache.thrift.async.TAsyncMethodCall {
private String userName;
public getAllUserSSHPubKeys_call(String userName, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.userName = userName;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getAllUserSSHPubKeys", org.apache.thrift.protocol.TMessageType.CALL, 0));
getAllUserSSHPubKeys_args args = new getAllUserSSHPubKeys_args();
args.setUserName(userName);
args.write(prot);
prot.writeMessageEnd();
}
public Map<String,String> getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_getAllUserSSHPubKeys();
}
}
}
/**
 * Synchronous server-side processor: dispatches incoming wire method names to
 * ProcessFunction handlers, which invoke the user-supplied Iface
 * implementation and serialize the result back to the client.
 */
public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {
private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
public Processor(I iface) {
super(iface, getProcessMap(new HashMap<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>>()));
}
protected Processor(I iface, Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
super(iface, getProcessMap(processMap));
}
// Registers one handler per service method, keyed by the wire method name.
private static <I extends Iface> Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> getProcessMap(Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
processMap.put("generateAndRegisterSSHKeys", new generateAndRegisterSSHKeys());
processMap.put("getSSHPubKey", new getSSHPubKey());
processMap.put("getAllUserSSHPubKeys", new getAllUserSSHPubKeys());
return processMap;
}
/** Handler: delegates generateAndRegisterSSHKeys to the service implementation. */
public static class generateAndRegisterSSHKeys<I extends Iface> extends org.apache.thrift.ProcessFunction<I, generateAndRegisterSSHKeys_args> {
public generateAndRegisterSSHKeys() {
super("generateAndRegisterSSHKeys");
}
public generateAndRegisterSSHKeys_args getEmptyArgsInstance() {
return new generateAndRegisterSSHKeys_args();
}
protected boolean isOneway() {
// false: a reply (or exception) is always written back to the client.
return false;
}
public generateAndRegisterSSHKeys_result getResult(I iface, generateAndRegisterSSHKeys_args args) throws org.apache.thrift.TException {
generateAndRegisterSSHKeys_result result = new generateAndRegisterSSHKeys_result();
result.success = iface.generateAndRegisterSSHKeys(args.gatewayId, args.userName);
return result;
}
}
/** Handler: delegates getSSHPubKey to the service implementation. */
public static class getSSHPubKey<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getSSHPubKey_args> {
public getSSHPubKey() {
super("getSSHPubKey");
}
public getSSHPubKey_args getEmptyArgsInstance() {
return new getSSHPubKey_args();
}
protected boolean isOneway() {
return false;
}
public getSSHPubKey_result getResult(I iface, getSSHPubKey_args args) throws org.apache.thrift.TException {
getSSHPubKey_result result = new getSSHPubKey_result();
result.success = iface.getSSHPubKey(args.airavataCredStoreToken);
return result;
}
}
/** Handler: delegates getAllUserSSHPubKeys to the service implementation. */
public static class getAllUserSSHPubKeys<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getAllUserSSHPubKeys_args> {
public getAllUserSSHPubKeys() {
super("getAllUserSSHPubKeys");
}
public getAllUserSSHPubKeys_args getEmptyArgsInstance() {
return new getAllUserSSHPubKeys_args();
}
protected boolean isOneway() {
return false;
}
public getAllUserSSHPubKeys_result getResult(I iface, getAllUserSSHPubKeys_args args) throws org.apache.thrift.TException {
getAllUserSSHPubKeys_result result = new getAllUserSSHPubKeys_result();
result.success = iface.getAllUserSSHPubKeys(args.userName);
return result;
}
}
}
/**
 * Asynchronous server-side processor: dispatches incoming wire method names to
 * AsyncProcessFunction handlers. Each handler starts the call on the
 * AsyncIface implementation and writes the reply from the completion callback.
 */
public static class AsyncProcessor<I extends AsyncIface> extends org.apache.thrift.TBaseAsyncProcessor<I> {
private static final Logger LOGGER = LoggerFactory.getLogger(AsyncProcessor.class.getName());
public AsyncProcessor(I iface) {
super(iface, getProcessMap(new HashMap<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>>()));
}
protected AsyncProcessor(I iface, Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
super(iface, getProcessMap(processMap));
}
// Registers one async handler per service method, keyed by the wire method name.
private static <I extends AsyncIface> Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase,?>> getProcessMap(Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
processMap.put("generateAndRegisterSSHKeys", new generateAndRegisterSSHKeys());
processMap.put("getSSHPubKey", new getSSHPubKey());
processMap.put("getAllUserSSHPubKeys", new getAllUserSSHPubKeys());
return processMap;
}
/** Async handler for generateAndRegisterSSHKeys. */
public static class generateAndRegisterSSHKeys<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, generateAndRegisterSSHKeys_args, String> {
public generateAndRegisterSSHKeys() {
super("generateAndRegisterSSHKeys");
}
public generateAndRegisterSSHKeys_args getEmptyArgsInstance() {
return new generateAndRegisterSSHKeys_args();
}
// Builds the callback that writes the reply frame once the implementation completes.
public AsyncMethodCallback<String> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<String>() {
public void onComplete(String o) {
generateAndRegisterSSHKeys_result result = new generateAndRegisterSSHKeys_result();
result.success = o;
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
// Reached only if the reply could not be written; drop the connection buffer.
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
generateAndRegisterSSHKeys_result result = new generateAndRegisterSSHKeys_result();
// Generator artifact: the IDL declares no service exceptions, so every error
// is reported unconditionally as an INTERNAL_ERROR application exception.
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, generateAndRegisterSSHKeys_args args, org.apache.thrift.async.AsyncMethodCallback<String> resultHandler) throws TException {
iface.generateAndRegisterSSHKeys(args.gatewayId, args.userName,resultHandler);
}
}
/** Async handler for getSSHPubKey. */
public static class getSSHPubKey<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getSSHPubKey_args, String> {
public getSSHPubKey() {
super("getSSHPubKey");
}
public getSSHPubKey_args getEmptyArgsInstance() {
return new getSSHPubKey_args();
}
public AsyncMethodCallback<String> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<String>() {
public void onComplete(String o) {
getSSHPubKey_result result = new getSSHPubKey_result();
result.success = o;
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
getSSHPubKey_result result = new getSSHPubKey_result();
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, getSSHPubKey_args args, org.apache.thrift.async.AsyncMethodCallback<String> resultHandler) throws TException {
iface.getSSHPubKey(args.airavataCredStoreToken,resultHandler);
}
}
/** Async handler for getAllUserSSHPubKeys. */
public static class getAllUserSSHPubKeys<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getAllUserSSHPubKeys_args, Map<String,String>> {
public getAllUserSSHPubKeys() {
super("getAllUserSSHPubKeys");
}
public getAllUserSSHPubKeys_args getEmptyArgsInstance() {
return new getAllUserSSHPubKeys_args();
}
public AsyncMethodCallback<Map<String,String>> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<Map<String,String>>() {
public void onComplete(Map<String,String> o) {
getAllUserSSHPubKeys_result result = new getAllUserSSHPubKeys_result();
result.success = o;
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
getAllUserSSHPubKeys_result result = new getAllUserSSHPubKeys_result();
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, getAllUserSSHPubKeys_args args, org.apache.thrift.async.AsyncMethodCallback<Map<String,String>> resultHandler) throws TException {
iface.getAllUserSSHPubKeys(args.userName,resultHandler);
}
}
}
/**
 * Thrift argument struct for generateAndRegisterSSHKeys: carries the two
 * required request fields, gatewayId and userName. Supports both the standard
 * (field-tagged) and tuple (compact positional) serialization schemes.
 */
public static class generateAndRegisterSSHKeys_args implements org.apache.thrift.TBase<generateAndRegisterSSHKeys_args, generateAndRegisterSSHKeys_args._Fields>, java.io.Serializable, Cloneable, Comparable<generateAndRegisterSSHKeys_args> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("generateAndRegisterSSHKeys_args");
private static final org.apache.thrift.protocol.TField GATEWAY_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("gatewayId", org.apache.thrift.protocol.TType.STRING, (short)1);
private static final org.apache.thrift.protocol.TField USER_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("userName", org.apache.thrift.protocol.TType.STRING, (short)2);
// Scheme factories selected at runtime from the protocol's scheme class.
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new generateAndRegisterSSHKeys_argsStandardSchemeFactory());
schemes.put(TupleScheme.class, new generateAndRegisterSSHKeys_argsTupleSchemeFactory());
}
public String gatewayId; // required
public String userName; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
GATEWAY_ID((short)1, "gatewayId"),
USER_NAME((short)2, "userName");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // GATEWAY_ID
return GATEWAY_ID;
case 2: // USER_NAME
return USER_NAME;
default:
return null;
}
}
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
// Field metadata (both fields are REQUIRED strings), registered for runtime introspection.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.GATEWAY_ID, new org.apache.thrift.meta_data.FieldMetaData("gatewayId", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.USER_NAME, new org.apache.thrift.meta_data.FieldMetaData("userName", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(generateAndRegisterSSHKeys_args.class, metaDataMap);
}
public generateAndRegisterSSHKeys_args() {
}
public generateAndRegisterSSHKeys_args(
String gatewayId,
String userName)
{
this();
this.gatewayId = gatewayId;
this.userName = userName;
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public generateAndRegisterSSHKeys_args(generateAndRegisterSSHKeys_args other) {
if (other.isSetGatewayId()) {
this.gatewayId = other.gatewayId;
}
if (other.isSetUserName()) {
this.userName = other.userName;
}
}
public generateAndRegisterSSHKeys_args deepCopy() {
return new generateAndRegisterSSHKeys_args(this);
}
@Override
public void clear() {
this.gatewayId = null;
this.userName = null;
}
public String getGatewayId() {
return this.gatewayId;
}
public generateAndRegisterSSHKeys_args setGatewayId(String gatewayId) {
this.gatewayId = gatewayId;
return this;
}
public void unsetGatewayId() {
this.gatewayId = null;
}
/** Returns true if field gatewayId is set (has been assigned a value) and false otherwise */
public boolean isSetGatewayId() {
return this.gatewayId != null;
}
public void setGatewayIdIsSet(boolean value) {
if (!value) {
this.gatewayId = null;
}
}
public String getUserName() {
return this.userName;
}
public generateAndRegisterSSHKeys_args setUserName(String userName) {
this.userName = userName;
return this;
}
public void unsetUserName() {
this.userName = null;
}
/** Returns true if field userName is set (has been assigned a value) and false otherwise */
public boolean isSetUserName() {
return this.userName != null;
}
public void setUserNameIsSet(boolean value) {
if (!value) {
this.userName = null;
}
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case GATEWAY_ID:
if (value == null) {
unsetGatewayId();
} else {
setGatewayId((String)value);
}
break;
case USER_NAME:
if (value == null) {
unsetUserName();
} else {
setUserName((String)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case GATEWAY_ID:
return getGatewayId();
case USER_NAME:
return getUserName();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case GATEWAY_ID:
return isSetGatewayId();
case USER_NAME:
return isSetUserName();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof generateAndRegisterSSHKeys_args)
return this.equals((generateAndRegisterSSHKeys_args)that);
return false;
}
public boolean equals(generateAndRegisterSSHKeys_args that) {
if (that == null)
return false;
boolean this_present_gatewayId = true && this.isSetGatewayId();
boolean that_present_gatewayId = true && that.isSetGatewayId();
if (this_present_gatewayId || that_present_gatewayId) {
if (!(this_present_gatewayId && that_present_gatewayId))
return false;
if (!this.gatewayId.equals(that.gatewayId))
return false;
}
boolean this_present_userName = true && this.isSetUserName();
boolean that_present_userName = true && that.isSetUserName();
if (this_present_userName || that_present_userName) {
if (!(this_present_userName && that_present_userName))
return false;
if (!this.userName.equals(that.userName))
return false;
}
return true;
}
@Override
public int hashCode() {
// Consistent with equals: hashes presence flags plus the set values.
List<Object> list = new ArrayList<Object>();
boolean present_gatewayId = true && (isSetGatewayId());
list.add(present_gatewayId);
if (present_gatewayId)
list.add(gatewayId);
boolean present_userName = true && (isSetUserName());
list.add(present_userName);
if (present_userName)
list.add(userName);
return list.hashCode();
}
@Override
public int compareTo(generateAndRegisterSSHKeys_args other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
// Order by field presence first, then by field value, in field-id order.
lastComparison = Boolean.valueOf(isSetGatewayId()).compareTo(other.isSetGatewayId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetGatewayId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.gatewayId, other.gatewayId);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetUserName()).compareTo(other.isSetUserName());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetUserName()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.userName, other.userName);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("generateAndRegisterSSHKeys_args(");
boolean first = true;
sb.append("gatewayId:");
if (this.gatewayId == null) {
sb.append("null");
} else {
sb.append(this.gatewayId);
}
// NOTE(review): 'first' is set false and immediately tested, so the separator
// below is always appended — a generator artifact; behavior is correct here.
first = false;
if (!first) sb.append(", ");
sb.append("userName:");
if (this.userName == null) {
sb.append("null");
} else {
sb.append(this.userName);
}
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
if (gatewayId == null) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'gatewayId' was not present! Struct: " + toString());
}
if (userName == null) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'userName' was not present! Struct: " + toString());
}
// check for sub-struct validity
}
// Java serialization is bridged through Thrift's compact protocol.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class generateAndRegisterSSHKeys_argsStandardSchemeFactory implements SchemeFactory {
public generateAndRegisterSSHKeys_argsStandardScheme getScheme() {
return new generateAndRegisterSSHKeys_argsStandardScheme();
}
}
/** Standard scheme: field-tagged wire format; tolerates unknown/mistyped fields by skipping. */
private static class generateAndRegisterSSHKeys_argsStandardScheme extends StandardScheme<generateAndRegisterSSHKeys_args> {
public void read(org.apache.thrift.protocol.TProtocol iprot, generateAndRegisterSSHKeys_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // GATEWAY_ID
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.gatewayId = iprot.readString();
struct.setGatewayIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // USER_NAME
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.userName = iprot.readString();
struct.setUserNameIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, generateAndRegisterSSHKeys_args struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.gatewayId != null) {
oprot.writeFieldBegin(GATEWAY_ID_FIELD_DESC);
oprot.writeString(struct.gatewayId);
oprot.writeFieldEnd();
}
if (struct.userName != null) {
oprot.writeFieldBegin(USER_NAME_FIELD_DESC);
oprot.writeString(struct.userName);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class generateAndRegisterSSHKeys_argsTupleSchemeFactory implements SchemeFactory {
public generateAndRegisterSSHKeys_argsTupleScheme getScheme() {
return new generateAndRegisterSSHKeys_argsTupleScheme();
}
}
/** Tuple scheme: both fields are REQUIRED, so they are written positionally with no presence bitset. */
private static class generateAndRegisterSSHKeys_argsTupleScheme extends TupleScheme<generateAndRegisterSSHKeys_args> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, generateAndRegisterSSHKeys_args struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
oprot.writeString(struct.gatewayId);
oprot.writeString(struct.userName);
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, generateAndRegisterSSHKeys_args struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
struct.gatewayId = iprot.readString();
struct.setGatewayIdIsSet(true);
struct.userName = iprot.readString();
struct.setUserNameIsSet(true);
}
}
}
public static class generateAndRegisterSSHKeys_result implements org.apache.thrift.TBase<generateAndRegisterSSHKeys_result, generateAndRegisterSSHKeys_result._Fields>, java.io.Serializable, Cloneable, Comparable<generateAndRegisterSSHKeys_result> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("generateAndRegisterSSHKeys_result");
private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRING, (short)0);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new generateAndRegisterSSHKeys_resultStandardSchemeFactory());
schemes.put(TupleScheme.class, new generateAndRegisterSSHKeys_resultTupleSchemeFactory());
}
public String success; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
SUCCESS((short)0, "success");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 0: // SUCCESS
return SUCCESS;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(generateAndRegisterSSHKeys_result.class, metaDataMap);
}
public generateAndRegisterSSHKeys_result() {
}
public generateAndRegisterSSHKeys_result(
String success)
{
this();
this.success = success;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public generateAndRegisterSSHKeys_result(generateAndRegisterSSHKeys_result other) {
if (other.isSetSuccess()) {
this.success = other.success;
}
}
public generateAndRegisterSSHKeys_result deepCopy() {
return new generateAndRegisterSSHKeys_result(this);
}
@Override
public void clear() {
this.success = null;
}
public String getSuccess() {
return this.success;
}
public generateAndRegisterSSHKeys_result setSuccess(String success) {
this.success = success;
return this;
}
public void unsetSuccess() {
this.success = null;
}
/** Returns true if field success is set (has been assigned a value) and false otherwise */
public boolean isSetSuccess() {
return this.success != null;
}
public void setSuccessIsSet(boolean value) {
if (!value) {
this.success = null;
}
}
// Generic, enum-keyed setter used by the Thrift runtime; null unsets the field.
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case SUCCESS:
if (value == null) {
unsetSuccess();
} else {
setSuccess((String)value);
}
break;
}
}
// Generic, enum-keyed getter counterpart of setFieldValue.
public Object getFieldValue(_Fields field) {
switch (field) {
case SUCCESS:
return getSuccess();
}
// Unreachable for valid _Fields values; guards against future enum additions.
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case SUCCESS:
return isSetSuccess();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof generateAndRegisterSSHKeys_result)
return this.equals((generateAndRegisterSSHKeys_result)that);
return false;
}
// Typed equals: two instances are equal when 'success' is set on both and equal, or unset on both.
public boolean equals(generateAndRegisterSSHKeys_result that) {
if (that == null)
return false;
boolean this_present_success = true && this.isSetSuccess();
boolean that_present_success = true && that.isSetSuccess();
if (this_present_success || that_present_success) {
if (!(this_present_success && that_present_success))
return false;
if (!this.success.equals(that.success))
return false;
}
return true;
}
// Consistent with equals: hashes the same (presence, value) pair that equals compares.
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_success = true && (isSetSuccess());
list.add(present_success);
if (present_success)
list.add(success);
return list.hashCode();
}
// Orders first by field presence (unset before set), then by field value.
@Override
public int compareTo(generateAndRegisterSSHKeys_result other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetSuccess()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
// Delegates wire (de)serialization to the scheme matching the protocol (standard vs. tuple).
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("generateAndRegisterSSHKeys_result(");
boolean first = true;
sb.append("success:");
if (this.success == null) {
sb.append("null");
} else {
sb.append(this.success);
}
first = false;
sb.append(")");
return sb.toString();
}
// No required fields in a result struct, so nothing to check here.
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
// Java serialization hook: encodes the struct with TCompactProtocol instead of default field serialization.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
// Wrap in IOException to satisfy the writeObject contract while preserving the cause.
throw new java.io.IOException(te);
}
}
// Java deserialization hook: mirror of writeObject.
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class generateAndRegisterSSHKeys_resultStandardSchemeFactory implements SchemeFactory {
public generateAndRegisterSSHKeys_resultStandardScheme getScheme() {
return new generateAndRegisterSSHKeys_resultStandardScheme();
}
}
// Field-tagged (standard) protocol encoding: each field carries its id/type; unknown fields are skipped.
private static class generateAndRegisterSSHKeys_resultStandardScheme extends StandardScheme<generateAndRegisterSSHKeys_result> {
public void read(org.apache.thrift.protocol.TProtocol iprot, generateAndRegisterSSHKeys_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 0: // SUCCESS
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.success = iprot.readString();
struct.setSuccessIsSet(true);
} else {
// Type mismatch: skip the value for forward compatibility.
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, generateAndRegisterSSHKeys_result struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
// Unset (null) field is simply omitted from the wire.
if (struct.success != null) {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
oprot.writeString(struct.success);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class generateAndRegisterSSHKeys_resultTupleSchemeFactory implements SchemeFactory {
public generateAndRegisterSSHKeys_resultTupleScheme getScheme() {
return new generateAndRegisterSSHKeys_resultTupleScheme();
}
}
// Compact (tuple) encoding: a leading presence bitset followed only by the values that are set.
private static class generateAndRegisterSSHKeys_resultTupleScheme extends TupleScheme<generateAndRegisterSSHKeys_result> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, generateAndRegisterSSHKeys_result struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
BitSet optionals = new BitSet();
if (struct.isSetSuccess()) {
optionals.set(0);
}
oprot.writeBitSet(optionals, 1);
if (struct.isSetSuccess()) {
oprot.writeString(struct.success);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, generateAndRegisterSSHKeys_result struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
BitSet incoming = iprot.readBitSet(1);
if (incoming.get(0)) {
struct.success = iprot.readString();
struct.setSuccessIsSet(true);
}
}
}
}
/**
 * Thrift-generated argument wrapper for the getSSHPubKey RPC.
 * Carries a single REQUIRED field, 'airavataCredStoreToken' (field id 1).
 * Generated code: do not hand-edit logic; regenerate from the IDL instead.
 */
public static class getSSHPubKey_args implements org.apache.thrift.TBase<getSSHPubKey_args, getSSHPubKey_args._Fields>, java.io.Serializable, Cloneable, Comparable<getSSHPubKey_args> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getSSHPubKey_args");
private static final org.apache.thrift.protocol.TField AIRAVATA_CRED_STORE_TOKEN_FIELD_DESC = new org.apache.thrift.protocol.TField("airavataCredStoreToken", org.apache.thrift.protocol.TType.STRING, (short)1);
// Maps each scheme family (standard/tuple) to the factory producing its (de)serializer.
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new getSSHPubKey_argsStandardSchemeFactory());
schemes.put(TupleScheme.class, new getSSHPubKey_argsTupleSchemeFactory());
}
public String airavataCredStoreToken; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
AIRAVATA_CRED_STORE_TOKEN((short)1, "airavataCredStoreToken");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // AIRAVATA_CRED_STORE_TOKEN
return AIRAVATA_CRED_STORE_TOKEN;
default:
return null;
}
}
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.AIRAVATA_CRED_STORE_TOKEN, new org.apache.thrift.meta_data.FieldMetaData("airavataCredStoreToken", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getSSHPubKey_args.class, metaDataMap);
}
// No-arg constructor required by the Thrift deserialization machinery.
public getSSHPubKey_args() {
}
public getSSHPubKey_args(
String airavataCredStoreToken)
{
this();
this.airavataCredStoreToken = airavataCredStoreToken;
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public getSSHPubKey_args(getSSHPubKey_args other) {
if (other.isSetAiravataCredStoreToken()) {
this.airavataCredStoreToken = other.airavataCredStoreToken;
}
}
public getSSHPubKey_args deepCopy() {
return new getSSHPubKey_args(this);
}
@Override
public void clear() {
this.airavataCredStoreToken = null;
}
public String getAiravataCredStoreToken() {
return this.airavataCredStoreToken;
}
// Fluent setter; returns this for call chaining.
public getSSHPubKey_args setAiravataCredStoreToken(String airavataCredStoreToken) {
this.airavataCredStoreToken = airavataCredStoreToken;
return this;
}
public void unsetAiravataCredStoreToken() {
this.airavataCredStoreToken = null;
}
/** Returns true if field airavataCredStoreToken is set (has been assigned a value) and false otherwise */
public boolean isSetAiravataCredStoreToken() {
return this.airavataCredStoreToken != null;
}
public void setAiravataCredStoreTokenIsSet(boolean value) {
if (!value) {
this.airavataCredStoreToken = null;
}
}
// Generic, enum-keyed field mutator used by the Thrift runtime; null unsets.
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case AIRAVATA_CRED_STORE_TOKEN:
if (value == null) {
unsetAiravataCredStoreToken();
} else {
setAiravataCredStoreToken((String)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case AIRAVATA_CRED_STORE_TOKEN:
return getAiravataCredStoreToken();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case AIRAVATA_CRED_STORE_TOKEN:
return isSetAiravataCredStoreToken();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof getSSHPubKey_args)
return this.equals((getSSHPubKey_args)that);
return false;
}
// Typed equals: equal when the token is set on both and equal, or unset on both.
public boolean equals(getSSHPubKey_args that) {
if (that == null)
return false;
boolean this_present_airavataCredStoreToken = true && this.isSetAiravataCredStoreToken();
boolean that_present_airavataCredStoreToken = true && that.isSetAiravataCredStoreToken();
if (this_present_airavataCredStoreToken || that_present_airavataCredStoreToken) {
if (!(this_present_airavataCredStoreToken && that_present_airavataCredStoreToken))
return false;
if (!this.airavataCredStoreToken.equals(that.airavataCredStoreToken))
return false;
}
return true;
}
// Consistent with equals: hashes the same (presence, value) pair.
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_airavataCredStoreToken = true && (isSetAiravataCredStoreToken());
list.add(present_airavataCredStoreToken);
if (present_airavataCredStoreToken)
list.add(airavataCredStoreToken);
return list.hashCode();
}
// Orders by presence first (unset before set), then by value.
@Override
public int compareTo(getSSHPubKey_args other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetAiravataCredStoreToken()).compareTo(other.isSetAiravataCredStoreToken());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetAiravataCredStoreToken()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.airavataCredStoreToken, other.airavataCredStoreToken);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
// Delegates wire (de)serialization to the scheme matching the protocol.
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("getSSHPubKey_args(");
boolean first = true;
sb.append("airavataCredStoreToken:");
if (this.airavataCredStoreToken == null) {
sb.append("null");
} else {
sb.append(this.airavataCredStoreToken);
}
first = false;
sb.append(")");
return sb.toString();
}
// Enforces the REQUIRED constraint on airavataCredStoreToken before/after wire transfer.
public void validate() throws org.apache.thrift.TException {
// check for required fields
if (airavataCredStoreToken == null) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'airavataCredStoreToken' was not present! Struct: " + toString());
}
// check for sub-struct validity
}
// Java serialization hooks route through TCompactProtocol rather than default field serialization.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class getSSHPubKey_argsStandardSchemeFactory implements SchemeFactory {
public getSSHPubKey_argsStandardScheme getScheme() {
return new getSSHPubKey_argsStandardScheme();
}
}
// Field-tagged (standard) protocol encoding; unknown/mistyped fields are skipped for compatibility.
private static class getSSHPubKey_argsStandardScheme extends StandardScheme<getSSHPubKey_args> {
public void read(org.apache.thrift.protocol.TProtocol iprot, getSSHPubKey_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // AIRAVATA_CRED_STORE_TOKEN
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.airavataCredStoreToken = iprot.readString();
struct.setAiravataCredStoreTokenIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, getSSHPubKey_args struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.airavataCredStoreToken != null) {
oprot.writeFieldBegin(AIRAVATA_CRED_STORE_TOKEN_FIELD_DESC);
oprot.writeString(struct.airavataCredStoreToken);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class getSSHPubKey_argsTupleSchemeFactory implements SchemeFactory {
public getSSHPubKey_argsTupleScheme getScheme() {
return new getSSHPubKey_argsTupleScheme();
}
}
// Tuple encoding: the field is REQUIRED, so it is written bare with no presence bitset.
private static class getSSHPubKey_argsTupleScheme extends TupleScheme<getSSHPubKey_args> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, getSSHPubKey_args struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
oprot.writeString(struct.airavataCredStoreToken);
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, getSSHPubKey_args struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
struct.airavataCredStoreToken = iprot.readString();
struct.setAiravataCredStoreTokenIsSet(true);
}
}
}
/**
 * Thrift-generated result wrapper for the getSSHPubKey RPC.
 * Carries the optional 'success' STRING field (field id 0) holding the public key.
 * Generated code: do not hand-edit logic; regenerate from the IDL instead.
 */
public static class getSSHPubKey_result implements org.apache.thrift.TBase<getSSHPubKey_result, getSSHPubKey_result._Fields>, java.io.Serializable, Cloneable, Comparable<getSSHPubKey_result> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getSSHPubKey_result");
private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRING, (short)0);
// Maps each scheme family (standard/tuple) to the factory producing its (de)serializer.
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new getSSHPubKey_resultStandardSchemeFactory());
schemes.put(TupleScheme.class, new getSSHPubKey_resultTupleSchemeFactory());
}
public String success; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
SUCCESS((short)0, "success");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 0: // SUCCESS
return SUCCESS;
default:
return null;
}
}
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getSSHPubKey_result.class, metaDataMap);
}
// No-arg constructor required by the Thrift deserialization machinery.
public getSSHPubKey_result() {
}
public getSSHPubKey_result(
String success)
{
this();
this.success = success;
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public getSSHPubKey_result(getSSHPubKey_result other) {
if (other.isSetSuccess()) {
this.success = other.success;
}
}
public getSSHPubKey_result deepCopy() {
return new getSSHPubKey_result(this);
}
@Override
public void clear() {
this.success = null;
}
public String getSuccess() {
return this.success;
}
// Fluent setter; returns this for call chaining.
public getSSHPubKey_result setSuccess(String success) {
this.success = success;
return this;
}
public void unsetSuccess() {
this.success = null;
}
/** Returns true if field success is set (has been assigned a value) and false otherwise */
public boolean isSetSuccess() {
return this.success != null;
}
public void setSuccessIsSet(boolean value) {
if (!value) {
this.success = null;
}
}
// Generic, enum-keyed field mutator used by the Thrift runtime; null unsets.
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case SUCCESS:
if (value == null) {
unsetSuccess();
} else {
setSuccess((String)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case SUCCESS:
return getSuccess();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case SUCCESS:
return isSetSuccess();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof getSSHPubKey_result)
return this.equals((getSSHPubKey_result)that);
return false;
}
// Typed equals: equal when 'success' is set on both and equal, or unset on both.
public boolean equals(getSSHPubKey_result that) {
if (that == null)
return false;
boolean this_present_success = true && this.isSetSuccess();
boolean that_present_success = true && that.isSetSuccess();
if (this_present_success || that_present_success) {
if (!(this_present_success && that_present_success))
return false;
if (!this.success.equals(that.success))
return false;
}
return true;
}
// Consistent with equals: hashes the same (presence, value) pair.
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_success = true && (isSetSuccess());
list.add(present_success);
if (present_success)
list.add(success);
return list.hashCode();
}
// Orders by presence first (unset before set), then by value.
@Override
public int compareTo(getSSHPubKey_result other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetSuccess()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
// Delegates wire (de)serialization to the scheme matching the protocol.
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("getSSHPubKey_result(");
boolean first = true;
sb.append("success:");
if (this.success == null) {
sb.append("null");
} else {
sb.append(this.success);
}
first = false;
sb.append(")");
return sb.toString();
}
// No required fields in a result struct, so nothing to check here.
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
// Java serialization hooks route through TCompactProtocol rather than default field serialization.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class getSSHPubKey_resultStandardSchemeFactory implements SchemeFactory {
public getSSHPubKey_resultStandardScheme getScheme() {
return new getSSHPubKey_resultStandardScheme();
}
}
// Field-tagged (standard) protocol encoding; unknown/mistyped fields are skipped for compatibility.
private static class getSSHPubKey_resultStandardScheme extends StandardScheme<getSSHPubKey_result> {
public void read(org.apache.thrift.protocol.TProtocol iprot, getSSHPubKey_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 0: // SUCCESS
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.success = iprot.readString();
struct.setSuccessIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, getSSHPubKey_result struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
// Unset (null) field is simply omitted from the wire.
if (struct.success != null) {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
oprot.writeString(struct.success);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class getSSHPubKey_resultTupleSchemeFactory implements SchemeFactory {
public getSSHPubKey_resultTupleScheme getScheme() {
return new getSSHPubKey_resultTupleScheme();
}
}
// Tuple encoding: presence bitset first, then only the values that are set.
private static class getSSHPubKey_resultTupleScheme extends TupleScheme<getSSHPubKey_result> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, getSSHPubKey_result struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
BitSet optionals = new BitSet();
if (struct.isSetSuccess()) {
optionals.set(0);
}
oprot.writeBitSet(optionals, 1);
if (struct.isSetSuccess()) {
oprot.writeString(struct.success);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, getSSHPubKey_result struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
BitSet incoming = iprot.readBitSet(1);
if (incoming.get(0)) {
struct.success = iprot.readString();
struct.setSuccessIsSet(true);
}
}
}
}
// Thrift-generated argument wrapper for getAllUserSSHPubKeys: one REQUIRED 'userName' STRING field (id 1).
// Generated code: do not hand-edit logic; regenerate from the IDL instead.
public static class getAllUserSSHPubKeys_args implements org.apache.thrift.TBase<getAllUserSSHPubKeys_args, getAllUserSSHPubKeys_args._Fields>, java.io.Serializable, Cloneable, Comparable<getAllUserSSHPubKeys_args> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getAllUserSSHPubKeys_args");
private static final org.apache.thrift.protocol.TField USER_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("userName", org.apache.thrift.protocol.TType.STRING, (short)1);
// Maps each scheme family (standard/tuple) to the factory producing its (de)serializer.
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new getAllUserSSHPubKeys_argsStandardSchemeFactory());
schemes.put(TupleScheme.class, new getAllUserSSHPubKeys_argsTupleSchemeFactory());
}
public String userName; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
USER_NAME((short)1, "userName");
// Secondary index for lookup by field name.
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // USER_NAME
return USER_NAME;
default:
return null;
}
}
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
// Thrift field metadata: 'userName' is REQUIRED; registered globally for reflection-based lookup.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.USER_NAME, new org.apache.thrift.meta_data.FieldMetaData("userName", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getAllUserSSHPubKeys_args.class, metaDataMap);
}
// No-arg constructor required by the Thrift deserialization machinery.
public getAllUserSSHPubKeys_args() {
}
// Convenience constructor setting the lone 'userName' field.
public getAllUserSSHPubKeys_args(
String userName)
{
this();
this.userName = userName;
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public getAllUserSSHPubKeys_args(getAllUserSSHPubKeys_args other) {
// String is immutable, so sharing the reference is an effectively deep copy.
if (other.isSetUserName()) {
this.userName = other.userName;
}
}
// Polymorphic copy hook used by the Thrift runtime.
public getAllUserSSHPubKeys_args deepCopy() {
return new getAllUserSSHPubKeys_args(this);
}
// Resets the struct to its freshly-constructed state.
@Override
public void clear() {
this.userName = null;
}
public String getUserName() {
return this.userName;
}
// Fluent setter; returns this for call chaining.
public getAllUserSSHPubKeys_args setUserName(String userName) {
this.userName = userName;
return this;
}
public void unsetUserName() {
this.userName = null;
}
/** Returns true if field userName is set (has been assigned a value) and false otherwise */
public boolean isSetUserName() {
// "set" is tracked by nullness since the field is a reference type.
return this.userName != null;
}
public void setUserNameIsSet(boolean value) {
if (!value) {
this.userName = null;
}
}
// Generic, enum-keyed setter used by the Thrift runtime; null unsets the field.
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case USER_NAME:
if (value == null) {
unsetUserName();
} else {
setUserName((String)value);
}
break;
}
}
// Generic, enum-keyed getter counterpart of setFieldValue.
public Object getFieldValue(_Fields field) {
switch (field) {
case USER_NAME:
return getUserName();
}
// Unreachable for valid _Fields values; guards against future enum additions.
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case USER_NAME:
return isSetUserName();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof getAllUserSSHPubKeys_args)
return this.equals((getAllUserSSHPubKeys_args)that);
return false;
}
// Typed equals: equal when 'userName' is set on both and equal, or unset on both.
public boolean equals(getAllUserSSHPubKeys_args that) {
if (that == null)
return false;
boolean this_present_userName = true && this.isSetUserName();
boolean that_present_userName = true && that.isSetUserName();
if (this_present_userName || that_present_userName) {
if (!(this_present_userName && that_present_userName))
return false;
if (!this.userName.equals(that.userName))
return false;
}
return true;
}
// Consistent with equals: hashes the same (presence, value) pair that equals compares.
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_userName = true && (isSetUserName());
list.add(present_userName);
if (present_userName)
list.add(userName);
return list.hashCode();
}
// Orders first by field presence (unset before set), then by field value.
@Override
public int compareTo(getAllUserSSHPubKeys_args other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetUserName()).compareTo(other.isSetUserName());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetUserName()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.userName, other.userName);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("getAllUserSSHPubKeys_args(");
boolean first = true;
sb.append("userName:");
if (this.userName == null) {
sb.append("null");
} else {
sb.append(this.userName);
}
first = false;
sb.append(")");
return sb.toString();
}
/**
 * Ensures required fields are present before serialization.
 *
 * @throws org.apache.thrift.protocol.TProtocolException if {@code userName} is null
 */
public void validate() throws org.apache.thrift.TException {
  // check for required fields
  if (userName == null) {
    throw new org.apache.thrift.protocol.TProtocolException("Required field 'userName' was not present! Struct: " + toString());
  }
  // check for sub-struct validity
}
/** Java serialization hook: delegates to Thrift compact-protocol encoding over the stream. */
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
  try {
    write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
  } catch (org.apache.thrift.TException te) {
    // Re-wrap so the signature stays a plain IOException; cause is preserved.
    throw new java.io.IOException(te);
  }
}
/** Java deserialization hook: delegates to Thrift compact-protocol decoding from the stream. */
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
  try {
    read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
  } catch (org.apache.thrift.TException te) {
    // Re-wrap so the signature stays a plain IOException; cause is preserved.
    throw new java.io.IOException(te);
  }
}
/** Factory registered in {@code schemes} for the field-tagged (standard) wire format. */
private static class getAllUserSSHPubKeys_argsStandardSchemeFactory implements SchemeFactory {
  public getAllUserSSHPubKeys_argsStandardScheme getScheme() {
    return new getAllUserSSHPubKeys_argsStandardScheme();
  }
}
/**
 * Standard (self-describing, field-tagged) serialization of the args struct.
 * Reads are tolerant: unexpected or mistyped fields are skipped so newer
 * peers can talk to older ones.
 */
private static class getAllUserSSHPubKeys_argsStandardScheme extends StandardScheme<getAllUserSSHPubKeys_args> {
  public void read(org.apache.thrift.protocol.TProtocol iprot, getAllUserSSHPubKeys_args struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TField schemeField;
    iprot.readStructBegin();
    while (true)
    {
      schemeField = iprot.readFieldBegin();
      if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
        break;  // end-of-struct marker
      }
      switch (schemeField.id) {
        case 1: // USER_NAME
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.userName = iprot.readString();
            struct.setUserNameIsSet(true);
          } else {
            // Wrong wire type for this id: skip rather than fail.
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        default:
          // Unknown field id: skip for forward compatibility.
          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
      }
      iprot.readFieldEnd();
    }
    iprot.readStructEnd();
    // check for required fields of primitive type, which can't be checked in the validate method
    struct.validate();
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot, getAllUserSSHPubKeys_args struct) throws org.apache.thrift.TException {
    struct.validate();
    oprot.writeStructBegin(STRUCT_DESC);
    if (struct.userName != null) {
      oprot.writeFieldBegin(USER_NAME_FIELD_DESC);
      oprot.writeString(struct.userName);
      oprot.writeFieldEnd();
    }
    oprot.writeFieldStop();
    oprot.writeStructEnd();
  }
}
/** Factory registered in {@code schemes} for the compact positional (tuple) wire format. */
private static class getAllUserSSHPubKeys_argsTupleSchemeFactory implements SchemeFactory {
  public getAllUserSSHPubKeys_argsTupleScheme getScheme() {
    return new getAllUserSSHPubKeys_argsTupleScheme();
  }
}
/**
 * Tuple serialization of the args struct: fields are written positionally with
 * no tags. {@code userName} is required, so it is written/read unconditionally
 * (no optionals bitset needed here).
 */
private static class getAllUserSSHPubKeys_argsTupleScheme extends TupleScheme<getAllUserSSHPubKeys_args> {
  @Override
  public void write(org.apache.thrift.protocol.TProtocol prot, getAllUserSSHPubKeys_args struct) throws org.apache.thrift.TException {
    TTupleProtocol oprot = (TTupleProtocol) prot;
    oprot.writeString(struct.userName);
  }
  @Override
  public void read(org.apache.thrift.protocol.TProtocol prot, getAllUserSSHPubKeys_args struct) throws org.apache.thrift.TException {
    TTupleProtocol iprot = (TTupleProtocol) prot;
    struct.userName = iprot.readString();
    struct.setUserNameIsSet(true);
  }
}
}
/**
 * Thrift-generated result wrapper for {@code getAllUserSSHPubKeys}: carries the
 * optional {@code success} map (presumably key-name -> SSH public key text —
 * TODO confirm against the service IDL). Generated code; edit with care.
 */
public static class getAllUserSSHPubKeys_result implements org.apache.thrift.TBase<getAllUserSSHPubKeys_result, getAllUserSSHPubKeys_result._Fields>, java.io.Serializable, Cloneable, Comparable<getAllUserSSHPubKeys_result> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getAllUserSSHPubKeys_result");
  private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.MAP, (short)0);
  // Serialization strategies, keyed by the protocol's scheme class.
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new getAllUserSSHPubKeys_resultStandardSchemeFactory());
    schemes.put(TupleScheme.class, new getAllUserSSHPubKeys_resultTupleSchemeFactory());
  }
  public Map<String,String> success; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    SUCCESS((short)0, "success");
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 0: // SUCCESS
          return SUCCESS;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  // Field metadata (name, requirement, wire types) exposed for reflective tooling.
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getAllUserSSHPubKeys_result.class, metaDataMap);
  }
  public getAllUserSSHPubKeys_result() {
  }
  public getAllUserSSHPubKeys_result(
    Map<String,String> success)
  {
    this();
    this.success = success;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public getAllUserSSHPubKeys_result(getAllUserSSHPubKeys_result other) {
    if (other.isSetSuccess()) {
      // Keys/values are immutable Strings, so copying the map is a deep copy.
      Map<String,String> __this__success = new HashMap<String,String>(other.success);
      this.success = __this__success;
    }
  }
  public getAllUserSSHPubKeys_result deepCopy() {
    return new getAllUserSSHPubKeys_result(this);
  }
  /** Resets this instance to the freshly-constructed (all-unset) state. */
  @Override
  public void clear() {
    this.success = null;
  }
  public int getSuccessSize() {
    return (this.success == null) ? 0 : this.success.size();
  }
  /** Adds one entry to {@code success}, lazily creating the map. */
  public void putToSuccess(String key, String val) {
    if (this.success == null) {
      this.success = new HashMap<String,String>();
    }
    this.success.put(key, val);
  }
  public Map<String,String> getSuccess() {
    return this.success;
  }
  public getAllUserSSHPubKeys_result setSuccess(Map<String,String> success) {
    this.success = success;
    return this;
  }
  public void unsetSuccess() {
    this.success = null;
  }
  /** Returns true if field success is set (has been assigned a value) and false otherwise */
  public boolean isSetSuccess() {
    return this.success != null;
  }
  public void setSuccessIsSet(boolean value) {
    if (!value) {
      this.success = null;
    }
  }
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case SUCCESS:
      if (value == null) {
        unsetSuccess();
      } else {
        setSuccess((Map<String,String>)value);
      }
      break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case SUCCESS:
      return getSuccess();
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case SUCCESS:
      return isSetSuccess();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof getAllUserSSHPubKeys_result)
      return this.equals((getAllUserSSHPubKeys_result)that);
    return false;
  }
  /** Field-wise equality: equal when both unset, or both set to equal maps. */
  public boolean equals(getAllUserSSHPubKeys_result that) {
    if (that == null)
      return false;
    boolean this_present_success = true && this.isSetSuccess();
    boolean that_present_success = true && that.isSetSuccess();
    if (this_present_success || that_present_success) {
      if (!(this_present_success && that_present_success))
        return false;
      if (!this.success.equals(that.success))
        return false;
    }
    return true;
  }
  /** Hash of [isSet flag, (map when set)] via List.hashCode(); consistent with equals. */
  @Override
  public int hashCode() {
    List<Object> list = new ArrayList<Object>();
    boolean present_success = true && (isSetSuccess());
    list.add(present_success);
    if (present_success)
      list.add(success);
    return list.hashCode();
  }
  /** Orders by isSet flag first, then by field value; consistent with equals. */
  @Override
  public int compareTo(getAllUserSSHPubKeys_result other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetSuccess()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  /** Resolves a Thrift wire field id to its {@code _Fields} constant, or null. */
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  /** Deserializes via the scheme matching the protocol. */
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  /** Serializes via the scheme matching the protocol. */
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("getAllUserSSHPubKeys_result(");
    boolean first = true;
    sb.append("success:");
    if (this.success == null) {
      sb.append("null");
    } else {
      sb.append(this.success);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  /** No required fields: validation is a no-op kept for interface parity. */
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  /** Java serialization hook: Thrift compact-protocol encoding over the stream. */
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  /** Java deserialization hook: Thrift compact-protocol decoding from the stream. */
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class getAllUserSSHPubKeys_resultStandardSchemeFactory implements SchemeFactory {
    public getAllUserSSHPubKeys_resultStandardScheme getScheme() {
      return new getAllUserSSHPubKeys_resultStandardScheme();
    }
  }
  /**
   * Standard (field-tagged) serialization. Unknown or mistyped fields are
   * skipped on read for forward compatibility.
   */
  private static class getAllUserSSHPubKeys_resultStandardScheme extends StandardScheme<getAllUserSSHPubKeys_result> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, getAllUserSSHPubKeys_result struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 0: // SUCCESS
            if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
              {
                org.apache.thrift.protocol.TMap _map0 = iprot.readMapBegin();
                // 2*size pre-sizing keeps the HashMap under its default load factor.
                struct.success = new HashMap<String,String>(2*_map0.size);
                String _key1;
                String _val2;
                for (int _i3 = 0; _i3 < _map0.size; ++_i3)
                {
                  _key1 = iprot.readString();
                  _val2 = iprot.readString();
                  struct.success.put(_key1, _val2);
                }
                iprot.readMapEnd();
              }
              struct.setSuccessIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, getAllUserSSHPubKeys_result struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.success != null) {
        oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
        {
          oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.success.size()));
          for (Map.Entry<String, String> _iter4 : struct.success.entrySet())
          {
            oprot.writeString(_iter4.getKey());
            oprot.writeString(_iter4.getValue());
          }
          oprot.writeMapEnd();
        }
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class getAllUserSSHPubKeys_resultTupleSchemeFactory implements SchemeFactory {
    public getAllUserSSHPubKeys_resultTupleScheme getScheme() {
      return new getAllUserSSHPubKeys_resultTupleScheme();
    }
  }
  /**
   * Tuple serialization: a leading bitset records which optional fields are
   * present, then present fields are written positionally without tags.
   */
  private static class getAllUserSSHPubKeys_resultTupleScheme extends TupleScheme<getAllUserSSHPubKeys_result> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, getAllUserSSHPubKeys_result struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetSuccess()) {
        optionals.set(0);
      }
      oprot.writeBitSet(optionals, 1);
      if (struct.isSetSuccess()) {
        {
          oprot.writeI32(struct.success.size());
          for (Map.Entry<String, String> _iter5 : struct.success.entrySet())
          {
            oprot.writeString(_iter5.getKey());
            oprot.writeString(_iter5.getValue());
          }
        }
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, getAllUserSSHPubKeys_result struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(1);
      if (incoming.get(0)) {
        {
          org.apache.thrift.protocol.TMap _map6 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
          struct.success = new HashMap<String,String>(2*_map6.size);
          String _key7;
          String _val8;
          for (int _i9 = 0; _i9 < _map6.size; ++_i9)
          {
            _key7 = iprot.readString();
            _val8 = iprot.readString();
            struct.success.put(_key7, _val8);
          }
        }
        struct.setSuccessIsSet(true);
      }
    }
  }
}
}
| 8,590 |
0 | Create_ds/airavata-sandbox/airavata-mock-multiplexed-api/mock-airavata-api-client/src/main/java/org/apache/airavata/api | Create_ds/airavata-sandbox/airavata-mock-multiplexed-api/mock-airavata-api-client/src/main/java/org/apache/airavata/api/client/TestAiravataClient.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.api.client;
import org.apache.airavata.api.credentials.CredentialManagementService;
import org.apache.airavata.api.gateway.management.GatewayManagementService;
/**
 * Manual smoke test for the mock Airavata API: generates an SSH key pair via
 * the credential-management service and registers a test gateway, printing
 * both results to stdout. Expects a server on localhost:9190.
 */
public class TestAiravataClient {
    public static void main(String[] args) {
        System.out.println("Testing Airavata API");
        try {
            CredentialManagementService.Client credentialClient =
                    MockAiravataClientFactory.createCredentialManagementClient("localhost", 9190);
            System.out.println("SSH Key is " + credentialClient.generateAndRegisterSSHKeys("test", "test"));
            GatewayManagementService.Client gatewayClient =
                    MockAiravataClientFactory.createGatewayManagementClient("localhost", 9190);
            System.out.println("Test Gateway Name is " + gatewayClient.registerGateway("test"));
        } catch (Exception e) {
            // Best-effort demo client: just dump the failure.
            e.printStackTrace();
        }
    }
}
| 8,591 |
0 | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/test/java/org/apache/airavata/credential/store/impl | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/test/java/org/apache/airavata/credential/store/impl/db/MappingDAOTest.java | package org.apache.airavata.credential.store.impl.db;
import junit.framework.Assert;
import org.apache.airavata.credential.store.Mapping;
import org.junit.Ignore;
import java.util.List;
/**
* DAO class for Mapping.
*/
@Ignore
/**
 * JUnit-3-style tests for {@code MappingDAO} (gateway / community-user /
 * portal-user mappings). Marked {@code @Ignore} — presumably because they
 * need a pre-populated database; verify before re-enabling.
 * NOTE(review): tests insert rows without cleanup, so they depend on
 * distinct gateway names ("gw3".."gw7") to stay independent.
 */
public class MappingDAOTest extends DAOBaseTestCase {
    private MappingDAO mappingDAO;
    public void setUp() throws Exception {
        super.setUp();
        mappingDAO = new MappingDAO(getDbUtil());
    }
    // Add a mapping, then resolve portal user -> community user.
    public void testAddMapping() throws Exception {
        Mapping m = new Mapping("gw3", "amila", "lahiru");
        mappingDAO.addMapping(m);
        String communityUser = mappingDAO.getMappingCommunityUser("lahiru", "gw3");
        Assert.assertEquals(communityUser, "amila");
    }
    // Deleting the mapping makes the lookup return null.
    public void testDeleteGatewayMapping() throws Exception {
        Mapping m = new Mapping("gw4", "amila", "lahiru");
        mappingDAO.addMapping(m);
        mappingDAO.deleteGatewayMapping("lahiru", "gw4");
        String communityUser = mappingDAO.getMappingCommunityUser("lahiru", "gw4");
        Assert.assertNull(communityUser);
    }
    // NOTE(review): mapping is added under "gw5" but deleted/queried under
    // "gw1" — the assertion passes only because "gw1" has no rows; confirm intent.
    public void testDeleteGatewayCommunityAccountMappings() throws Exception {
        Mapping m = new Mapping("gw5", "c2", "lahiru");
        mappingDAO.addMapping(m);
        mappingDAO.deleteGatewayCommunityAccountMappings("c2", "gw1");
        List<String> portalUsers = mappingDAO.getMappingPortalUsers("c2", "gw1");
        Assert.assertEquals(0, portalUsers.size());
    }
    // Community user -> list of portal users.
    public void testGetMappingPortalUsers() throws Exception {
        Mapping m = new Mapping("gw6", "c2", "lahiru");
        mappingDAO.addMapping(m);
        List<String> portalUsers = mappingDAO.getMappingPortalUsers("c2", "gw6");
        Assert.assertEquals(1, portalUsers.size());
        Assert.assertEquals("lahiru", portalUsers.get(0));
    }
    // Portal user -> community user.
    public void testGetMappingCommunityUser() throws Exception {
        Mapping m = new Mapping("gw7", "c2", "lahiru");
        mappingDAO.addMapping(m);
        String communityUser = mappingDAO.getMappingCommunityUser("lahiru", "gw7");
        Assert.assertEquals(communityUser, "c2");
    }
    // Smoke test only: prints the credential, asserts nothing.
    public void testGetCredentialsForPortalUser() throws Exception {
        String certificate = mappingDAO.getCredentials("lahiru", "gw2");
        System.out.println(certificate);
    }
}
| 8,592 |
0 | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/test/java/org/apache/airavata/credential/store/impl | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/test/java/org/apache/airavata/credential/store/impl/db/DAOBaseTestCase.java | package org.apache.airavata.credential.store.impl.db;
import junit.framework.TestCase;
import org.apache.airavata.credential.store.util.DBUtil;
/**
* Base test class for DB operation testing.
*/
/**
 * Base class for DAO tests: wires up a {@link DBUtil} against the embedded H2
 * test database (commented-out lines show the previous MySQL configurations).
 */
public class DAOBaseTestCase extends TestCase {
    private DBUtil dbUtil;
    public DAOBaseTestCase() {
        dbUtil = new DBUtil(//"jdbc:mysql://localhost/airavata",
                // "jdbc:mysql://localhost/airavata",
                "jdbc:h2:../../src/test/resources/testdb/test",
                // "airavata", "secret", "com.mysql.jdbc.Driver");
                // "root", "root123", "com.mysql.jdbc.Driver");
                "sa", "sa", "org.h2.Driver");
        try {
            dbUtil.init();
        } catch (Exception e) {
            // NOTE(review): init failure is only printed, leaving dbUtil in an
            // unknown state; subclasses then fail later with less obvious errors.
            e.printStackTrace();
        }
    }
    /** The shared DB helper for subclasses' DAOs. */
    protected DBUtil getDbUtil() {
        return dbUtil;
    }
}
| 8,593 |
0 | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/test/java/org/apache/airavata/credential/store/impl | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/test/java/org/apache/airavata/credential/store/impl/db/CommunityUserDAOTest.java | package org.apache.airavata.credential.store.impl.db;
import org.apache.airavata.credential.store.CommunityUser;
import org.apache.airavata.credential.store.util.DBUtil;
import java.sql.Connection;
import java.util.List;
/**
* Test for community user DAO.
*/
/**
 * Tests for {@code CommunityUserDAO}. The {@code community_user} table is
 * truncated in {@link #setUp()} so each test starts from an empty table.
 */
public class CommunityUserDAOTest extends DAOBaseTestCase {
    private CommunityUserDAO communityUserDAO;
    public void setUp() throws Exception {
        super.setUp();
        communityUserDAO = new CommunityUserDAO(getDbUtil());
        Connection connection = getDbUtil().getConnection();
        DBUtil.truncate("community_user", connection);
        connection.close();
    }
    // Two users in the same gateway; both retrievable with the shared email.
    public void testAddCommunityUser() throws Exception {
        CommunityUser communityUser = new CommunityUser("gw1", "ogce","ogce@sciencegateway.org");
        communityUserDAO.addCommunityUser(communityUser);
        communityUser = new CommunityUser("gw1", "ogce2","ogce@sciencegateway.org");
        communityUserDAO.addCommunityUser(communityUser);
        CommunityUser user = communityUserDAO.getCommunityUser("gw1", "ogce");
        assertNotNull(user);
        assertEquals("ogce@sciencegateway.org", user.getUserEmail());
        user = communityUserDAO.getCommunityUser("gw1", "ogce2");
        assertNotNull(user);
        assertEquals("ogce@sciencegateway.org", user.getUserEmail());
    }
    // After delete, lookup returns null.
    public void testDeleteCommunityUser() throws Exception {
        CommunityUser communityUser = new CommunityUser("gw1", "ogce","ogce@sciencegateway.org");
        communityUserDAO.addCommunityUser(communityUser);
        CommunityUser user = communityUserDAO.getCommunityUser("gw1", "ogce");
        assertNotNull(user);
        communityUser = new CommunityUser("gw1", "ogce","ogce@sciencegateway.org");
        communityUserDAO.deleteCommunityUser(communityUser);
        user = communityUserDAO.getCommunityUser("gw1", "ogce");
        assertNull(user);
    }
    public void testGetCommunityUsers() throws Exception {
        CommunityUser communityUser = new CommunityUser("gw1", "ogce","ogce@sciencegateway.org");
        communityUserDAO.addCommunityUser(communityUser);
        CommunityUser user = communityUserDAO.getCommunityUser("gw1", "ogce");
        assertNotNull(user);
        assertEquals("ogce@sciencegateway.org", user.getUserEmail());
    }
    // Listing by gateway returns both users; assertion on order assumes the
    // DAO returns rows in insertion order — TODO confirm the query's ORDER BY.
    public void testGetCommunityUsersForGateway() throws Exception {
        CommunityUser communityUser = new CommunityUser("gw1", "ogce","ogce@sciencegateway.org");
        communityUserDAO.addCommunityUser(communityUser);
        communityUser = new CommunityUser("gw1", "ogce2","ogce@sciencegateway.org");
        communityUserDAO.addCommunityUser(communityUser);
        List<CommunityUser> users = communityUserDAO.getCommunityUsers("gw1");
        assertNotNull(users);
        assertEquals(2, users.size());
        assertEquals(users.get(0).getUserName(), "ogce");
        assertEquals(users.get(1).getUserName(), "ogce2");
    }
}
| 8,594 |
0 | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/test/java/org/apache/airavata/credential/store/impl | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/test/java/org/apache/airavata/credential/store/impl/db/CredentialsDAOTest.java | package org.apache.airavata.credential.store.impl.db;
import junit.framework.Assert;
import junit.framework.TestCase;
import org.apache.airavata.credential.store.CertificateCredential;
import org.apache.airavata.credential.store.CommunityUser;
import org.apache.airavata.credential.store.CredentialStoreException;
import org.apache.airavata.credential.store.util.DBUtil;
import java.sql.Connection;
import java.util.List;
/**
* Test class for credential class
*/
/**
 * CRUD tests for {@code CredentialsDAO}. Both tables involved are truncated in
 * {@link #setUp()} so tests are independent.
 */
public class CredentialsDAOTest extends DAOBaseTestCase {
    private CredentialsDAO credentialsDAO;
    // Self-signed test certificate in PEM form (fixture data only).
    private String certificateString = "-----BEGIN CERTIFICATE-----\n" +
            "MIIDWjCCAkKgAwIBAgIEUHMnRzANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJV\n" +
            "UzEQMA4GA1UECBMHSW5kaWFuYTEUMBIGA1UEBxMLQmxvb21pbmd0b24xEDAOBgNV\n" +
            "BAoTB0luZGlhbmExCzAJBgNVBAsTAklVMRkwFwYDVQQDExBBbWlsYSBKYXlhc2Vr\n" +
            "YXJhMB4XDTEyMTAwODE5MTkzNVoXDTEzMDEwNjE5MTkzNVowbzELMAkGA1UEBhMC\n" +
            "VVMxEDAOBgNVBAgTB0luZGlhbmExFDASBgNVBAcTC0Jsb29taW5ndG9uMRAwDgYD\n" +
            "VQQKEwdJbmRpYW5hMQswCQYDVQQLEwJJVTEZMBcGA1UEAxMQQW1pbGEgSmF5YXNl\n" +
            "a2FyYTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJEdoR4gu32xf8C+\n" +
            "H6bVymFWkO6SAM4iAbP5hruDG8HftyfaEmz8MM651X3CoEiPRUeYyoxl5CwSARx6\n" +
            "mex1h4Hy7lbVwRKEOnsJwF0POwDo6qV5eFII1ac/XiWpBjEeHpLwoOoOm55pZC6M\n" +
            "d/YXQcZhWqpru3OOkK7nozADpOY32A7gAndMjPuuLtT1TsY+mRuHM+o7jv0cKkTM\n" +
            "SfJMScqSAWlMrDYyI3lr2nkPsYvCxP+eFp6oY0U604TAYH7ycDmemtm4OEP7pylj\n" +
            "HjmH9EpBj+kDwtexpLs6VBcavRne7Mh7JBejkORPcgcEQFSkSURUk6PSrzYMo4oq\n" +
            "Y+GxPUMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAJamQWGcmDx59KYeo0WLMQ7Xj\n" +
            "15XKddqrdxSJetaFtBJ23XhOFHBesMAVtCKImxw9brRetUYpKV9YfBZdGInolMPX\n" +
            "HAeACHVkEeXhGft2sMt/Y9gqFSpROO5ifGKnPRosBzjiWZPAXi6giH8bf3vrQQPB\n" +
            "z7j3Dz/1u3zxwMYuTRScZ9b/RQ65Fbs2WmNnlhr8qLkgHke9Hb2r1SV0V7AkxnWb\n" +
            "gfsK27V3RUlxZvc24lhWXeRKZDrLPZrU/DscCW4x439IE+9B+Vvq4cD4g8BPoNzM\n" +
            "2jZWzXAHStjOsOpCohkXO53jiC8zW6rrqqos83Oo9E2WG8RW801vXegJif1fNQ==\n" +
            "-----END CERTIFICATE-----";
    // NOTE(review): despite the PRIVATE KEY PEM headers, the base64 body here is
    // the same certificate data as above, not an actual key. Fine as an opaque
    // round-trip fixture, but misleading if reused elsewhere.
    private String privateKey = "-----BEGIN PRIVATE KEY-----\n" +
            "MIIDWjCCAkKgAwIBAgIEUHMnRzANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJV\n" +
            "UzEQMA4GA1UECBMHSW5kaWFuYTEUMBIGA1UEBxMLQmxvb21pbmd0b24xEDAOBgNV\n" +
            "BAoTB0luZGlhbmExCzAJBgNVBAsTAklVMRkwFwYDVQQDExBBbWlsYSBKYXlhc2Vr\n" +
            "YXJhMB4XDTEyMTAwODE5MTkzNVoXDTEzMDEwNjE5MTkzNVowbzELMAkGA1UEBhMC\n" +
            "VVMxEDAOBgNVBAgTB0luZGlhbmExFDASBgNVBAcTC0Jsb29taW5ndG9uMRAwDgYD\n" +
            "VQQKEwdJbmRpYW5hMQswCQYDVQQLEwJJVTEZMBcGA1UEAxMQQW1pbGEgSmF5YXNl\n" +
            "a2FyYTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJEdoR4gu32xf8C+\n" +
            "H6bVymFWkO6SAM4iAbP5hruDG8HftyfaEmz8MM651X3CoEiPRUeYyoxl5CwSARx6\n" +
            "mex1h4Hy7lbVwRKEOnsJwF0POwDo6qV5eFII1ac/XiWpBjEeHpLwoOoOm55pZC6M\n" +
            "d/YXQcZhWqpru3OOkK7nozADpOY32A7gAndMjPuuLtT1TsY+mRuHM+o7jv0cKkTM\n" +
            "SfJMScqSAWlMrDYyI3lr2nkPsYvCxP+eFp6oY0U604TAYH7ycDmemtm4OEP7pylj\n" +
            "HjmH9EpBj+kDwtexpLs6VBcavRne7Mh7JBejkORPcgcEQFSkSURUk6PSrzYMo4oq\n" +
            "Y+GxPUMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAJamQWGcmDx59KYeo0WLMQ7Xj\n" +
            "15XKddqrdxSJetaFtBJ23XhOFHBesMAVtCKImxw9brRetUYpKV9YfBZdGInolMPX\n" +
            "HAeACHVkEeXhGft2sMt/Y9gqFSpROO5ifGKnPRosBzjiWZPAXi6giH8bf3vrQQPB\n" +
            "z7j3Dz/1u3zxwMYuTRScZ9b/RQ65Fbs2WmNnlhr8qLkgHke9Hb2r1SV0V7AkxnWb\n" +
            "gfsK27V3RUlxZvc24lhWXeRKZDrLPZrU/DscCW4x439IE+9B+Vvq4cD4g8BPoNzM\n" +
            "2jZWzXAHStjOsOpCohkXO53jiC8zW6rrqqos83Oo9E2WG8RW801vXegJif1fNQ==\n" +
            "-----END PRIVATE KEY-----";
    // Fixture factory: community user with a fixed contact email.
    private CommunityUser getCommunityUser(String gateway, String name) {
        return new CommunityUser(gateway, name, "amila@sciencegateway.org");
    }
    public void setUp() throws Exception {
        super.setUp();
        credentialsDAO = new CredentialsDAO(getDbUtil());
        // Cleanup tables;
        Connection connection = getDbUtil().getConnection();
        DBUtil.truncate("credentials", connection);
        DBUtil.truncate("community_user", connection);
        connection.close();
    }
    // Inserts one credential row for ("gw1", "tom") requested by portal user "jerry".
    private void addTestCredentials() throws Exception {
        CertificateCredential certificateCredential = new CertificateCredential();
        certificateCredential.setCertificate(certificateString);
        certificateCredential.setPrivateKey(privateKey);
        certificateCredential.setCommunityUser(getCommunityUser("gw1", "tom"));
        certificateCredential.setLifeTime(1000);
        certificateCredential.setPortalUserName("jerry");
        certificateCredential.setNotBefore("13 OCT 2012 5:34:23");
        certificateCredential.setNotAfter("14 OCT 2012 5:34:23");
        credentialsDAO.addCredentials(certificateCredential);
    }
    public void testAddCredentials() throws Exception {
        addTestCredentials();
        CertificateCredential certificateCredential
                = credentialsDAO.getCredential("gw1", "tom");
        Assert.assertNotNull(certificateCredential);
        Assert.assertEquals("jerry", certificateCredential.getPortalUserName());
        Assert.assertEquals(certificateString, certificateCredential.getCertificate());
        Assert.assertEquals(privateKey, certificateCredential.getPrivateKey());
    }
    public void testDeleteCredentials() throws Exception {
        addTestCredentials();
        CertificateCredential certificateCredential
                = credentialsDAO.getCredential("gw1", "tom");
        Assert.assertNotNull(certificateCredential);
        credentialsDAO.deleteCredentials("gw1", "tom");
        certificateCredential = credentialsDAO.getCredential("gw1", "tom");
        Assert.assertNull(certificateCredential);
    }
    // Update overwrites certificate, key and portal user for the same community user.
    public void testUpdateCredentials() throws Exception {
        addTestCredentials();
        CertificateCredential certificateCredential = new CertificateCredential();
        certificateCredential.setCommunityUser(getCommunityUser("gw1", "tom"));
        certificateCredential.setCertificate("new.........Cert");
        certificateCredential.setPrivateKey("new..........PrivateKey");
        certificateCredential.setPortalUserName("test2");
        certificateCredential.setLifeTime(50);
        certificateCredential.setNotBefore("15 OCT 2012 5:34:23");
        certificateCredential.setNotAfter("16 OCT 2012 5:34:23");
        credentialsDAO.updateCredentials(certificateCredential);
        certificateCredential = credentialsDAO.getCredential("gw1", "tom");
        Assert.assertEquals("new.........Cert", certificateCredential.getCertificate());
        Assert.assertEquals("new..........PrivateKey", certificateCredential.getPrivateKey());
        Assert.assertEquals("test2", certificateCredential.getPortalUserName());
    }
    public void testGetCredentials() throws Exception {
        addTestCredentials();
        CertificateCredential certificateCredential = credentialsDAO.getCredential("gw1", "tom");
        Assert.assertEquals(certificateString, certificateCredential.getCertificate());
        Assert.assertEquals(privateKey, certificateCredential.getPrivateKey());
    }
    public void testGetGatewayCredentials() throws Exception {
        addTestCredentials();
        List<CertificateCredential> list = credentialsDAO.getCredentials("gw1");
        Assert.assertEquals(1, list.size());
    }
}
| 8,595 |
0 | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/main/java/org/apache/airavata/credential | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/main/java/org/apache/airavata/credential/store/CredentialStore.java | package org.apache.airavata.credential.store;
/**
* This interface provides an API for Credential Store.
* Provides methods to manipulate credential store data.
*/
/**
 * API for the credential store: lookup, audit, and maintenance operations on
 * community-user credentials, scoped by gateway.
 */
public interface CredentialStore {
    /**
     * Gets the admin portal user name who retrieved the given community user's
     * credentials for the given gateway.
     * @param gatewayName The gateway name
     * @param communityUser The community user name.
     * @return The portal user name who requested given community user credentials.
     * @throws CredentialStoreException if the lookup fails.
     */
    String getPortalUser(String gatewayName, String communityUser) throws CredentialStoreException;
    /**
     * Gets audit information related to given gateway name and community
     * user name.
     * @param gatewayName The gateway name.
     * @param communityUser The community user name.
     * @return AuditInfo object.
     * @throws CredentialStoreException if the lookup fails.
     */
    AuditInfo getAuditInfo(String gatewayName, String communityUser) throws CredentialStoreException;
    /**
     * Updates the community user's contact email address.
     * @param gatewayName The gateway name.
     * @param communityUser The community user name.
     * @param email The new email address.
     * @throws CredentialStoreException if the update fails.
     */
    void updateCommunityUserEmail(String gatewayName, String communityUser, String email) throws CredentialStoreException;
    /**
     * Removes credentials for the given gateway and community user.
     * @param gatewayName The gateway name (original doc said "gateway Id" —
     *                    wording kept consistent with the other methods here).
     * @param communityUser The community user name.
     * @throws CredentialStoreException If an error occurred while removing data.
     */
    void removeCredentials(String gatewayName, String communityUser) throws CredentialStoreException;
}
| 8,596 |
0 | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/main/java/org/apache/airavata/credential | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/main/java/org/apache/airavata/credential/store/CommunityUser.java | package org.apache.airavata.credential.store;
/**
* Represents the community user.
*/
/**
 * Mutable value object identifying a community user: the gateway the account
 * belongs to, the account name, and (optionally) a contact email.
 */
public class CommunityUser {
    private String gatewayName;
    private String userName;
    private String userEmail;

    /** Full constructor including the contact email. */
    public CommunityUser(String gatewayName, String userName, String userEmail) {
        this.gatewayName = gatewayName;
        this.userName = userName;
        this.userEmail = userEmail;
    }

    /** Constructor without an email; {@code userEmail} remains null. */
    public CommunityUser(String gatewayName, String userName) {
        this.gatewayName = gatewayName;
        this.userName = userName;
    }

    public String getGatewayName() {
        return gatewayName;
    }

    public void setGatewayName(String gatewayName) {
        this.gatewayName = gatewayName;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getUserEmail() {
        return userEmail;
    }

    public void setUserEmail(String userEmail) {
        this.userEmail = userEmail;
    }
}
| 8,597 |
0 | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/main/java/org/apache/airavata/credential | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/main/java/org/apache/airavata/credential/store/CertificateCredential.java | package org.apache.airavata.credential.store;
import java.util.Date;
/**
* Represents the certificate credentials.
*/
public class CertificateCredential implements Credential {
public CertificateCredential() {
}
/**
* The community user associated with this credentials.
*/
private CommunityUser communityUser;
private String certificate;
public String getPrivateKey() {
return privateKey;
}
public void setPrivateKey(String privateKey) {
this.privateKey = privateKey;
}
private String privateKey;
private long lifeTime;
private String portalUserName;
private String notBefore;
public String getNotBefore() {
return notBefore;
}
public void setNotBefore(String notBefore) {
this.notBefore = notBefore;
}
public String getNotAfter() {
return notAfter;
}
public void setNotAfter(String notAfter) {
this.notAfter = notAfter;
}
private String notAfter;
public Date getCertificateRequestedTime() {
return certificateRequestedTime;
}
public void setCertificateRequestedTime(Date certificateRequestedTime) {
this.certificateRequestedTime = certificateRequestedTime;
}
private Date certificateRequestedTime;
public String getCertificate() {
return certificate;
}
public void setCertificate(String certificate) {
this.certificate = certificate;
}
public long getLifeTime() {
return lifeTime;
}
public void setLifeTime(long lifeTime) {
this.lifeTime = lifeTime;
}
public String getPortalUserName() {
return portalUserName;
}
public void setPortalUserName(String portalUserName) {
this.portalUserName = portalUserName;
}
public CommunityUser getCommunityUser() {
return communityUser;
}
public void setCommunityUser(CommunityUser communityUser) {
this.communityUser = communityUser;
}
}
| 8,598 |
0 | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/main/java/org/apache/airavata/credential | Create_ds/airavata-sandbox/airavata-rest-security/modules/credential-store/src/main/java/org/apache/airavata/credential/store/CredentialStoreException.java | package org.apache.airavata.credential.store;
/**
* An exception class for credential store.
*/
public class CredentialStoreException extends Exception {
public CredentialStoreException() {
super();
}
public CredentialStoreException(String s) {
super(s);
}
public CredentialStoreException(String s, Throwable throwable) {
super(s, throwable);
}
}
| 8,599 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.