text
stringlengths
1
22.8M
```java /* * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * * path_to_url * * Unless required by applicable law or agreed to in writing, * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * specific language governing permissions and limitations */ package org.apache.pulsar.broker.namespace; import static com.google.common.base.Preconditions.checkArgument; import static java.lang.String.format; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.SECONDS; import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.pulsar.client.api.PulsarClientException.FailedFeatureCheck.SupportsGetPartitionedMetadataWithoutAutoCreation; import static org.apache.pulsar.common.naming.NamespaceName.SYSTEM_NAMESPACE; import com.google.common.hash.Hashing; import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.DoubleHistogram; import io.prometheus.client.Counter; import java.net.URI; import java.net.URL; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; import 
org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.ListUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.mutable.MutableBoolean; import org.apache.commons.lang3.tuple.Pair; import org.apache.pulsar.broker.PulsarServerException; import org.apache.pulsar.broker.PulsarService; import org.apache.pulsar.broker.ServiceConfiguration; import org.apache.pulsar.broker.loadbalance.LeaderBroker; import org.apache.pulsar.broker.loadbalance.LeaderElectionService; import org.apache.pulsar.broker.loadbalance.LoadManager; import org.apache.pulsar.broker.loadbalance.ResourceUnit; import org.apache.pulsar.broker.loadbalance.extensions.ExtensibleLoadManagerImpl; import org.apache.pulsar.broker.loadbalance.extensions.manager.RedirectManager; import org.apache.pulsar.broker.lookup.LookupResult; import org.apache.pulsar.broker.resources.NamespaceResources; import org.apache.pulsar.broker.service.BrokerServiceException.ServiceUnitNotReadyException; import org.apache.pulsar.broker.service.Topic; import org.apache.pulsar.broker.service.nonpersistent.NonPersistentTopic; import org.apache.pulsar.broker.stats.prometheus.metrics.Summary; import org.apache.pulsar.broker.web.PulsarWebResource; import org.apache.pulsar.client.admin.PulsarAdmin; import org.apache.pulsar.client.admin.PulsarAdminException; import org.apache.pulsar.client.api.ClientBuilder; import org.apache.pulsar.client.api.PulsarClient; import org.apache.pulsar.client.api.PulsarClientException; import org.apache.pulsar.client.api.SizeUnit; import org.apache.pulsar.client.impl.ClientBuilderImpl; import org.apache.pulsar.client.impl.PulsarClientImpl; import org.apache.pulsar.client.impl.conf.ClientConfigurationData; import org.apache.pulsar.client.internal.PropertiesUtils; import org.apache.pulsar.common.api.proto.CommandGetTopicsOfNamespace.Mode; import org.apache.pulsar.common.lookup.GetTopicsResult; import org.apache.pulsar.common.lookup.data.LookupData; import 
org.apache.pulsar.common.naming.BundleSplitOption; import org.apache.pulsar.common.naming.FlowOrQpsEquallyDivideBundleSplitOption; import org.apache.pulsar.common.naming.NamespaceBundle; import org.apache.pulsar.common.naming.NamespaceBundleFactory; import org.apache.pulsar.common.naming.NamespaceBundleSplitAlgorithm; import org.apache.pulsar.common.naming.NamespaceBundles; import org.apache.pulsar.common.naming.NamespaceName; import org.apache.pulsar.common.naming.ServiceUnitId; import org.apache.pulsar.common.naming.TopicDomain; import org.apache.pulsar.common.naming.TopicName; import org.apache.pulsar.common.policies.NamespaceIsolationPolicy; import org.apache.pulsar.common.policies.data.BrokerAssignment; import org.apache.pulsar.common.policies.data.ClusterDataImpl; import org.apache.pulsar.common.policies.data.LocalPolicies; import org.apache.pulsar.common.policies.data.NamespaceOwnershipStatus; import org.apache.pulsar.common.policies.data.Policies; import org.apache.pulsar.common.policies.data.stats.TopicStatsImpl; import org.apache.pulsar.common.policies.impl.NamespaceIsolationPolicies; import org.apache.pulsar.common.stats.MetricsUtil; import org.apache.pulsar.common.topics.TopicList; import org.apache.pulsar.common.util.FutureUtil; import org.apache.pulsar.common.util.collections.ConcurrentOpenHashMap; import org.apache.pulsar.metadata.api.MetadataCache; import org.apache.pulsar.metadata.api.MetadataStoreException; import org.apache.pulsar.opentelemetry.annotations.PulsarDeprecatedMetric; import org.apache.pulsar.policies.data.loadbalancer.AdvertisedListener; import org.apache.pulsar.policies.data.loadbalancer.LocalBrokerData; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * The <code>NamespaceService</code> provides resource ownership lookup as well as resource ownership claiming services * for the <code>PulsarService</code>. * <p/> * The <code>PulsarService</code> relies on this service for resource ownership operations. 
 * <p/>
 * The focus of this phase is to bring up the system and be able to iterate and improve the services effectively.
 * <p/>
 *
 * @see org.apache.pulsar.broker.PulsarService
 */
@Slf4j
public class NamespaceService implements AutoCloseable {
    private static final Logger LOG = LoggerFactory.getLogger(NamespaceService.class);

    // Core collaborators, all derived from the owning PulsarService in the constructor.
    private final ServiceConfiguration config;
    private final AtomicReference<LoadManager> loadManager;
    private final PulsarService pulsar;
    private final OwnershipCache ownershipCache;
    private final MetadataCache<LocalBrokerData> localBrokerDataCache;
    private final NamespaceBundleFactory bundleFactory;

    // Advertised address of this broker (set from pulsar.getAdvertisedAddress()).
    private final String host;

    // Upper bound on retries for a bundle-split operation.
    public static final int BUNDLE_SPLIT_RETRY_LIMIT = 7;

    // Naming patterns/formats for the per-broker heartbeat (v1/v2) and SLA-monitor namespaces.
    // The capture group in each pattern extracts the "host:port" broker id.
    public static final String SLA_NAMESPACE_PROPERTY = "sla-monitor";
    public static final Pattern HEARTBEAT_NAMESPACE_PATTERN = Pattern.compile("pulsar/[^/]+/([^:]+:\\d+)");
    public static final Pattern HEARTBEAT_NAMESPACE_PATTERN_V2 = Pattern.compile("pulsar/([^:]+:\\d+)");
    public static final Pattern SLA_NAMESPACE_PATTERN =
            Pattern.compile(SLA_NAMESPACE_PROPERTY + "/[^/]+/([^:]+:\\d+)");
    public static final String HEARTBEAT_NAMESPACE_FMT = "pulsar/%s/%s";
    public static final String HEARTBEAT_NAMESPACE_FMT_V2 = "pulsar/%s";
    public static final String SLA_NAMESPACE_FMT = SLA_NAMESPACE_PROPERTY + "/%s/%s";

    // Cached PulsarClient instances keyed by cluster data; presumably one client per
    // remote cluster — TODO(review): confirm against the accessor that populates this map.
    private final ConcurrentOpenHashMap<ClusterDataImpl, PulsarClientImpl> namespaceClients;

    // Listeners notified about bundle ownership changes and bundle splits.
    private final List<NamespaceBundleOwnershipListener> bundleOwnershipListeners;
    private final List<NamespaceBundleSplitListener> bundleSplitListeners;

    private final RedirectManager redirectManager;

    // OpenTelemetry metric name plus the per-outcome attribute sets recorded with it.
    public static final String LOOKUP_REQUEST_DURATION_METRIC_NAME =
            "pulsar.broker.request.topic.lookup.duration";
    private static final AttributeKey<String> PULSAR_LOOKUP_RESPONSE_ATTRIBUTE =
            AttributeKey.stringKey("pulsar.lookup.response");
    public static final Attributes PULSAR_LOOKUP_RESPONSE_BROKER_ATTRIBUTES = Attributes.builder()
            .put(PULSAR_LOOKUP_RESPONSE_ATTRIBUTE, "broker")
            .build();
    public static final Attributes PULSAR_LOOKUP_RESPONSE_REDIRECT_ATTRIBUTES = Attributes.builder()
            .put(PULSAR_LOOKUP_RESPONSE_ATTRIBUTE, "redirect")
            .build();
    public static final Attributes PULSAR_LOOKUP_RESPONSE_FAILURE_ATTRIBUTES = Attributes.builder()
            .put(PULSAR_LOOKUP_RESPONSE_ATTRIBUTE, "failure")
            .build();

    // Legacy Prometheus lookup metrics; deprecated in favor of the OpenTelemetry
    // histogram (lookupLatencyHistogram) but still updated alongside it.
    @PulsarDeprecatedMetric(newMetricName = LOOKUP_REQUEST_DURATION_METRIC_NAME)
    private static final Counter lookupRedirects =
            Counter.build("pulsar_broker_lookup_redirects", "-").register();

    @PulsarDeprecatedMetric(newMetricName = LOOKUP_REQUEST_DURATION_METRIC_NAME)
    private static final Counter lookupFailures =
            Counter.build("pulsar_broker_lookup_failures", "-").register();

    @PulsarDeprecatedMetric(newMetricName = LOOKUP_REQUEST_DURATION_METRIC_NAME)
    private static final Counter lookupAnswers =
            Counter.build("pulsar_broker_lookup_answers", "-").register();

    @PulsarDeprecatedMetric(newMetricName = LOOKUP_REQUEST_DURATION_METRIC_NAME)
    private static final Summary lookupLatency = Summary.build("pulsar_broker_lookup", "-")
            .quantile(0.50)
            .quantile(0.99)
            .quantile(0.999)
            .quantile(1.0)
            .register();

    // OpenTelemetry histogram for lookup latency; initialized in the constructor.
    private final DoubleHistogram lookupLatencyHistogram;

    // In-flight user-topic list queries; presumably used to de-duplicate concurrent
    // queries for the same key — TODO(review): confirm against the call site.
    private ConcurrentHashMap<String, CompletableFuture<List<String>>> inProgressQueryUserTopics =
            new ConcurrentHashMap<>();

    /**
     * Default constructor.
     */
    public NamespaceService(PulsarService pulsar) {
        this.pulsar = pulsar;
        this.host = pulsar.getAdvertisedAddress();
        this.config = pulsar.getConfiguration();
        this.loadManager = pulsar.getLoadManager();
        this.bundleFactory = new NamespaceBundleFactory(pulsar, Hashing.crc32());
        this.ownershipCache = new OwnershipCache(pulsar, this);
        this.namespaceClients =
                ConcurrentOpenHashMap.<ClusterDataImpl, PulsarClientImpl>newBuilder().build();
        this.bundleOwnershipListeners = new CopyOnWriteArrayList<>();
        this.bundleSplitListeners = new CopyOnWriteArrayList<>();
        this.localBrokerDataCache = pulsar.getLocalMetadataStore().getMetadataCache(LocalBrokerData.class);
        this.redirectManager = new RedirectManager(pulsar);
        // Lookup latency is recorded in seconds (see getBrokerServiceUrlAsync).
        this.lookupLatencyHistogram = pulsar.getOpenTelemetry().getMeter()
                .histogramBuilder(LOOKUP_REQUEST_DURATION_METRIC_NAME)
                .setDescription("The duration of topic lookup requests (either binary or HTTP)")
                .setUnit("s")
                .build();
    }

    /**
     * Initializes the service by refreshing this broker's own ownership info.
     *
     * @throws RuntimeException if the self-owner info cannot be refreshed
     */
    public void initialize() {
        if (!getOwnershipCache().refreshSelfOwnerInfo()) {
            throw new RuntimeException("Failed to refresh self owner info.");
        }
    }

    /**
     * Looks up the broker serving the given topic, recording lookup latency and outcome
     * (broker / redirect / failure) in both the legacy Prometheus metrics and the
     * OpenTelemetry histogram.
     *
     * @param topic the topic to look up
     * @param options lookup options (authoritative, read-only, listener name, ...)
     * @return a future with the lookup result, or empty if no broker could be resolved
     */
    public CompletableFuture<Optional<LookupResult>> getBrokerServiceUrlAsync(TopicName topic,
                                                                              LookupOptions options) {
        long startTime = System.nanoTime();

        CompletableFuture<Optional<LookupResult>> future = getBundleAsync(topic)
                .thenCompose(bundle -> {
                    // Do redirection if the cluster is in rollback or deploying.
                    return findRedirectLookupResultAsync(bundle).thenCompose(optResult -> {
                        if (optResult.isPresent()) {
                            LOG.info("[{}] Redirect lookup request to {} for topic {}",
                                    pulsar.getBrokerId(), optResult.get(), topic);
                            return CompletableFuture.completedFuture(optResult);
                        }
                        if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) {
                            // Extensible load manager resolves ownership itself.
                            return loadManager.get().findBrokerServiceUrl(Optional.of(topic), bundle, options);
                        } else {
                            // TODO: Add unit tests cover it.
                            return findBrokerServiceUrl(bundle, options);
                        }
                    });
                });

        // Metrics are recorded on completion regardless of success or failure.
        future.whenComplete((lookupResult, throwable) -> {
            var latencyNs = System.nanoTime() - startTime;
            lookupLatency.observe(latencyNs, TimeUnit.NANOSECONDS);
            Attributes attributes;
            if (throwable == null) {
                if (lookupResult.isPresent()) {
                    if (lookupResult.get().isRedirect()) {
                        lookupRedirects.inc();
                        attributes = PULSAR_LOOKUP_RESPONSE_REDIRECT_ATTRIBUTES;
                    } else {
                        lookupAnswers.inc();
                        attributes = PULSAR_LOOKUP_RESPONSE_BROKER_ATTRIBUTES;
                    }
                } else {
                    // No lookup result, default to reporting as failure.
                    attributes = PULSAR_LOOKUP_RESPONSE_FAILURE_ATTRIBUTES;
                }
            } else {
                lookupFailures.inc();
                attributes = PULSAR_LOOKUP_RESPONSE_FAILURE_ATTRIBUTES;
            }
            lookupLatencyHistogram.record(MetricsUtil.convertToSeconds(latencyNs, TimeUnit.NANOSECONDS),
                    attributes);
        });

        return future;
    }

    /**
     * Checks whether lookups for this bundle should be redirected to another cluster.
     * Heartbeat/SLA namespaces are never redirected.
     */
    private CompletableFuture<Optional<LookupResult>> findRedirectLookupResultAsync(ServiceUnitId bundle) {
        if (isSLAOrHeartbeatNamespace(bundle.getNamespaceObject().toString())) {
            return CompletableFuture.completedFuture(Optional.empty());
        }
        return redirectManager.findRedirectLookupResultAsync();
    }

    /** Resolves (asynchronously) the bundle that owns the given topic. */
    public CompletableFuture<NamespaceBundle> getBundleAsync(TopicName topic) {
        return bundleFactory.getBundlesAsync(topic.getNamespaceObject())
                .thenApply(bundles -> bundles.findBundle(topic));
    }

    /** Returns the owning bundle only if the namespace's bundles are already cached. */
    public Optional<NamespaceBundle> getBundleIfPresent(TopicName topicName) {
        Optional<NamespaceBundles> bundles = bundleFactory.getBundlesIfPresent(topicName.getNamespaceObject());
        return bundles.map(b -> b.findBundle(topicName));
    }

    /** Synchronously resolves the bundle that owns the given topic. */
    public NamespaceBundle getBundle(TopicName topicName) {
        return bundleFactory.getBundles(topicName.getNamespaceObject()).findBundle(topicName);
    }

    /** Returns the number of bundles configured for the namespace. */
    public int getBundleCount(NamespaceName namespace) throws Exception {
        return bundleFactory.getBundles(namespace).size();
    }

    /** Returns the single full bundle covering the whole namespace. */
    private NamespaceBundle getFullBundle(NamespaceName fqnn) throws Exception {
        return bundleFactory.getFullBundle(fqnn);
    }

    /** Asynchronous variant of {@link #getFullBundle(NamespaceName)}. */
    private CompletableFuture<NamespaceBundle>
    getFullBundleAsync(NamespaceName fqnn) {
        return bundleFactory.getFullBundleAsync(fqnn);
    }

    /**
     * Return the URL of the broker who's owning a particular service unit in asynchronous way.
     * <p>
     * If the service unit is not owned, return a CompletableFuture with empty optional.
     */
    public CompletableFuture<Optional<URL>> getWebServiceUrlAsync(ServiceUnitId suName,
                                                                  LookupOptions options) {
        // Dispatch on the concrete ServiceUnitId type: topic, namespace, or bundle.
        if (suName instanceof TopicName name) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Getting web service URL of topic: {} - options: {}", name, options);
            }
            return getBundleAsync(name)
                    .thenCompose(namespaceBundle -> internalGetWebServiceUrl(name, namespaceBundle, options));
        }

        if (suName instanceof NamespaceName namespaceName) {
            return getFullBundleAsync(namespaceName)
                    .thenCompose(namespaceBundle -> internalGetWebServiceUrl(null, namespaceBundle, options));
        }

        if (suName instanceof NamespaceBundle namespaceBundle) {
            return internalGetWebServiceUrl(null, namespaceBundle, options);
        }

        throw new IllegalArgumentException("Unrecognized class of NamespaceBundle: " + suName.getClass().getName());
    }

    /**
     * Return the URL of the broker who's owning a particular service unit.
     * <p>
     * If the service unit is not owned, return an empty optional
     */
    public Optional<URL> getWebServiceUrl(ServiceUnitId suName, LookupOptions options) throws Exception {
        // Blocks up to the metadata-store operation timeout.
        return getWebServiceUrlAsync(suName, options)
                .get(pulsar.getConfiguration().getMetadataStoreOperationTimeoutSeconds(), SECONDS);
    }

    /**
     * Resolves the owner's HTTP(S) web-service URL for a bundle, honoring cluster-level
     * redirects first, then delegating to the configured load manager.
     *
     * @param topic optional topic that triggered the lookup (may be null for namespace/bundle lookups)
     * @param bundle the bundle whose owner is being resolved
     * @param options lookup options; isRequestHttps() selects the TLS vs plain HTTP URL
     */
    private CompletableFuture<Optional<URL>> internalGetWebServiceUrl(@Nullable ServiceUnitId topic,
                                                                      NamespaceBundle bundle,
                                                                      LookupOptions options) {
        return findRedirectLookupResultAsync(bundle).thenCompose(optResult -> {
            if (optResult.isPresent()) {
                LOG.info("[{}] Redirect lookup request to {} for topic {}",
                        pulsar.getBrokerId(), optResult.get(), topic);
                try {
                    LookupData lookupData = optResult.get().getLookupData();
                    final String redirectUrl = options.isRequestHttps()
                            ? lookupData.getHttpUrlTls() : lookupData.getHttpUrl();
                    return CompletableFuture.completedFuture(Optional.of(new URL(redirectUrl)));
                } catch (Exception e) {
                    // just log the exception, nothing else to do
                    LOG.warn("internalGetWebServiceUrl [{}]", e.getMessage(), e);
                }
                // A malformed redirect URL degrades to "not owned" rather than failing the future.
                return CompletableFuture.completedFuture(Optional.empty());
            }

            CompletableFuture<Optional<LookupResult>> future =
                    ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)
                            ? loadManager.get().findBrokerServiceUrl(Optional.ofNullable(topic), bundle, options)
                            : findBrokerServiceUrl(bundle, options);

            return future.thenApply(lookupResult -> {
                if (lookupResult.isPresent()) {
                    try {
                        LookupData lookupData = lookupResult.get().getLookupData();
                        final String redirectUrl = options.isRequestHttps()
                                ? lookupData.getHttpUrlTls() : lookupData.getHttpUrl();
                        return Optional.of(new URL(redirectUrl));
                    } catch (Exception e) {
                        // just log the exception, nothing else to do
                        LOG.warn("internalGetWebServiceUrl [{}]", e.getMessage(), e);
                    }
                }
                return Optional.empty();
            });
        });
    }

    /**
     * Register all the bootstrap name spaces including the heartbeat namespace.
     *
     * @throws PulsarServerException if an unexpected error occurs
     */
    public void registerBootstrapNamespaces() throws PulsarServerException {
        String brokerId = pulsar.getBrokerId();
        // ensure that we own the heartbeat namespace
        if (registerNamespace(getHeartbeatNamespace(brokerId, config), true)) {
            LOG.info("added heartbeat namespace name in local cache: ns={}",
                    getHeartbeatNamespace(brokerId, config));
        }

        // ensure that we own the heartbeat namespace
        if (registerNamespace(getHeartbeatNamespaceV2(brokerId, config), true)) {
            LOG.info("added heartbeat namespace name in local cache: ns={}",
                    getHeartbeatNamespaceV2(brokerId, config));
        }

        // we may not need strict ownership checking for bootstrap names for now
        for (String namespace : config.getBootstrapNamespaces()) {
            if (registerNamespace(NamespaceName.get(namespace), false)) {
                LOG.info("added bootstrap namespace name in local cache: ns={}", namespace);
            }
        }
    }

    /**
     * Tries to register a namespace to this instance.
     *
     * @param nsname namespace name
     * @param ensureOwned sets the behavior when the namespace is already owned by another broker.
     *                    If this flag is set to true, then the method will throw an exception.
     *                    If this flag is set to false, then the method will return false.
     * @return true if the namespace was successfully registered, false otherwise
     * @throws PulsarServerException if an error occurs when registering the namespace
     */
    public boolean registerNamespace(NamespaceName nsname, boolean ensureOwned) throws PulsarServerException {
        try {
            // all pre-registered namespace is assumed to have bundles disabled
            NamespaceBundle nsFullBundle = bundleFactory.getFullBundle(nsname);
            // v2 namespace will always use full bundle object
            final NamespaceEphemeralData otherData;
            if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) {
                // Note: blocking get() on the acquisition future (same below).
                ExtensibleLoadManagerImpl loadManager = ExtensibleLoadManagerImpl.get(this.loadManager.get());
                otherData = loadManager.tryAcquiringOwnership(nsFullBundle).get();
            } else {
                otherData = ownershipCache.tryAcquiringOwnership(nsFullBundle).get();
            }

            // Ownership acquired by this broker if the returned URLs match our own.
            if (StringUtils.equals(pulsar.getBrokerServiceUrl(), otherData.getNativeUrl())
                    || StringUtils.equals(pulsar.getBrokerServiceUrlTls(), otherData.getNativeUrlTls())) {
                if (nsFullBundle != null) {
                    // preload heartbeat namespace
                    pulsar.loadNamespaceTopics(nsFullBundle);
                }
                return true;
            }

            String msg = String.format("namespace already owned by other broker : ns=%s expected=%s actual=%s",
                    nsname,
                    StringUtils.defaultString(pulsar.getBrokerServiceUrl(), pulsar.getBrokerServiceUrlTls()),
                    StringUtils.defaultString(otherData.getNativeUrl(), otherData.getNativeUrlTls()));

            // ignore if not be owned for now
            if (!ensureOwned) {
                LOG.info(msg);
                return false;
            }

            // should not happen
            throw new IllegalStateException(msg);
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            throw new PulsarServerException(e);
        }
    }

    // In-flight lookup futures, used by findBrokerServiceUrl to coalesce concurrent
    // lookups for the same bundle. Authoritative and non-authoritative lookups are
    // tracked in separate maps.
    private final ConcurrentOpenHashMap<NamespaceBundle, CompletableFuture<Optional<LookupResult>>>
            findingBundlesAuthoritative =
            ConcurrentOpenHashMap.<NamespaceBundle, CompletableFuture<Optional<LookupResult>>>newBuilder()
                    .build();
    private final ConcurrentOpenHashMap<NamespaceBundle, CompletableFuture<Optional<LookupResult>>>
            findingBundlesNotAuthoritative =
            ConcurrentOpenHashMap.<NamespaceBundle, CompletableFuture<Optional<LookupResult>>>newBuilder()
                    .build();

    /**
     * Main internal method to lookup and setup ownership of service unit to a broker.
     *
     * @param bundle the namespace bundle
     * @param options the lookup options
     * @return the lookup result
     */
    private CompletableFuture<Optional<LookupResult>> findBrokerServiceUrl(
            NamespaceBundle bundle, LookupOptions options) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("findBrokerServiceUrl: {} - options: {}", bundle, options);
        }

        // Concurrent lookups for the same bundle share one future; authoritative and
        // non-authoritative lookups are coalesced independently.
        ConcurrentOpenHashMap<NamespaceBundle, CompletableFuture<Optional<LookupResult>>> targetMap;
        if (options.isAuthoritative()) {
            targetMap = findingBundlesAuthoritative;
        } else {
            targetMap = findingBundlesNotAuthoritative;
        }

        return targetMap.computeIfAbsent(bundle, (k) -> {
            CompletableFuture<Optional<LookupResult>> future = new CompletableFuture<>();

            // First check if we or someone else already owns the bundle
            ownershipCache.getOwnerAsync(bundle).thenAccept(nsData -> {
                if (nsData.isEmpty()) {
                    // No one owns this bundle
                    if (options.isReadOnly()) {
                        // Do not attempt to acquire ownership
                        future.complete(Optional.empty());
                    } else {
                        // Now, no one owns the namespace yet. Hence, we will try to dynamically assign it
                        pulsar.getExecutor().execute(() -> searchForCandidateBroker(bundle, future, options));
                    }
                } else if (nsData.get().isDisabled()) {
                    // Owner exists but the bundle is being unloaded: fail the lookup.
                    future.completeExceptionally(
                            new IllegalStateException(String.format("Namespace bundle %s is being unloaded", bundle)));
                } else {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Namespace bundle {} already owned by {} ", bundle, nsData);
                    }
                    // find the target
                    if (options.hasAdvertisedListenerName()) {
                        AdvertisedListener listener =
                                nsData.get().getAdvertisedListeners().get(options.getAdvertisedListenerName());
                        if (listener == null) {
                            future.completeExceptionally(
                                    new PulsarServerException("the broker do not have "
                                            + options.getAdvertisedListenerName() + " listener"));
                        } else {
                            URI url = listener.getBrokerServiceUrl();
                            URI urlTls = listener.getBrokerServiceUrlTls();
                            future.complete(Optional.of(new LookupResult(nsData.get(),
                                    url == null ? null : url.toString(),
                                    urlTls == null ? null : urlTls.toString())));
                        }
                    } else {
                        future.complete(Optional.of(new LookupResult(nsData.get())));
                    }
                }
            }).exceptionally(exception -> {
                LOG.warn("Failed to check owner for bundle {}: {}", bundle, exception.getMessage(), exception);
                future.completeExceptionally(exception);
                return null;
            });

            // Remove the in-flight entry once completed so later lookups re-resolve.
            future.whenComplete((r, t) -> pulsar.getExecutor().execute(
                    () -> targetMap.remove(bundle)
            ));

            return future;
        });
    }

    /**
     * Check if this is Heartbeat or SLAMonitor namespace and return the broker id.
     *
     * @param serviceUnit the service unit
     * @param isBrokerActive the function to check if the broker is active
     * @return the broker id
     */
    public CompletableFuture<String> getHeartbeatOrSLAMonitorBrokerId(
            ServiceUnitId serviceUnit, Function<String, CompletableFuture<Boolean>> isBrokerActive) {
        // Heartbeat namespace (v1) encodes the owner broker id directly.
        String candidateBroker = NamespaceService.checkHeartbeatNamespace(serviceUnit);
        if (candidateBroker != null) {
            return CompletableFuture.completedFuture(candidateBroker);
        }
        // Heartbeat namespace (v2).
        candidateBroker = NamespaceService.checkHeartbeatNamespaceV2(serviceUnit);
        if (candidateBroker != null) {
            return CompletableFuture.completedFuture(candidateBroker);
        }
        // SLA-monitor namespace: only return the encoded broker if it is currently active.
        candidateBroker = NamespaceService.getSLAMonitorBrokerName(serviceUnit);
        if (candidateBroker != null) {
            // Check if the broker is available
            final String finalCandidateBroker = candidateBroker;
            return isBrokerActive.apply(candidateBroker).thenApply(isActive -> {
                if (isActive) {
                    return finalCandidateBroker;
                } else {
                    return null;
                }
            });
        }
        return CompletableFuture.completedFuture(null);
    }

    /**
     * Picks a candidate broker to own the bundle and either acquires ownership locally
     * or completes the lookup with a redirect to the chosen broker.
     *
     * @param bundle the unowned bundle being assigned
     * @param lookupFuture completed with the lookup result (or exceptionally on failure)
     * @param options lookup options (authoritative flag, listener name, topic preloading)
     */
    private void searchForCandidateBroker(NamespaceBundle bundle,
                                          CompletableFuture<Optional<LookupResult>> lookupFuture,
                                          LookupOptions options) {
        String candidateBroker;

        LeaderElectionService les = pulsar.getLeaderElectionService();
        if (les == null) {
            // The leader election service was not initialized yet. This can happen because the broker service is
            // initialized first and it might start receiving lookup requests before the leader election service is
            // fully initialized.
            LOG.warn("The leader election has not yet been completed! NamespaceBundle[{}]", bundle);
            lookupFuture.completeExceptionally(
                    new IllegalStateException("The leader election has not yet been completed!"));
            return;
        }

        boolean authoritativeRedirect = les.isLeader();

        try {
            // check if this is Heartbeat or SLAMonitor namespace
            candidateBroker = getHeartbeatOrSLAMonitorBrokerId(bundle,
                    cb -> CompletableFuture.completedFuture(isBrokerActive(cb)))
                    .get(config.getMetadataStoreOperationTimeoutSeconds(), SECONDS);

            if (candidateBroker == null) {
                Optional<LeaderBroker> currentLeader = pulsar.getLeaderElectionService().getCurrentLeader();

                if (options.isAuthoritative()) {
                    // leader broker already assigned the current broker as owner
                    candidateBroker = pulsar.getBrokerId();
                } else {
                    LoadManager loadManager = this.loadManager.get();
                    boolean makeLoadManagerDecisionOnThisBroker = !loadManager.isCentralized() || les.isLeader();
                    if (!makeLoadManagerDecisionOnThisBroker) {
                        // If leader is not active, fallback to pick the least loaded from current broker loadmanager
                        boolean leaderBrokerActive = currentLeader.isPresent()
                                && isBrokerActive(currentLeader.get().getBrokerId());
                        if (!leaderBrokerActive) {
                            makeLoadManagerDecisionOnThisBroker = true;
                            if (currentLeader.isEmpty()) {
                                LOG.warn(
                                        "The information about the current leader broker wasn't available. "
                                                + "Handling load manager decisions in a decentralized way. "
                                                + "NamespaceBundle[{}]",
                                        bundle);
                            } else {
                                LOG.warn(
                                        "The current leader broker {} isn't active. "
                                                + "Handling load manager decisions in a decentralized way. "
                                                + "NamespaceBundle[{}]",
                                        currentLeader.get(), bundle);
                            }
                        }
                    }
                    if (makeLoadManagerDecisionOnThisBroker) {
                        Optional<String> availableBroker = getLeastLoadedFromLoadManager(bundle);
                        if (availableBroker.isEmpty()) {
                            LOG.warn("Load manager didn't return any available broker. "
                                            + "Returning empty result to lookup. NamespaceBundle[{}]",
                                    bundle);
                            lookupFuture.complete(Optional.empty());
                            return;
                        }
                        candidateBroker = availableBroker.get();
                        authoritativeRedirect = true;
                    } else {
                        // forward to leader broker to make assignment
                        candidateBroker = currentLeader.get().getBrokerId();
                    }
                }
            }
        } catch (Exception e) {
            LOG.warn("Error when searching for candidate broker to acquire {}: {}", bundle, e.getMessage(), e);
            lookupFuture.completeExceptionally(e);
            return;
        }

        try {
            Objects.requireNonNull(candidateBroker);

            if (candidateBroker.equals(pulsar.getBrokerId())) {
                // Load manager decided that the local broker should try to become the owner
                ownershipCache.tryAcquiringOwnership(bundle).thenAccept(ownerInfo -> {
                    if (ownerInfo.isDisabled()) {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("Namespace bundle {} is currently being unloaded", bundle);
                        }
                        lookupFuture.completeExceptionally(new IllegalStateException(
                                String.format("Namespace bundle %s is currently being unloaded", bundle)));
                    } else {
                        // Found owner for the namespace bundle

                        if (options.isLoadTopicsInBundle()) {
                            // Schedule the task to preload topics
                            pulsar.loadNamespaceTopics(bundle);
                        }
                        // find the target
                        if (options.hasAdvertisedListenerName()) {
                            AdvertisedListener listener =
                                    ownerInfo.getAdvertisedListeners().get(options.getAdvertisedListenerName());
                            if (listener == null) {
                                lookupFuture.completeExceptionally(
                                        new PulsarServerException("the broker do not have "
                                                + options.getAdvertisedListenerName() + " listener"));
                            } else {
                                URI url = listener.getBrokerServiceUrl();
                                URI urlTls = listener.getBrokerServiceUrlTls();
                                lookupFuture.complete(Optional.of(
                                        new LookupResult(ownerInfo,
                                                url == null ? null : url.toString(),
                                                urlTls == null ? null : urlTls.toString())));
                            }
                        } else {
                            lookupFuture.complete(Optional.of(new LookupResult(ownerInfo)));
                        }
                    }
                }).exceptionally(exception -> {
                    LOG.warn("Failed to acquire ownership for namespace bundle {}: {}", bundle, exception);
                    lookupFuture.completeExceptionally(new PulsarServerException(
                            "Failed to acquire ownership for namespace bundle " + bundle, exception));
                    return null;
                });
            } else {
                // Load managed decider some other broker should try to acquire ownership
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Redirecting to broker {} to acquire ownership of bundle {}", candidateBroker, bundle);
                }

                // Now setting the redirect url
                createLookupResult(candidateBroker, authoritativeRedirect, options.getAdvertisedListenerName())
                        .thenAccept(lookupResult -> lookupFuture.complete(Optional.of(lookupResult)))
                        .exceptionally(ex -> {
                            lookupFuture.completeExceptionally(ex);
                            return null;
                        });
            }
        } catch (Exception e) {
            LOG.warn("Error in trying to acquire namespace bundle ownership for {}: {}", bundle, e.getMessage(), e);
            lookupFuture.completeExceptionally(e);
        }
    }

    /**
     * Builds a redirect {@link LookupResult} pointing at the given broker, using that
     * broker's registration data from the metadata store.
     *
     * @param candidateBroker target broker id (must be non-blank)
     * @param authoritativeRedirect whether the redirect is authoritative
     * @param advertisedListenerName optional listener whose URLs should be used
     */
    public CompletableFuture<LookupResult> createLookupResult(String candidateBroker,
                                                              boolean authoritativeRedirect,
                                                              final String advertisedListenerName) {
        CompletableFuture<LookupResult> lookupFuture = new CompletableFuture<>();
        try {
            checkArgument(StringUtils.isNotBlank(candidateBroker),
                    "Lookup broker can't be null %s", candidateBroker);
            String path = LoadManager.LOADBALANCE_BROKERS_ROOT + "/" + candidateBroker;
            localBrokerDataCache.get(path).thenAccept(reportData -> {
                if (reportData.isPresent()) {
                    LocalBrokerData lookupData = reportData.get();
                    if (StringUtils.isNotBlank(advertisedListenerName)) {
                        AdvertisedListener listener =
                                lookupData.getAdvertisedListeners().get(advertisedListenerName);
                        if (listener == null) {
                            lookupFuture.completeExceptionally(
                                    new PulsarServerException(
                                            "the broker do not have " + advertisedListenerName + " listener"));
                        } else {
                            URI url = listener.getBrokerServiceUrl();
                            URI urlTls =
                                    listener.getBrokerServiceUrlTls();
                            lookupFuture.complete(new LookupResult(lookupData.getWebServiceUrl(),
                                    lookupData.getWebServiceUrlTls(),
                                    url == null ? null : url.toString(),
                                    urlTls == null ? null : urlTls.toString(),
                                    authoritativeRedirect));
                        }
                    } else {
                        // No listener requested: use the broker's default service URLs.
                        lookupFuture.complete(new LookupResult(lookupData.getWebServiceUrl(),
                                lookupData.getWebServiceUrlTls(),
                                lookupData.getPulsarServiceUrl(),
                                lookupData.getPulsarServiceUrlTls(),
                                authoritativeRedirect));
                    }
                } else {
                    // Candidate broker is not registered in the metadata store.
                    lookupFuture.completeExceptionally(new MetadataStoreException.NotFoundException(path));
                }
            }).exceptionally(ex -> {
                lookupFuture.completeExceptionally(ex);
                return null;
            });
        } catch (Exception e) {
            lookupFuture.completeExceptionally(e);
        }
        return lookupFuture;
    }

    /**
     * Checks whether the given broker id appears in the load manager's current set of
     * available brokers.
     */
    public boolean isBrokerActive(String candidateBroker) {
        Set<String> availableBrokers = getAvailableBrokers();
        if (availableBrokers.contains(candidateBroker)) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Broker {} is available for.", candidateBroker);
            }
            return true;
        } else {
            LOG.warn("Broker {} couldn't be found in available brokers {}",
                    candidateBroker, String.join(",", availableBrokers));
            return false;
        }
    }

    // Wraps the load manager's checked exception into an unchecked one.
    private Set<String> getAvailableBrokers() {
        try {
            return loadManager.get().getAvailableBrokers();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Helper function to encapsulate the logic to invoke between old and new load manager.
     *
     * @param serviceUnit the service unit
     * @return the least loaded broker addresses
     * @throws Exception if an error occurs
     */
    private Optional<String> getLeastLoadedFromLoadManager(ServiceUnitId serviceUnit) throws Exception {
        Optional<ResourceUnit> leastLoadedBroker = loadManager.get().getLeastLoaded(serviceUnit);
        if (leastLoadedBroker.isEmpty()) {
            LOG.warn("No broker is available for {}", serviceUnit);
            return Optional.empty();
        }

        String lookupAddress = leastLoadedBroker.get().getResourceId();
        if (LOG.isDebugEnabled()) {
            LOG.debug("{} : redirecting to the least loaded broker, lookup address={}",
                    pulsar.getBrokerId(), lookupAddress);
        }
        return Optional.of(lookupAddress);
    }

    /** Unloads the bundle with defaults: no destination broker, configured timeout. */
    public CompletableFuture<Void> unloadNamespaceBundle(NamespaceBundle bundle) {
        return unloadNamespaceBundle(bundle, Optional.empty());
    }

    /** Unloads the bundle towards an optional destination broker with the configured timeout. */
    public CompletableFuture<Void> unloadNamespaceBundle(NamespaceBundle bundle,
                                                         Optional<String> destinationBroker) {
        // unload namespace bundle
        return unloadNamespaceBundle(bundle, destinationBroker, config.getNamespaceBundleUnloadingTimeoutMs(),
                TimeUnit.MILLISECONDS);
    }

    /** Overload defaulting closeWithoutWaitingClientDisconnect to true. */
    public CompletableFuture<Void> unloadNamespaceBundle(NamespaceBundle bundle,
                                                         Optional<String> destinationBroker,
                                                         long timeout,
                                                         TimeUnit timeoutUnit) {
        return unloadNamespaceBundle(bundle, destinationBroker, timeout, timeoutUnit, true);
    }

    /** Overload with explicit timeout, no destination broker. */
    public CompletableFuture<Void> unloadNamespaceBundle(NamespaceBundle bundle,
                                                         long timeout,
                                                         TimeUnit timeoutUnit) {
        return unloadNamespaceBundle(bundle, Optional.empty(), timeout, timeoutUnit, true);
    }

    /** Overload with explicit timeout and disconnect behavior, no destination broker. */
    public CompletableFuture<Void> unloadNamespaceBundle(NamespaceBundle bundle,
                                                         long timeout,
                                                         TimeUnit timeoutUnit,
                                                         boolean closeWithoutWaitingClientDisconnect) {
        return unloadNamespaceBundle(bundle, Optional.empty(), timeout, timeoutUnit,
                closeWithoutWaitingClientDisconnect);
    }

    /**
     * Unloads a namespace bundle, optionally towards a destination broker. All other
     * overloads funnel into this one.
     */
    public CompletableFuture<Void> unloadNamespaceBundle(NamespaceBundle bundle,
                                                         Optional<String> destinationBroker,
                                                         long timeout,
                                                         TimeUnit timeoutUnit,
                                                         boolean closeWithoutWaitingClientDisconnect) {
if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) { return ExtensibleLoadManagerImpl.get(loadManager.get()) .unloadNamespaceBundleAsync(bundle, destinationBroker, false, timeout, timeoutUnit); } // unload namespace bundle OwnedBundle ob = ownershipCache.getOwnedBundle(bundle); if (ob == null) { return FutureUtil.failedFuture(new IllegalStateException("Bundle " + bundle + " is not currently owned")); } else { return ob.handleUnloadRequest(pulsar, timeout, timeoutUnit, closeWithoutWaitingClientDisconnect); } } public CompletableFuture<Boolean> isNamespaceBundleOwned(NamespaceBundle bundle) { if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) { ExtensibleLoadManagerImpl extensibleLoadManager = ExtensibleLoadManagerImpl.get(loadManager.get()); return extensibleLoadManager.getOwnershipAsync(Optional.empty(), bundle) .thenApply(Optional::isPresent); } return pulsar.getLocalMetadataStore().exists(ServiceUnitUtils.path(bundle)); } public CompletableFuture<Map<String, NamespaceOwnershipStatus>> getOwnedNameSpacesStatusAsync() { return pulsar.getPulsarResources().getNamespaceResources().getIsolationPolicies() .getIsolationDataPoliciesAsync(pulsar.getConfiguration().getClusterName()) .thenApply(nsIsolationPoliciesOpt -> nsIsolationPoliciesOpt.orElseGet(NamespaceIsolationPolicies::new)) .thenCompose(namespaceIsolationPolicies -> { if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) { ExtensibleLoadManagerImpl extensibleLoadManager = ExtensibleLoadManagerImpl.get(loadManager.get()); return extensibleLoadManager.getOwnedServiceUnitsAsync() .thenApply(OwnedServiceUnits -> OwnedServiceUnits.stream() .collect(Collectors.toMap(NamespaceBundle::toString, bundle -> getNamespaceOwnershipStatus(true, namespaceIsolationPolicies.getPolicyByNamespace( bundle.getNamespaceObject()))))); } Collection<CompletableFuture<OwnedBundle>> futures = ownershipCache.getOwnedBundlesAsync().values(); return FutureUtil.waitForAll(futures) .thenApply(__ 
-> futures.stream() .map(CompletableFuture::join) .collect(Collectors.toMap(bundle -> bundle.getNamespaceBundle().toString(), bundle -> getNamespaceOwnershipStatus(bundle.isActive(), namespaceIsolationPolicies.getPolicyByNamespace( bundle.getNamespaceBundle().getNamespaceObject())) )) ); }); } private NamespaceOwnershipStatus getNamespaceOwnershipStatus(boolean isActive, NamespaceIsolationPolicy nsIsolationPolicy) { NamespaceOwnershipStatus nsOwnedStatus = new NamespaceOwnershipStatus(BrokerAssignment.shared, false, isActive); if (nsIsolationPolicy == null) { // no matching policy found, this namespace must be an uncontrolled one and using shared broker return nsOwnedStatus; } // found corresponding policy, set the status to controlled nsOwnedStatus.is_controlled = true; if (nsIsolationPolicy.isPrimaryBroker(pulsar.getAdvertisedAddress())) { nsOwnedStatus.broker_assignment = BrokerAssignment.primary; } else if (nsIsolationPolicy.isSecondaryBroker(pulsar.getAdvertisedAddress())) { nsOwnedStatus.broker_assignment = BrokerAssignment.secondary; } return nsOwnedStatus; } public boolean isNamespaceBundleDisabled(NamespaceBundle bundle) throws Exception { try { // Does ZooKeeper say that the namespace is disabled? CompletableFuture<Optional<NamespaceEphemeralData>> nsDataFuture = ownershipCache.getOwnerAsync(bundle); if (nsDataFuture != null) { Optional<NamespaceEphemeralData> nsData = nsDataFuture.getNow(null); if (nsData != null && nsData.isPresent()) { return nsData.get().isDisabled(); } else { return false; } } else { // if namespace is not owned, it is not considered disabled return false; } } catch (Exception e) { LOG.warn("Exception in getting ownership info for service unit {}: {}", bundle, e.getMessage(), e); } return false; } /** * 1. split the given bundle into two bundles 2. assign ownership of both the bundles to current broker 3. update * policies with newly created bundles into LocalZK 4. disable original bundle and refresh the cache. 
* <p> * It will call splitAndOwnBundleOnceAndRetry to do the real retry work, which will retry "retryTimes". * * @param bundle the bundle to split * @param unload whether to unload the new split bundles * @param splitAlgorithm the algorithm to split the bundle * @param boundaries the boundaries to split the bundle * @return a future that will complete when the bundle is split and owned */ public CompletableFuture<Void> splitAndOwnBundle(NamespaceBundle bundle, boolean unload, NamespaceBundleSplitAlgorithm splitAlgorithm, List<Long> boundaries) { if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) { return ExtensibleLoadManagerImpl.get(loadManager.get()) .splitNamespaceBundleAsync(bundle, splitAlgorithm, boundaries); } final CompletableFuture<Void> unloadFuture = new CompletableFuture<>(); final AtomicInteger counter = new AtomicInteger(BUNDLE_SPLIT_RETRY_LIMIT); splitAndOwnBundleOnceAndRetry(bundle, unload, counter, unloadFuture, splitAlgorithm, boundaries); return unloadFuture; } void splitAndOwnBundleOnceAndRetry(NamespaceBundle bundle, boolean unload, AtomicInteger counter, CompletableFuture<Void> completionFuture, NamespaceBundleSplitAlgorithm splitAlgorithm, List<Long> boundaries) { BundleSplitOption bundleSplitOption = getBundleSplitOption(bundle, boundaries, config); splitAlgorithm.getSplitBoundary(bundleSplitOption).whenComplete((splitBoundaries, ex) -> { CompletableFuture<List<NamespaceBundle>> updateFuture = new CompletableFuture<>(); if (ex == null) { if (splitBoundaries == null || splitBoundaries.size() == 0) { LOG.info("[{}] No valid boundary found in {} to split bundle {}", bundle.getNamespaceObject().toString(), boundaries, bundle.getBundleRange()); completionFuture.complete(null); return; } try { bundleFactory.splitBundles(bundle, splitBoundaries.size() + 1, splitBoundaries) .thenAccept(splitBundles -> { // Split and updateNamespaceBundles. Update may fail because of concurrent write to // Zookeeper. 
if (splitBundles == null) { String msg = format("bundle %s not found under namespace", bundle.toString()); LOG.warn(msg); updateFuture.completeExceptionally(new ServiceUnitNotReadyException(msg)); return; } Objects.requireNonNull(splitBundles.getLeft()); Objects.requireNonNull(splitBundles.getRight()); checkArgument(splitBundles.getRight().size() == splitBoundaries.size() + 1, "bundle has to be split in " + (splitBoundaries.size() + 1) + " bundles"); NamespaceName nsname = bundle.getNamespaceObject(); if (LOG.isDebugEnabled()) { LOG.debug("[{}] splitAndOwnBundleOnce: {}, counter: {}, bundles: {}", nsname.toString(), bundle.getBundleRange(), counter.get(), splitBundles.getRight()); } try { // take ownership of newly split bundles for (NamespaceBundle sBundle : splitBundles.getRight()) { Objects.requireNonNull(ownershipCache.tryAcquiringOwnership(sBundle)); } updateNamespaceBundles(nsname, splitBundles.getLeft()).thenCompose(__ -> updateNamespaceBundlesForPolicies(nsname, splitBundles.getLeft())) .thenRun(() -> { bundleFactory.invalidateBundleCache(bundle.getNamespaceObject()); updateFuture.complete(splitBundles.getRight()); }).exceptionally(ex1 -> { String msg = format("failed to update namespace policies [%s], " + "NamespaceBundle: %s due to %s", nsname.toString(), bundle.getBundleRange(), ex1.getMessage()); LOG.warn(msg); updateFuture.completeExceptionally( new ServiceUnitNotReadyException(msg, ex1.getCause())); return null; }); } catch (Exception e) { String msg = format( "failed to acquire ownership of split bundle for namespace [%s], %s", nsname.toString(), e.getMessage()); LOG.warn(msg, e); updateFuture.completeExceptionally(new ServiceUnitNotReadyException(msg, e)); } }); } catch (Exception e) { updateFuture.completeExceptionally(e); } } else { updateFuture.completeExceptionally(ex); } // If success updateNamespaceBundles, then do invalidateBundleCache and unload. // Else retry splitAndOwnBundleOnceAndRetry. 
updateFuture.whenCompleteAsync((r, t)-> { if (t != null) { // retry several times on BadVersion if ((t.getCause() instanceof MetadataStoreException.BadVersionException) && (counter.decrementAndGet() >= 0)) { pulsar.getExecutor().schedule(() -> pulsar.getOrderedExecutor() .execute(() -> splitAndOwnBundleOnceAndRetry( bundle, unload, counter, completionFuture, splitAlgorithm, boundaries)), 100, MILLISECONDS); } else if (t instanceof IllegalArgumentException) { completionFuture.completeExceptionally(t); } else { // Retry enough, or meet other exception String msg2 = format(" %s not success update nsBundles, counter %d, reason %s", bundle.toString(), counter.get(), t.getMessage()); LOG.warn(msg2); completionFuture.completeExceptionally(new ServiceUnitNotReadyException(msg2)); } return; } // success updateNamespaceBundles // disable old bundle in memory getOwnershipCache().updateBundleState(bundle, false) .thenRun(() -> { // update bundled_topic cache for load-report-generation pulsar.getBrokerService().refreshTopicToStatsMaps(bundle); loadManager.get().setLoadReportForceUpdateFlag(); // release old bundle from ownership cache pulsar.getNamespaceService().getOwnershipCache().removeOwnership(bundle); completionFuture.complete(null); if (unload) { // Unload new split bundles, in background. This will not // affect the split operation which is already safely completed r.forEach(this::unloadNamespaceBundle); } onNamespaceBundleSplit(bundle); }) .exceptionally(e -> { String msg1 = format( "failed to disable bundle %s under namespace [%s] with error %s", bundle.getNamespaceObject().toString(), bundle, ex.getMessage()); LOG.warn(msg1, e); completionFuture.completeExceptionally(new ServiceUnitNotReadyException(msg1)); return null; }); }, pulsar.getOrderedExecutor()); }); } /** * Get the split boundary's. * * @param bundle The bundle to split. * @param boundaries The specified positions, * use for {@link org.apache.pulsar.common.naming.SpecifiedPositionsBundleSplitAlgorithm}. 
     * @return A pair, left is target namespace bundle, right is split bundles.
     */
    public CompletableFuture<Pair<NamespaceBundles, List<NamespaceBundle>>> getSplitBoundary(
            NamespaceBundle bundle, NamespaceBundleSplitAlgorithm nsBundleSplitAlgorithm, List<Long> boundaries) {
        CompletableFuture<List<Long>> splitBoundary = getSplitBoundary(bundle, boundaries, nsBundleSplitAlgorithm);
        return splitBoundary.thenCompose(splitBoundaries -> {
            if (splitBoundaries == null || splitBoundaries.size() == 0) {
                // no usable split point: resolve with null instead of failing
                LOG.info("[{}] No valid boundary found in {} to split bundle {}",
                        bundle.getNamespaceObject().toString(), boundaries, bundle.getBundleRange());
                return CompletableFuture.completedFuture(null);
            }
            return pulsar.getNamespaceService().getNamespaceBundleFactory()
                    .splitBundles(bundle, splitBoundaries.size() + 1, splitBoundaries);
        });
    }

    /** Compute the raw split boundary values for a bundle using the given algorithm. */
    public CompletableFuture<List<Long>> getSplitBoundary(
            NamespaceBundle bundle, List<Long> boundaries, NamespaceBundleSplitAlgorithm nsBundleSplitAlgorithm) {
        BundleSplitOption bundleSplitOption = getBundleSplitOption(bundle, boundaries, config);
        return nsBundleSplitAlgorithm.getSplitBoundary(bundleSplitOption);
    }

    /**
     * Build the split option for a bundle; the flow-or-qps algorithm additionally needs per-topic
     * stats and the configured rate/bandwidth thresholds.
     */
    private BundleSplitOption getBundleSplitOption(NamespaceBundle bundle,
                                                   List<Long> boundaries,
                                                   ServiceConfiguration config) {
        BundleSplitOption bundleSplitOption;
        if (config.getDefaultNamespaceBundleSplitAlgorithm()
                .equals(NamespaceBundleSplitAlgorithm.FLOW_OR_QPS_EQUALLY_DIVIDE)) {
            Map<String, TopicStatsImpl> topicStatsMap = pulsar.getBrokerService().getTopicStats(bundle);
            bundleSplitOption = new FlowOrQpsEquallyDivideBundleSplitOption(this, bundle, boundaries,
                    topicStatsMap,
                    config.getLoadBalancerNamespaceBundleMaxMsgRate(),
                    config.getLoadBalancerNamespaceBundleMaxBandwidthMbytes(),
                    config.getFlowOrQpsDifferenceThresholdPercentage());
        } else {
            bundleSplitOption = new BundleSplitOption(this, bundle, boundaries);
        }
        return bundleSplitOption;
    }

    /**
     * Resolve a split algorithm by name, falling back to the configured default and finally to
     * range-equally-divide when neither name resolves.
     */
    public NamespaceBundleSplitAlgorithm getNamespaceBundleSplitAlgorithmByName(String algorithmName) {
        NamespaceBundleSplitAlgorithm algorithm = NamespaceBundleSplitAlgorithm.of(algorithmName);
        if (algorithm == null) {
            algorithm = NamespaceBundleSplitAlgorithm.of(pulsar.getConfig().getDefaultNamespaceBundleSplitAlgorithm());
        }
        if (algorithm == null) {
            algorithm = NamespaceBundleSplitAlgorithm.RANGE_EQUALLY_DIVIDE_ALGO;
        }
        return algorithm;
    }

    /**
     * Update new bundle-range to admin/policies/namespace.
     * Update may fail because of concurrent write to Zookeeper.
     *
     * @param nsname the namespace name
     * @param nsBundles the new namespace bundles
     */
    public CompletableFuture<Void> updateNamespaceBundlesForPolicies(NamespaceName nsname,
                                                                     NamespaceBundles nsBundles) {
        Objects.requireNonNull(nsname);
        Objects.requireNonNull(nsBundles);
        return pulsar.getPulsarResources().getNamespaceResources().getPoliciesAsync(nsname).thenCompose(policies -> {
            if (policies.isPresent()) {
                return pulsar.getPulsarResources().getNamespaceResources().setPoliciesAsync(nsname, oldPolicies -> {
                    oldPolicies.bundles = nsBundles.getBundlesData();
                    return oldPolicies;
                });
            } else {
                // missing policies node: recreate it carrying only the new bundle data
                LOG.error("Policies of namespace {} is not exist!", nsname);
                Policies newPolicies = new Policies();
                newPolicies.bundles = nsBundles.getBundlesData();
                return pulsar.getPulsarResources().getNamespaceResources().createPoliciesAsync(nsname, newPolicies);
            }
        });
    }

    /**
     * Update new bundle-range to LocalZk (create a new node if not present).
     * Update may fail because of concurrent write to Zookeeper.
     *
     * @param nsname the namespace name
     * @param nsBundles the new namespace bundles
     */
    public CompletableFuture<Void> updateNamespaceBundles(NamespaceName nsname, NamespaceBundles nsBundles) {
        Objects.requireNonNull(nsname);
        Objects.requireNonNull(nsBundles);

        LocalPolicies localPolicies = nsBundles.toLocalPolicies();
        // versioned write so a concurrent modification surfaces as BadVersion rather than a lost update
        return pulsar.getPulsarResources().getLocalPolicies()
                .setLocalPoliciesWithVersion(nsname, localPolicies, nsBundles.getVersion());
    }

    public OwnershipCache getOwnershipCache() {
        return ownershipCache;
    }

    /**
     * Blocking snapshot of the bundles owned by this broker.
     * With the extensible load manager this waits up to the metadata-operation timeout and wraps
     * any failure in a {@link RuntimeException}.
     */
    public Set<NamespaceBundle> getOwnedServiceUnits() {
        if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) {
            ExtensibleLoadManagerImpl extensibleLoadManager = ExtensibleLoadManagerImpl.get(loadManager.get());
            try {
                return extensibleLoadManager.getOwnedServiceUnitsAsync()
                        .get(config.getMetadataStoreOperationTimeoutSeconds(), TimeUnit.SECONDS);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
        return ownershipCache.getOwnedBundles().values().stream().map(OwnedBundle::getNamespaceBundle)
                .collect(Collectors.toSet());
    }

    /** Blocking form of {@link #isServiceUnitOwnedAsync(ServiceUnitId)}. */
    public boolean isServiceUnitOwned(ServiceUnitId suName) throws Exception {
        return isServiceUnitOwnedAsync(suName).get(config.getMetadataStoreOperationTimeoutSeconds(),
                TimeUnit.SECONDS);
    }

    /**
     * Whether this broker owns the given service unit; dispatches on the concrete unit type
     * (topic, namespace, or bundle).
     */
    public CompletableFuture<Boolean> isServiceUnitOwnedAsync(ServiceUnitId suName) {
        if (suName instanceof TopicName) {
            return isTopicOwnedAsync((TopicName) suName);
        }

        if (suName instanceof NamespaceName) {
            return isNamespaceOwnedAsync((NamespaceName) suName);
        }

        if (suName instanceof NamespaceBundle) {
            if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) {
                return loadManager.get().checkOwnershipAsync(Optional.empty(), suName);
            }
            // TODO: Add unit tests cover it.
            return CompletableFuture.completedFuture(
                    ownershipCache.isNamespaceBundleOwned((NamespaceBundle) suName));
        }

        return FutureUtil.failedFuture(
                new IllegalArgumentException("Invalid class of NamespaceBundle: " + suName.getClass().getName()));
    }

    /**
     * @deprecated This method is only used in test now.
     */
    @Deprecated
    public boolean isServiceUnitActive(TopicName topicName) {
        try {
            return isServiceUnitActiveAsync(topicName).get(pulsar.getConfig()
                    .getMetadataStoreOperationTimeoutSeconds(), SECONDS);
        } catch (InterruptedException | ExecutionException | TimeoutException e) {
            // NOTE(review): InterruptedException is rethrown without restoring the interrupt flag
            // (Thread.currentThread().interrupt()) — acceptable for a deprecated test-only helper,
            // but worth confirming.
            LOG.warn("Unable to find OwnedBundle for topic in time - [{}]", topicName, e);
            throw new RuntimeException(e);
        }
    }

    /** Whether the bundle containing the topic is owned by this broker and currently active. */
    public CompletableFuture<Boolean> isServiceUnitActiveAsync(TopicName topicName) {
        // TODO: Add unit tests cover it.
        if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) {
            return getBundleAsync(topicName)
                    .thenCompose(bundle -> loadManager.get().checkOwnershipAsync(Optional.of(topicName), bundle));
        }

        return getBundleAsync(topicName).thenCompose(bundle -> {
            Optional<CompletableFuture<OwnedBundle>> optionalFuture = ownershipCache.getOwnedBundleAsync(bundle);
            if (optionalFuture.isEmpty()) {
                return CompletableFuture.completedFuture(false);
            }
            return optionalFuture.get().thenApply(ob -> ob != null && ob.isActive());
        });
    }

    /** Whether this broker owns the full bundle of the given namespace. */
    private CompletableFuture<Boolean> isNamespaceOwnedAsync(NamespaceName fqnn) {
        // TODO: Add unit tests cover it.
        if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) {
            return getFullBundleAsync(fqnn)
                    .thenCompose(bundle -> loadManager.get().checkOwnershipAsync(Optional.empty(), bundle));
        }
        return getFullBundleAsync(fqnn)
                .thenApply(bundle -> ownershipCache.getOwnedBundle(bundle) != null);
    }

    /** Whether this broker owns the bundle containing the given topic. */
    private CompletableFuture<Boolean> isTopicOwnedAsync(TopicName topic) {
        // TODO: Add unit tests cover it.
        if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) {
            return getBundleAsync(topic)
                    .thenCompose(bundle -> loadManager.get().checkOwnershipAsync(Optional.of(topic), bundle));
        }
        return getBundleAsync(topic).thenApply(ownershipCache::isNamespaceBundleOwned);
    }

    /** Check (and, through the ownership cache, possibly acquire) ownership of a topic's bundle. */
    public CompletableFuture<Boolean> checkTopicOwnership(TopicName topicName) {
        // TODO: Add unit tests cover it.
        if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) {
            return getBundleAsync(topicName)
                    .thenCompose(bundle -> loadManager.get().checkOwnershipAsync(Optional.of(topicName), bundle));
        }
        return getBundleAsync(topicName)
                .thenCompose(ownershipCache::checkOwnershipAsync);
    }

    /** Release ownership of a bundle and invalidate the cached bundle metadata for its namespace. */
    public CompletableFuture<Void> removeOwnedServiceUnitAsync(NamespaceBundle nsBundle) {
        CompletableFuture<Void> future;
        if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) {
            ExtensibleLoadManagerImpl extensibleLoadManager = ExtensibleLoadManagerImpl.get(loadManager.get());
            future = extensibleLoadManager.unloadNamespaceBundleAsync(
                    nsBundle, Optional.empty(), true,
                    pulsar.getConfig().getNamespaceBundleUnloadingTimeoutMs(), TimeUnit.MILLISECONDS);
        } else {
            future = ownershipCache.removeOwnership(nsBundle);
        }
        return future.thenRun(() -> bundleFactory.invalidateBundleCache(nsBundle.getNamespaceObject()));
    }

    /** Notify all registered ownership listeners that this broker now owns the bundle. */
    public void onNamespaceBundleOwned(NamespaceBundle bundle) {
        for (NamespaceBundleOwnershipListener bundleOwnedListener : bundleOwnershipListeners) {
            notifyNamespaceBundleOwnershipListener(bundle, bundleOwnedListener);
        }
    }

    /** Notify all registered ownership listeners that the bundle is being unloaded. */
    public void onNamespaceBundleUnload(NamespaceBundle bundle) {
        for (NamespaceBundleOwnershipListener bundleOwnedListener : bundleOwnershipListeners) {
            try {
                // each listener filters the bundles it cares about via test()
                if (bundleOwnedListener.test(bundle)) {
                    bundleOwnedListener.unLoad(bundle);
                }
            } catch (Throwable t) {
                // a misbehaving listener must not break the unload of the bundle or other listeners
                LOG.error("Call bundle {} ownership listener error", bundle, t);
            }
        }
    }

    /** Notify all registered split listeners that the bundle was split. */
    public void onNamespaceBundleSplit(NamespaceBundle bundle) {
        for (NamespaceBundleSplitListener bundleSplitListener : bundleSplitListeners) {
            try {
                if (bundleSplitListener.test(bundle)) {
                    bundleSplitListener.onSplit(bundle);
                }
            } catch (Throwable t) {
                LOG.error("Call bundle {} split listener {} error", bundle, bundleSplitListener, t);
            }
        }
    }

    /**
     * Register ownership listeners (nulls are skipped) and, once the broker is ready for incoming
     * requests, replay the currently owned bundles to them.
     */
    public void addNamespaceBundleOwnershipListener(NamespaceBundleOwnershipListener... listeners) {
        Objects.requireNonNull(listeners);
        for (NamespaceBundleOwnershipListener listener : listeners) {
            if (listener != null) {
                bundleOwnershipListeners.add(listener);
            }
        }
        pulsar.runWhenReadyForIncomingRequests(() -> {
            try {
                getOwnedServiceUnits().forEach(bundle -> notifyNamespaceBundleOwnershipListener(bundle, listeners));
            } catch (Exception e) {
                LOG.error("Failed to notify namespace bundle ownership listener", e);
            }
        });
    }

    /** Register split listeners; nulls are skipped. */
    public void addNamespaceBundleSplitListener(NamespaceBundleSplitListener... listeners) {
        Objects.requireNonNull(listeners);
        for (NamespaceBundleSplitListener listener : listeners) {
            if (listener != null) {
                bundleSplitListeners.add(listener);
            }
        }
    }

    /** Deliver an onLoad notification for the bundle to each listener that accepts it. */
    private void notifyNamespaceBundleOwnershipListener(NamespaceBundle bundle,
                                                        NamespaceBundleOwnershipListener...
                                                                listeners) {
        if (listeners != null) {
            for (NamespaceBundleOwnershipListener listener : listeners) {
                try {
                    if (listener.test(bundle)) {
                        listener.onLoad(bundle);
                    }
                } catch (Throwable t) {
                    LOG.error("Call bundle {} ownership listener error", bundle, t);
                }
            }
        }
    }

    public NamespaceBundleFactory getNamespaceBundleFactory() {
        return bundleFactory;
    }

    public ServiceUnitId getServiceUnitId(TopicName topicName) throws Exception {
        return getBundle(topicName);
    }

    /** All persistent plus non-persistent topics of a namespace. */
    public CompletableFuture<List<String>> getFullListOfTopics(NamespaceName namespaceName) {
        return getListOfPersistentTopics(namespaceName)
                .thenCombine(getListOfNonPersistentTopics(namespaceName), ListUtils::union);
    }

    /** All partitioned-topic names (persistent and non-persistent) of a namespace. */
    public CompletableFuture<List<String>> getFullListOfPartitionedTopic(NamespaceName namespaceName) {
        NamespaceResources.PartitionedTopicResources partitionedTopicResources =
                pulsar.getPulsarResources().getNamespaceResources().getPartitionedTopicResources();
        return partitionedTopicResources.listPartitionedTopicsAsync(namespaceName, TopicDomain.persistent)
                .thenCombine(partitionedTopicResources
                                .listPartitionedTopicsAsync(namespaceName, TopicDomain.non_persistent),
                        ListUtils::union);
    }

    /**
     * All topics (including partition instances) of the namespace that fall inside the given
     * bundle, de-duplicated.
     */
    public CompletableFuture<List<String>> getOwnedTopicListForNamespaceBundle(NamespaceBundle bundle) {
        return getFullListOfTopics(bundle.getNamespaceObject()).thenCompose(topics ->
                        CompletableFuture.completedFuture(
                                topics.stream()
                                        .filter(topic -> bundle.includes(TopicName.get(topic)))
                                        .collect(Collectors.toList())))
                .thenCombine(getAllPartitions(bundle.getNamespaceObject()).thenCompose(topics ->
                                CompletableFuture.completedFuture(
                                        topics.stream().filter(topic -> bundle.includes(TopicName.get(topic)))
                                                .collect(Collectors.toList()))),
                        (left, right) -> {
                            // merge the partition list into the topic list without duplicates
                            for (String topic : right) {
                                if (!left.contains(topic)) {
                                    left.add(topic);
                                }
                            }
                            return left;
                        });
    }

    /***
     * Check topic exists( partitioned or non-partitioned ).
     */
    public CompletableFuture<TopicExistsInfo> checkTopicExists(TopicName topic) {
        return pulsar.getBrokerService()
                .fetchPartitionedTopicMetadataAsync(TopicName.get(topic.toString()))
                .thenCompose(metadata -> {
                    if (metadata.partitions > 0) {
                        return CompletableFuture.completedFuture(
                                TopicExistsInfo.newPartitionedTopicExists(metadata.partitions));
                    }
                    // partitions == 0: fall back to a non-partitioned existence check
                    return checkNonPartitionedTopicExists(topic)
                            .thenApply(b -> b ? TopicExistsInfo.newNonPartitionedTopicExists()
                                    : TopicExistsInfo.newTopicNotExists());
                });
    }

    /***
     * Check non-partitioned topic exists.
     */
    public CompletableFuture<Boolean> checkNonPartitionedTopicExists(TopicName topic) {
        if (topic.isPersistent()) {
            return pulsar.getPulsarResources().getTopicResources().persistentTopicExists(topic);
        } else {
            return checkNonPersistentNonPartitionedTopicExists(topic.toString());
        }
    }

    /**
     * Regarding non-persistent topic, we do not know whether it exists or not. Redirect the request to the ownership
     * broker of this topic. HTTP API has implemented the mechanism that redirect to ownership broker, so just call
     * HTTP API here.
     */
    public CompletableFuture<Boolean> checkNonPersistentNonPartitionedTopicExists(String topic) {
        TopicName topicName = TopicName.get(topic);
        // "non-partitioned & non-persistent" topics only exist on the owner broker.
        return checkTopicOwnership(TopicName.get(topic)).thenCompose(isOwned -> {
            // The current broker is the owner.
            if (isOwned) {
                CompletableFuture<Optional<Topic>> nonPersistentTopicFuture = pulsar.getBrokerService()
                        .getTopic(topic, false);
                if (nonPersistentTopicFuture != null) {
                    return nonPersistentTopicFuture.thenApply(Optional::isPresent);
                } else {
                    return CompletableFuture.completedFuture(false);
                }
            }

            // Forward to the owner broker.
            PulsarClientImpl pulsarClient;
            try {
                pulsarClient = (PulsarClientImpl) pulsar.getClient();
            } catch (Exception ex) {
                // This error will never occur.
                log.error("{} Failed to get partition metadata due to create internal admin client fails",
                        topic, ex);
                return FutureUtil.failedFuture(ex);
            }

            LookupOptions lookupOptions = LookupOptions.builder().readOnly(false).authoritative(true).build();
            return getBrokerServiceUrlAsync(TopicName.get(topic), lookupOptions)
                    .thenCompose(lookupResult -> {
                        if (!lookupResult.isPresent()) {
                            log.error("{} Failed to get partition metadata due can not find the owner broker",
                                    topic);
                            return FutureUtil.failedFuture(new ServiceUnitNotReadyException(
                                    "No broker was available to own " + topicName));
                        }
                        LookupData lookupData = lookupResult.get().getLookupData();
                        String brokerUrl;
                        // prefer the TLS endpoint when broker-client TLS is enabled and one is published
                        if (pulsar.getConfiguration().isBrokerClientTlsEnabled()
                                && StringUtils.isNotEmpty(lookupData.getBrokerUrlTls())) {
                            brokerUrl = lookupData.getBrokerUrlTls();
                        } else {
                            brokerUrl = lookupData.getBrokerUrl();
                        }
                        return pulsarClient.getLookup(brokerUrl)
                                .getPartitionedTopicMetadata(topicName, false)
                                .thenApply(metadata -> true)
                                .exceptionallyCompose(ex -> {
                                    Throwable actEx = FutureUtil.unwrapCompletionException(ex);
                                    if (actEx instanceof PulsarClientException.NotFoundException
                                            || actEx instanceof PulsarClientException.TopicDoesNotExistException
                                            || actEx instanceof PulsarAdminException.NotFoundException) {
                                        // owner broker answered: the topic does not exist
                                        return CompletableFuture.completedFuture(false);
                                    } else if (actEx
                                            instanceof PulsarClientException.FeatureNotSupportedException fe) {
                                        if (fe.getFailedFeatureCheck()
                                                == SupportsGetPartitionedMetadataWithoutAutoCreation) {
                                            // Since the feature PIP-344 isn't supported, restore the behavior to
                                            // the previous behavior before the PIP-344 changes.
                                            log.info("{} Checking the existence of a non-persistent"
                                                    + " non-partitioned topic "
                                                    + "was performed using the behavior prior to PIP-344 changes, "
                                                    + "because the broker does not support the PIP-344 feature "
                                                    + "'supports_get_partitioned_metadata_without_auto_creation'.",
                                                    topic);
                                            return CompletableFuture.completedFuture(false);
                                        } else {
                                            log.error("{} Failed to get partition metadata", topic, ex);
                                            return CompletableFuture.failedFuture(ex);
                                        }
                                    } else {
                                        log.error("{} Failed to get partition metadata", topic, ex);
                                        return CompletableFuture.failedFuture(ex);
                                    }
                                });
                    });
        });
    }

    /** List topics of the namespace according to the requested mode (ALL / persistent / non-persistent). */
    public CompletableFuture<List<String>> getListOfTopics(NamespaceName namespaceName, Mode mode) {
        switch (mode) {
            case ALL:
                return getFullListOfTopics(namespaceName);
            case NON_PERSISTENT:
                return getListOfNonPersistentTopics(namespaceName);
            case PERSISTENT:
            default:
                return getListOfPersistentTopics(namespaceName);
        }
    }

    /**
     * List topics with system topics filtered out. Concurrent callers for the same
     * (mode, namespace) key share one in-flight query via {@code inProgressQueryUserTopics}.
     */
    public CompletableFuture<List<String>> getListOfUserTopics(NamespaceName namespaceName, Mode mode) {
        String key = String.format("%s://%s", mode, namespaceName);
        final MutableBoolean initializedByCurrentThread = new MutableBoolean();
        CompletableFuture<List<String>> queryRes = inProgressQueryUserTopics.computeIfAbsent(key, k -> {
            initializedByCurrentThread.setTrue();
            return getListOfTopics(namespaceName, mode).thenApplyAsync(list -> {
                return TopicList.filterSystemTopic(list);
            }, pulsar.getExecutor());
        });
        if (initializedByCurrentThread.getValue()) {
            // only the thread that started the query removes it, once it settles
            queryRes.whenComplete((ignore, ex) -> {
                inProgressQueryUserTopics.remove(key, queryRes);
            });
        }
        return queryRes;
    }

    /** All partition instances (persistent and non-persistent) of a namespace. */
    public CompletableFuture<List<String>> getAllPartitions(NamespaceName namespaceName) {
        return getPartitions(namespaceName, TopicDomain.persistent)
                .thenCombine(getPartitions(namespaceName, TopicDomain.non_persistent), ListUtils::union);
    }

    /** Expand every partitioned topic of the namespace/domain into its partition names. */
    public CompletableFuture<List<String>> getPartitions(NamespaceName namespaceName, TopicDomain topicDomain) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Getting children from partitioned-topics now: {} - {}",
                    namespaceName, topicDomain);
        }

        return pulsar.getPulsarResources().getNamespaceResources().getPartitionedTopicResources()
                .listPartitionedTopicsAsync(namespaceName, topicDomain)
                .thenCompose(topics -> {
                    CompletableFuture<List<String>> result = new CompletableFuture<>();
                    // synchronizedList: partition lists are appended from multiple callbacks
                    List<String> resultPartitions = Collections.synchronizedList(new ArrayList<>());
                    if (CollectionUtils.isNotEmpty(topics)) {
                        List<CompletableFuture<List<String>>> futures = new ArrayList<>();
                        for (String topic : topics) {
                            CompletableFuture<List<String>> future = getPartitionsForTopic(TopicName.get(topic));
                            futures.add(future);
                            future.thenAccept(resultPartitions::addAll);
                        }
                        FutureUtil.waitForAll(futures).whenComplete((v, ex) -> {
                            if (ex != null) {
                                result.completeExceptionally(ex);
                            } else {
                                result.complete(resultPartitions);
                            }
                        });
                    } else {
                        result.complete(resultPartitions);
                    }
                    return result;
                });
    }

    /** Expand a single topic into its partition names based on its partitioned metadata. */
    private CompletableFuture<List<String>> getPartitionsForTopic(TopicName topicName) {
        return pulsar.getBrokerService().fetchPartitionedTopicMetadataAsync(topicName).thenCompose(meta -> {
            List<String> result = new ArrayList<>();
            for (int i = 0; i < meta.partitions; i++) {
                result.add(topicName.getPartition(i).toString());
            }
            return CompletableFuture.completedFuture(result);
        });
    }

    /***
     * List persistent topics names under a namespace, the topic name contains the partition suffix.
     */
    public CompletableFuture<List<String>> getListOfPersistentTopics(NamespaceName namespaceName) {
        return pulsar.getPulsarResources().getTopicResources().listPersistentTopicsAsync(namespaceName);
    }

    /**
     * List active non-persistent topics of a namespace. When the namespace is owned by a peer
     * replication cluster, the listing is fetched from that cluster; otherwise it is taken from
     * this broker's in-memory topic map.
     */
    public CompletableFuture<List<String>> getListOfNonPersistentTopics(NamespaceName namespaceName) {
        return PulsarWebResource.checkLocalOrGetPeerReplicationCluster(pulsar, namespaceName, true)
                .thenCompose(peerClusterData -> {
                    // if peer-cluster-data is present it means namespace is owned by that peer-cluster and request
                    // should redirect to the peer-cluster
                    if (peerClusterData != null) {
                        return getNonPersistentTopicsFromPeerCluster(peerClusterData, namespaceName);
                    } else {
                        // Non-persistent topics don't have managed ledgers. So we have to retrieve them from local
                        // cache.
                        List<String> topics = new ArrayList<>();
                        synchronized (pulsar.getBrokerService().getMultiLayerTopicMap()) {
                            if (pulsar.getBrokerService().getMultiLayerTopicMap()
                                    .containsKey(namespaceName.toString())) {
                                pulsar.getBrokerService().getMultiLayerTopicMap().get(namespaceName.toString())
                                        .forEach((__, bundle) -> bundle.forEach((topicName, topic) -> {
                                            if (topic instanceof NonPersistentTopic
                                                    && ((NonPersistentTopic) topic).isActive()) {
                                                topics.add(topicName);
                                            }
                                        }));
                            }
                        }
                        // natural (lexicographic) ordering
                        topics.sort(null);
                        return CompletableFuture.completedFuture(topics);
                    }
                });
    }

    /** Fetch the non-persistent topic listing from a peer cluster via a lookup client. */
    private CompletableFuture<List<String>> getNonPersistentTopicsFromPeerCluster(ClusterDataImpl peerClusterData,
                                                                                  NamespaceName namespace) {
        PulsarClientImpl client = getNamespaceClient(peerClusterData);
        return client.getLookup().getTopicsUnderNamespace(namespace, Mode.NON_PERSISTENT, null, null)
                .thenApply(GetTopicsResult::getTopics);
    }

    /**
     * Get (or lazily create and cache) a Pulsar client pointing at the given cluster, configured
     * from this broker's brokerClient_* settings.
     */
    public PulsarClientImpl getNamespaceClient(ClusterDataImpl cluster) {
        PulsarClientImpl client = namespaceClients.get(cluster);
        if (client != null) {
            return client;
        }

        return namespaceClients.computeIfAbsent(cluster, key -> {
            try {
                ClientBuilder clientBuilder = PulsarClient.builder()
                        .memoryLimit(0, SizeUnit.BYTES)
                        .enableTcpNoDelay(false)
                        .statsInterval(0, TimeUnit.SECONDS);

                // Apply all arbitrary configuration. This must be called before setting any fields annotated as
                // @Secret on the ClientConfigurationData object because of the way they are serialized.
                // (upstream reference link redacted)
                clientBuilder.loadConf(PropertiesUtils.filterAndMapProperties(config.getProperties(),
                        "brokerClient_"));
                // Disabled auto release useless connection.
                clientBuilder.connectionMaxIdleSeconds(-1);

                if (pulsar.getConfiguration().isAuthenticationEnabled()) {
                    clientBuilder.authentication(pulsar.getConfiguration().getBrokerClientAuthenticationPlugin(),
                            pulsar.getConfiguration().getBrokerClientAuthenticationParameters());
                }

                if (pulsar.getConfiguration().isTlsEnabled()) {
                    clientBuilder
                            .serviceUrl(isNotBlank(cluster.getBrokerServiceUrlTls())
                                    ? cluster.getBrokerServiceUrlTls() : cluster.getServiceUrlTls())
                            .enableTls(true)
                            .tlsTrustCertsFilePath(pulsar.getConfiguration().getBrokerClientTrustCertsFilePath())
                            .allowTlsInsecureConnection(pulsar.getConfiguration().isTlsAllowInsecureConnection())
                            .enableTlsHostnameVerification(
                                    pulsar.getConfiguration().isTlsHostnameVerificationEnabled())
                            .sslFactoryPlugin(pulsar.getConfiguration().getBrokerClientSslFactoryPlugin())
                            .sslFactoryPluginParams(
                                    pulsar.getConfiguration().getBrokerClientSslFactoryPluginParams());
                } else {
                    clientBuilder.serviceUrl(isNotBlank(cluster.getBrokerServiceUrl()) ?
cluster.getBrokerServiceUrl() : cluster.getServiceUrl()); } // Share all the IO threads across broker and client connections ClientConfigurationData conf = ((ClientBuilderImpl) clientBuilder).getClientConfigurationData(); return pulsar.createClientImpl(conf); } catch (Exception e) { throw new RuntimeException(e); } }); } public CompletableFuture<Optional<NamespaceEphemeralData>> getOwnerAsync(NamespaceBundle bundle) { if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) { ExtensibleLoadManagerImpl extensibleLoadManager = ExtensibleLoadManagerImpl.get(loadManager.get()); return extensibleLoadManager.getOwnershipWithLookupDataAsync(bundle) .thenCompose(lookupData -> lookupData .map(brokerLookupData -> CompletableFuture.completedFuture(Optional.of(brokerLookupData.toNamespaceEphemeralData()))) .orElseGet(() -> CompletableFuture.completedFuture(Optional.empty()))); } return ownershipCache.getOwnerAsync(bundle); } public boolean checkOwnershipPresent(NamespaceBundle bundle) throws Exception { return checkOwnershipPresentAsync(bundle).get(pulsar.getConfiguration() .getMetadataStoreOperationTimeoutSeconds(), SECONDS); } public CompletableFuture<Boolean> checkOwnershipPresentAsync(NamespaceBundle bundle) { if (ExtensibleLoadManagerImpl.isLoadManagerExtensionEnabled(pulsar)) { ExtensibleLoadManagerImpl extensibleLoadManager = ExtensibleLoadManagerImpl.get(loadManager.get()); return extensibleLoadManager.getOwnershipAsync(Optional.empty(), bundle) .thenApply(Optional::isPresent); } return getOwnerAsync(bundle).thenApply(Optional::isPresent); } public void unloadSLANamespace() throws Exception { NamespaceName namespaceName = getSLAMonitorNamespace(pulsar.getBrokerId(), config); LOG.info("Checking owner for SLA namespace {}", namespaceName); NamespaceBundle nsFullBundle = getFullBundle(namespaceName); if (!checkOwnershipPresent(nsFullBundle)) { // No one owns the namespace so no point trying to unload it // Next lookup will assign the bundle to this broker. 
return; } LOG.info("Trying to unload SLA namespace {}", namespaceName); PulsarAdmin adminClient = pulsar.getAdminClient(); adminClient.namespaces().unload(namespaceName.toString()); LOG.info("Namespace {} unloaded successfully", namespaceName); } public static NamespaceName getHeartbeatNamespace(String lookupBroker, ServiceConfiguration config) { return NamespaceName.get(String.format(HEARTBEAT_NAMESPACE_FMT, config.getClusterName(), lookupBroker)); } public static NamespaceName getHeartbeatNamespaceV2(String lookupBroker, ServiceConfiguration config) { return NamespaceName.get(String.format(HEARTBEAT_NAMESPACE_FMT_V2, lookupBroker)); } public static NamespaceName getSLAMonitorNamespace(String lookupBroker, ServiceConfiguration config) { return NamespaceName.get(String.format(SLA_NAMESPACE_FMT, config.getClusterName(), lookupBroker)); } public static String checkHeartbeatNamespace(ServiceUnitId ns) { Matcher m = HEARTBEAT_NAMESPACE_PATTERN.matcher(ns.getNamespaceObject().toString()); if (m.matches()) { LOG.debug("Heartbeat namespace matched the lookup namespace {}", ns.getNamespaceObject().toString()); return m.group(1); } else { return null; } } public static String checkHeartbeatNamespaceV2(ServiceUnitId ns) { Matcher m = HEARTBEAT_NAMESPACE_PATTERN_V2.matcher(ns.getNamespaceObject().toString()); if (m.matches()) { LOG.debug("Heartbeat namespace v2 matched the lookup namespace {}", ns.getNamespaceObject().toString()); return m.group(1); } else { return null; } } public static String getSLAMonitorBrokerName(ServiceUnitId ns) { Matcher m = SLA_NAMESPACE_PATTERN.matcher(ns.getNamespaceObject().toString()); if (m.matches()) { return m.group(1); } else { return null; } } public static boolean isSystemServiceNamespace(String namespace) { return SYSTEM_NAMESPACE.toString().equals(namespace) || SLA_NAMESPACE_PATTERN.matcher(namespace).matches() || HEARTBEAT_NAMESPACE_PATTERN.matcher(namespace).matches() || HEARTBEAT_NAMESPACE_PATTERN_V2.matcher(namespace).matches(); } 
/**
 * Used for filtering bundles in special namespaces.
 *
 * @param namespace the namespace name
 * @return True if namespace is HEARTBEAT_NAMESPACE or SLA_NAMESPACE
 */
public static boolean isSLAOrHeartbeatNamespace(String namespace) {
    // Pure regex checks with no side effects, so evaluation order is irrelevant.
    return HEARTBEAT_NAMESPACE_PATTERN.matcher(namespace).matches()
            || HEARTBEAT_NAMESPACE_PATTERN_V2.matcher(namespace).matches()
            || SLA_NAMESPACE_PATTERN.matcher(namespace).matches();
}

/**
 * Returns true if the given service unit belongs to a v1 or v2 heartbeat namespace.
 */
public static boolean isHeartbeatNamespace(ServiceUnitId ns) {
    final String nsName = ns.getNamespaceObject().toString();
    return HEARTBEAT_NAMESPACE_PATTERN.matcher(nsName).matches()
            || HEARTBEAT_NAMESPACE_PATTERN_V2.matcher(nsName).matches();
}

/**
 * Registers this broker's SLA-monitor namespace and reports whether the registration
 * made this broker the owner.
 *
 * @return true if the namespace was registered into the local cache
 * @throws PulsarServerException if registration fails
 */
public boolean registerSLANamespace() throws PulsarServerException {
    final String brokerId = pulsar.getBrokerId();
    final boolean isNameSpaceRegistered = registerNamespace(getSLAMonitorNamespace(brokerId, config), false);
    if (LOG.isDebugEnabled()) {
        if (isNameSpaceRegistered) {
            LOG.debug("Added SLA Monitoring namespace name in local cache: ns={}",
                    getSLAMonitorNamespace(brokerId, config));
        } else {
            LOG.debug("SLA Monitoring not owned by the broker: ns={}",
                    getSLAMonitorNamespace(brokerId, config));
        }
    }
    return isNameSpaceRegistered;
}

/**
 * Shuts down every cached per-cluster namespace client. Failures are logged and do
 * not abort the shutdown of the remaining clients.
 */
@Override
public void close() {
    namespaceClients.forEach((clusterData, namespaceClient) -> {
        try {
            namespaceClient.shutdown();
        } catch (PulsarClientException e) {
            LOG.warn("Error shutting down namespace client for cluster {}", clusterData, e);
        }
    });
}
}
```
Strikeforce: Nashville was a mixed martial arts event produced by Strikeforce. It took place on April 17, 2010, at the Bridgestone Arena in Nashville, Tennessee, United States and was broadcast domestically on CBS. Background The main event was originally rumored to be Fedor Emelianenko taking on Fabrício Werdum. However, the fight was moved to a future event. Bobby Lashley was expected to fight on the card, as the promotion had already submitted an opponent for Lashley and was awaiting approval from the Tennessee Athletic Commission. According to Strikeforce CEO Scott Coker, the bout would likely be the fourth bout on the CBS televised portion of the event. On April 5, Lashley confirmed that he would not appear on the card since Strikeforce officials could not guarantee an appearance on the televised portion of the card due to time constraints. The event drew an estimated 2,900,000 viewers on CBS. Nashville brawl The event was marred by a post-fight brawl following the main event. During Jake Shields' post-fight interview, Jason Miller gained access to the cage and interrupted Shields, asking, "Where's my rematch, buddy?" After both Gilbert Melendez and Shields pushed Miller away, Nick Diaz then threw the initial punch to start the full-scale brawl. Members of the Cesar Gracie Jiu-Jitsu camp, including Melendez, Nick and Nate Diaz attacked Miller, while Miller was restrained on the canvas by members of Diaz's camp. The fight was then broken up by referees, members of Dan Henderson's corner and members of security. The brawl resulted in CBS cancelling its half of Strikeforce's television contract: MMA would not return to network television until November 2011 with UFC on Fox: Velasquez vs. Dos Santos.
Results See also Strikeforce List of Strikeforce champions List of Strikeforce events 2010 in Strikeforce References External links Nashville 2010 in mixed martial arts Mixed martial arts in Tennessee Events in Nashville, Tennessee Sports competitions in Nashville, Tennessee 2010 in sports in Tennessee
Joel Villarino (born February 4, 1965 in San Carlos, Negros Occidental, Philippines) is a Filipino football coach who manages Green Archers United F.C. He was also a former member of the Philippines national football team. Competitive career Villarino played for the Philippines national football team. He made an appearance at the 1998 FIFA World Cup qualifiers playing in the 0–5 loss to Qatar in September 1996. Coaching career Club Joel Villarino has coached various teams having coached Pasargad F.C., the women's squad of Global F.C., Miriam College varsity team, and the Ateneo Futsal Club by 2013. Pasargad Villarino worked with Pasargad F.C. again as their head coach debuting in a United Football League match against Stallion for Pasargad in July 2016. Kaya (2016 AFC Cup) Serving as head coach Kaya FC only for the 2016 AFC Cup, Villarino led his team in the 0-1 loss to Kitchee SC in their first fixture, hosting Maldivian club New Radiant in their second, the first AFC Cup match Kaya hosted. This was followed by a 1-0 win over Balestier Khalsa in their third game, Villarino exuberant with the victory. The Filipino coach eventually led the club to book their place in the knockout stages, euphoric with results. However, in the round-of-16, his charges were defeated by Johor Darul Takzim 7-2; in spite of the scoreline, Villarino claimed it was 'an honor' to play against JDT. Philippines women's national team Villarino was appointed as the head coach of the Philippines women's national football team in September 2008 and was tasked to mentor the squad that will compete at the 2008 AFF Women's Championship. References 1965 births Filipino football managers Living people People from Negros Occidental Philippines women's national football team managers Philippines men's international footballers Men's association football players not categorized by position Filipino men's footballers Philippines Football League managers
```html <html lang="en"> <head> <title>MIPS assembly options - Using as</title> <meta http-equiv="Content-Type" content="text/html"> <meta name="description" content="Using as"> <meta name="generator" content="makeinfo 4.11"> <link title="Top" rel="start" href="index.html#Top"> <link rel="up" href="MIPS_002dDependent.html#MIPS_002dDependent" title="MIPS-Dependent"> <link rel="prev" href="MIPS-ISA.html#MIPS-ISA" title="MIPS ISA"> <link rel="next" href="MIPS-autoextend.html#MIPS-autoextend" title="MIPS autoextend"> <link href="path_to_url" rel="generator-home" title="Texinfo Homepage"> <!-- This file documents the GNU Assembler "as". Permission is granted to copy, distribute and/or modify this document or any later version published by the Free Software Foundation; with no Invariant Sections, with no Front-Cover Texts, and with no Back-Cover Texts. A copy of the license is included in the --> <meta http-equiv="Content-Style-Type" content="text/css"> <style type="text/css"><!-- pre.display { font-family:inherit } pre.format { font-family:inherit } pre.smalldisplay { font-family:inherit; font-size:smaller } pre.smallformat { font-family:inherit; font-size:smaller } pre.smallexample { font-size:smaller } pre.smalllisp { font-size:smaller } span.sc { font-variant:small-caps } span.roman { font-family:serif; font-weight:normal; } span.sansserif { font-family:sans-serif; font-weight:normal; } --></style> </head> <body> <div class="node"> <p> <a name="MIPS-assembly-options"></a> Next:&nbsp;<a rel="next" accesskey="n" href="MIPS-autoextend.html#MIPS-autoextend">MIPS autoextend</a>, Previous:&nbsp;<a rel="previous" accesskey="p" href="MIPS-ISA.html#MIPS-ISA">MIPS ISA</a>, Up:&nbsp;<a rel="up" accesskey="u" href="MIPS_002dDependent.html#MIPS_002dDependent">MIPS-Dependent</a> <hr> </div> <h4 class="subsection">9.27.6 Directives to control code generation</h4> <p><a name="index-MIPS-directives-to-override-command-line-options-1502"></a><a 
name="index-g_t_0040code_007b_002emodule_007d-1503"></a>The <code>.module</code> directive allows command line options to be set directly from assembly. The format of the directive matches the <code>.set</code> directive but only those options which are relevant to a whole module are supported. The effect of a <code>.module</code> directive is the same as the corresponding command line option. Where <code>.set</code> directives support returning to a default then the <code>.module</code> directives do not as they define the defaults. <p>These module-level directives must appear first in assembly. <p>Traditional MIPS assemblers do not support this directive. <p><a name="your_sha256_hash1504"></a><a name="index-g_t_0040code_007b_002eset-insn32_007d-1505"></a><a name="index-g_t_0040code_007b_002eset-noinsn32_007d-1506"></a>The directive <code>.set insn32</code> makes the assembler only use 32-bit instruction encodings when generating code for the microMIPS processor. This directive inhibits the use of any 16-bit instructions from that point on in the assembly. The <code>.set noinsn32</code> directive allows 16-bit instructions to be accepted. <p>Traditional MIPS assemblers do not support this directive. </body></html> ```
```smalltalk
/*
 *
 *
 * path_to_url
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 */
using Amazon.Lambda.Core;
using System;

using Amazon.Lambda.RuntimeSupport.Helpers;

namespace Amazon.Lambda.RuntimeSupport
{
    /// <summary>
    /// ILambdaContext implementation that surfaces per-invocation data from the
    /// Lambda Runtime API headers and the Lambda environment variables.
    /// </summary>
    internal class LambdaContext : ILambdaContext
    {
        // Reference point for converting the deadline (milliseconds since the Unix
        // epoch, per the runtime API header) into a remaining-time TimeSpan.
        internal static readonly DateTime UnixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);

        private LambdaEnvironment _lambdaEnvironment;
        private RuntimeApiHeaders _runtimeApiHeaders;
        private IDateTimeHelper _dateTimeHelper;

        // Parsed from header/environment strings in the constructor; TryParse leaves
        // these at their zero defaults when the source value is missing or malformed.
        private long _deadlineMs;
        private int _memoryLimitInMB;

        // Cognito JSON payloads are only deserialized on first access.
        private Lazy<CognitoIdentity> _cognitoIdentityLazy;
        private Lazy<CognitoClientContext> _cognitoClientContextLazy;

        private IConsoleLoggerWriter _consoleLogger;

        /// <summary>
        /// Convenience constructor that uses the real system clock.
        /// </summary>
        public LambdaContext(RuntimeApiHeaders runtimeApiHeaders, LambdaEnvironment lambdaEnvironment, IConsoleLoggerWriter consoleLogger)
            : this(runtimeApiHeaders, lambdaEnvironment, new DateTimeHelper(), consoleLogger)
        {
        }

        /// <summary>
        /// Full constructor; <paramref name="dateTimeHelper"/> allows a test clock to
        /// be injected for deterministic RemainingTime values.
        /// </summary>
        public LambdaContext(RuntimeApiHeaders runtimeApiHeaders, LambdaEnvironment lambdaEnvironment, IDateTimeHelper dateTimeHelper, IConsoleLoggerWriter consoleLogger)
        {
            _lambdaEnvironment = lambdaEnvironment;
            _runtimeApiHeaders = runtimeApiHeaders;
            _dateTimeHelper = dateTimeHelper;
            _consoleLogger = consoleLogger;

            int.TryParse(_lambdaEnvironment.FunctionMemorySize, out _memoryLimitInMB);
            long.TryParse(_runtimeApiHeaders.DeadlineMs, out _deadlineMs);

            _cognitoIdentityLazy = new Lazy<CognitoIdentity>(() => CognitoIdentity.FromJson(runtimeApiHeaders.CognitoIdentityJson));
            _cognitoClientContextLazy = new Lazy<CognitoClientContext>(() => CognitoClientContext.FromJson(runtimeApiHeaders.ClientContextJson));

            // set environment variable so that if the function uses the XRay client it will work correctly
            _lambdaEnvironment.SetXAmznTraceId(_runtimeApiHeaders.TraceId);
        }

        // TODO If/When Amazon.Lambda.Core is major versioned, add this to ILambdaContext.
        // Until then function code can access it via the _X_AMZN_TRACE_ID environment variable set by LambdaBootstrap.
        public string TraceId => _runtimeApiHeaders.TraceId;

        public string AwsRequestId => _runtimeApiHeaders.AwsRequestId;

        public IClientContext ClientContext => _cognitoClientContextLazy.Value;

        public string FunctionName => _lambdaEnvironment.FunctionName;

        public string FunctionVersion => _lambdaEnvironment.FunctionVersion;

        public ICognitoIdentity Identity => _cognitoIdentityLazy.Value;

        public string InvokedFunctionArn => _runtimeApiHeaders.InvokedFunctionArn;

        // NOTE(review): a new logger wrapper is created on every access; callers that
        // cache context.Logger avoid the extra allocations.
        public ILambdaLogger Logger => new LambdaConsoleLogger(_consoleLogger);

        public string LogGroupName => _lambdaEnvironment.LogGroupName;

        public string LogStreamName => _lambdaEnvironment.LogStreamName;

        public int MemoryLimitInMB => _memoryLimitInMB;

        // Time left before the invocation deadline: deadline (epoch ms) minus "now"
        // expressed as ms since the Unix epoch. Can go negative past the deadline.
        public TimeSpan RemainingTime => TimeSpan.FromMilliseconds(_deadlineMs - (_dateTimeHelper.UtcNow - UnixEpoch).TotalMilliseconds);
    }
}
```
```objective-c
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_INTERPRETER_BYTECODE_ARRAY_ACCESSOR_H_
#define V8_INTERPRETER_BYTECODE_ARRAY_ACCESSOR_H_

#include "src/base/optional.h"
#include "src/common/globals.h"
#include "src/handles/handles.h"
#include "src/interpreter/bytecode-register.h"
#include "src/interpreter/bytecodes.h"
#include "src/objects/objects.h"
#include "src/objects/smi.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

class BytecodeArray;

namespace interpreter {

class BytecodeArrayAccessor;

// One entry of a switch bytecode's jump table: the case value and the
// absolute offset of its target.
struct V8_EXPORT_PRIVATE JumpTableTargetOffset {
  int case_value;
  int target_offset;
};

// View over a switch bytecode's jump table, iterable with ranged-for to
// produce JumpTableTargetOffset entries.
class V8_EXPORT_PRIVATE JumpTableTargetOffsets final {
 public:
  // Minimal iterator implementation for use in ranged-for.
  class V8_EXPORT_PRIVATE iterator final {
   public:
    iterator(int case_value, int table_offset, int table_end,
             const BytecodeArrayAccessor* accessor);

    JumpTableTargetOffset operator*();
    iterator& operator++();
    bool operator!=(const iterator& other);

   private:
    // Skips over table slots that do not hold valid targets.
    void UpdateAndAdvanceToValid();

    const BytecodeArrayAccessor* accessor_;
    Smi current_;
    int index_;
    int table_offset_;
    int table_end_;
  };

  JumpTableTargetOffsets(const BytecodeArrayAccessor* accessor, int table_start,
                         int table_size, int case_value_base);

  iterator begin() const;
  iterator end() const;

  int size() const;

 private:
  const BytecodeArrayAccessor* accessor_;
  int table_start_;
  int table_size_;
  int case_value_base_;
};

// Abstraction over a bytecode array so the accessor can work against
// different backing representations (see the on-heap Handle-based
// constructor of BytecodeArrayAccessor below).
class V8_EXPORT_PRIVATE AbstractBytecodeArray {
 public:
  virtual int length() const = 0;
  virtual int parameter_count() const = 0;
  virtual uint8_t get(int index) const = 0;
  virtual void set(int index, uint8_t value) = 0;
  virtual Address GetFirstBytecodeAddress() const = 0;

  virtual Handle<Object> GetConstantAtIndex(int index,
                                            Isolate* isolate) const = 0;
  virtual bool IsConstantAtIndexSmi(int index) const = 0;
  virtual Smi GetConstantAtIndexAsSmi(int index) const = 0;

  virtual ~AbstractBytecodeArray() = default;
};

// Random-access cursor over a bytecode array: positioned at one bytecode at a
// time (see SetOffset), with typed getters for that bytecode's operands.
class V8_EXPORT_PRIVATE BytecodeArrayAccessor {
 public:
  BytecodeArrayAccessor(std::unique_ptr<AbstractBytecodeArray> bytecode_array,
                        int initial_offset);

  BytecodeArrayAccessor(Handle<BytecodeArray> bytecode_array,
                        int initial_offset);

  // Positions the accessor at the bytecode starting at `offset`.
  void SetOffset(int offset);

  void ApplyDebugBreak();

  Bytecode current_bytecode() const;
  int current_bytecode_size() const;
  int current_offset() const { return bytecode_offset_; }
  OperandScale current_operand_scale() const { return operand_scale_; }
  // Size of the scaling prefix preceding the current bytecode, if any.
  int current_prefix_offset() const { return prefix_offset_; }
  AbstractBytecodeArray* bytecode_array() const {
    return bytecode_array_.get();
  }

  // Typed operand getters for the current bytecode; `operand_index` selects
  // which operand of that bytecode to decode.
  uint32_t GetFlagOperand(int operand_index) const;
  uint32_t GetUnsignedImmediateOperand(int operand_index) const;
  int32_t GetImmediateOperand(int operand_index) const;
  uint32_t GetIndexOperand(int operand_index) const;
  FeedbackSlot GetSlotOperand(int operand_index) const;
  uint32_t GetRegisterCountOperand(int operand_index) const;
  Register GetRegisterOperand(int operand_index) const;
  int GetRegisterOperandRange(int operand_index) const;
  Runtime::FunctionId GetRuntimeIdOperand(int operand_index) const;
  Runtime::FunctionId GetIntrinsicIdOperand(int operand_index) const;
  uint32_t GetNativeContextIndexOperand(int operand_index) const;
  Handle<Object> GetConstantAtIndex(int offset, Isolate* isolate) const;
  bool IsConstantAtIndexSmi(int offset) const;
  Smi GetConstantAtIndexAsSmi(int offset) const;
  Handle<Object> GetConstantForIndexOperand(int operand_index,
                                            Isolate* isolate) const;

  // Returns the absolute offset of the branch target at the current bytecode.
  // It is an error to call this method if the bytecode is not for a jump or
  // conditional jump.
  int GetJumpTargetOffset() const;

  // Returns an iterator over the absolute offsets of the targets of the current
  // switch bytecode's jump table. It is an error to call this method if the
  // bytecode is not a switch.
  JumpTableTargetOffsets GetJumpTableTargetOffsets() const;

  // Returns the absolute offset of the bytecode at the given relative offset
  // from the current bytecode.
  int GetAbsoluteOffset(int relative_offset) const;

  bool OffsetWithinBytecode(int offset) const;

  std::ostream& PrintTo(std::ostream& os) const;

 private:
  bool OffsetInBounds() const;

  uint32_t GetUnsignedOperand(int operand_index,
                              OperandType operand_type) const;
  int32_t GetSignedOperand(int operand_index, OperandType operand_type) const;

  // Re-reads the operand scale after the cursor moves (scaling prefixes
  // change how the following bytecode's operands are decoded).
  void UpdateOperandScale();

  std::unique_ptr<AbstractBytecodeArray> bytecode_array_;
  int bytecode_offset_;
  OperandScale operand_scale_;
  int prefix_offset_;

  DISALLOW_COPY_AND_ASSIGN(BytecodeArrayAccessor);
};

}  // namespace interpreter
}  // namespace internal
}  // namespace v8

#endif  // V8_INTERPRETER_BYTECODE_ARRAY_ACCESSOR_H_
```
The Thousand Talents Plan or Thousand Talents Program (TTP) (), or Overseas High-Level Talent Recruitment Programs () is a program by the central government of China to recruit experts in science and technology from abroad, principally but not exclusively from overseas Chinese communities. The current iteration of the program is called Qiming, administered by the Ministry of Industry and Information Technology. Law enforcement and counterintelligence agencies in the United States, Australia, Canada, and other countries have raised concerns about the program as a vector for intellectual property theft and espionage. Background Many Chinese students go abroad for advanced studies, and the vast majority of them decide to remain abroad after their studies. To reverse this trend and to build the size and prestige of China's university system, the central government of China recognized the need and turned to attracting overseas Chinese and top foreign-born talent from the world's best universities. The Thousand Talent program is the most prominent of China's more than 200 talent recruitment programs. It grew out of the "Talent Superpower Strategy" of the 17th National Congress of the Chinese Communist Party (CCP) in 2007. The CCP Central Committee and State Council of the People's Republic of China elevated the program in 2010 to become the top-level award given through China's National Talent Development Plan to strengthen innovation and international competitiveness within China. In 2019, the program was re-branded as the "National High-end Foreign Experts Recruitment Plan." The CCP's United Front Work Department's Western Returned Scholars Association is the official representative body for program participants. China also administers a "Young Thousand Talents" program as a "youth" branch of the Thousand Talents Plan. It focuses on the recruitment of early career STEM scholars.
Previous attempts to attract foreign scientific talent through a decentralized network of approximately 600 "talent recruitment stations" worldwide had been largely ineffective at convincing top researchers to leave developed countries permanently. Selection The Thousand Talents program primarily targets Chinese citizens who were educated in elite programs overseas and who have been successful as entrepreneurs, professionals, and researchers. The program also recognizes a small number of elite foreign-born experts with skills that are critical to China's international competitiveness in science and innovation. International experts in the latter category are typically winners of major prizes such as the Nobel Prize and the Fields Medal, and are expected first to have made internationally renowned contributions to a field of technological importance to China, and secondly to hold either a tenured position at one of the world's top universities or a senior role in an internationally important research organization. In 2013, the Junior Thousand Talent Plan was created to attract faculty members under the age of 40 who have performed high impact research at one of the world's top universities. Although these professorships can be affiliated with any university in China, they are awarded disproportionately to individuals affiliated with the most prestigious (C9 League) universities; the few individuals who receive both this and the Changjiang (Yangtze River) Scholar award are typically associated with the C9 League. The program includes two mechanisms: resources for permanent recruitment into Chinese academia, and resources for short-term appointments that typically target international experts who have full-time employment at a leading international university or research laboratory. Within a decade of the announcement of the Thousand Talents Plan in 2008, it had attracted more than 7,000 people overall. 
More than 1,400 people participating in the Thousand Talents Plan, including several foreigners, specialize in life sciences fields. More than 300 scientists and scholars at Australian tertiary institutions are connected to the program, according to research by Australian Strategic Policy Institute. The Thousand Talents Plan professorship is the highest academic honor awarded by the State Council, analogous to the top-level award given by the Ministry of Education. Benefits to participants The program confers the prestigious title of "Thousand Talents Plan Distinguished Professor" (千人计划特聘教授) or "Junior Thousand Talents Plan Professor" upon the selected individuals, and provides benefits including this prestigious title, high pay, and visa privileges. The program is the first ever to enable individuals of extraordinary ability to gain access to Chinese immigration visas, including "long-stay visas." The program provides a one-time bonus of 1 million RMB to select individuals, substantial resources for research and academic exchange, and assistance with housing and transportation costs. Thousand Talents scholars are eligible for high levels of Chinese government funding. Participants in the Young Thousand Talents program receive a one-time award of 500,000 RMB and start-up grants between 1 million and three million RMB. These packages are typically matched by host institutions in China or local governments. Participants also receive fringe benefits including subsidized housing and prioritization when applying for grants. Reaction Conflict of interest and fraud allegations Some Thousand Talents Plan Professors have reported fraud in the program including misappropriated grant funding, poor accommodations, and violations of research ethics. Dismissals due to undisclosed connections to the TTP have taken place. 
Individuals who receive either of China's two top academic awards, the Thousand Talents Professorship and the Changjiang (Yangtze River) Scholar award, have become targets for recruitment by China's wealthiest universities so frequently that the Ministry of Education issued notices in both 2013 and 2017 discouraging Chinese universities from recruiting away top talent from one another. Effectiveness assessments Evaluations of the program's efficacy and impact have been mixed. Although the program has successfully attracted top international talent to China, its efficacy in retaining these talented individuals has been questioned, with many of the most talented scientists willing to spend short periods in China but unwilling to abandon their tenured positions at major Western universities. A study published in 2023 concluded that the scholars were high (but not top) caliber and outperformed overseas peers in last-authored publications because of better access to resources in China. Foreign government reactions Canada In August 2020, Canadian Security Intelligence Service (CSIS) warned both Canadian universities and Canadian research institutions of the TTP, saying that it recruited researchers and scientists around the world to persuade them to share their research and technology — either willingly or by coercion. South Korea In June 2023, the Seoul Metropolitan Police Agency arrested a Chinese TTP researcher on espionage charges for allegedly stealing thousands of files relating to medical robot technology. United States The success of the program in recruiting U.S.-trained scientists back to China has been viewed with concern from the U.S., with a June 2018 report from the National Intelligence Council declaring an underlying motivation of the program to be “to facilitate the legal and illicit transfer of US technology, intellectual property and know-how” to China. 
US and Canadian authorities have asserted that China intends to use scientists who are involved with this plan to gain access to new technology for economic and military advantage. The Federal Bureau of Investigation (FBI) has indicated that foreign governments sponsor talent plans "to bring outside knowledge and innovation back to their countries—and sometimes that means stealing trade secrets, breaking export control laws, or violating conflict-of-interest policies to do so." In January 2020, the FBI arrested Charles M. Lieber, the chair of Harvard University's Department of Chemistry and Chemical Biology, for lying about his ties to the program; he was convicted in December 2021. In May 2020, the FBI arrested a former researcher at the Cleveland Clinic for failing to disclose ties to the Thousand Talents Program, although a year later federal prosecutors dismissed the case. In June 2020, it was reported that the National Institutes of Health had investigations into the behavior of 189 scientists. In November 2020, Song Guo Zheng, a TTP participant, pled guilty to making false claims to the FBI about his ties to the Chinese government during his employment at Ohio State University. In November 2019, the US Senate Permanent Subcommittee on Investigations and Committee on Homeland Security and Governmental Affairs held an open hearing on China's Talent Recruitment Plans, including the TTP, and called the programs a threat to national security. The report from the hearing cited TTP contracts as violating research values, TTP members willfully failing to disclose their membership to their home institutions, and cited numerous cases against TTP members for theft of intellectual property and fraud. One TTP member stole proprietary defense information on U.S. military jet engines. The report indicated that "TTP targets U.S.-based researchers and scientists, regardless of ethnicity or citizenship, who focus on or have access to cutting-edge research and technology."
In 2019, the National Institutes of Health (NIH) completed a yearlong investigation into research violations. Among other issues, it noticed after going over published papers that scores of researchers revealed their affiliation with or funding from institutions in China but had failed to report those ties to their home institution or the NIH. Michael Lauer, who oversaw the extramural research program, said that while the TTP is not a threat, participants should fully disclose that relationship. In September 2022, it was reported that TTP programs recruited over 150 scientists who worked on U.S. government-sponsored research at Los Alamos National Laboratory. Academia Although the program has successfully attracted top international talent to China, its efficacy in retaining these talented individuals has been questioned, with many of the most talented scientists willing to spend short periods in China but unwilling to abandon their tenured positions at major Western universities. According to academic Scott Moore, the Chinese government had been the most assertive government in the world in introducing policies like the Thousand Talents Plan to trigger “a reverse brain drain." Moore stated that while the program posed several challenges for developed democracies such as incentivizing recruited professors to improperly transfer resources to their concurrent workplace in China, the biggest challenges had less to do with national security than to "increasingly outdated and misguided immigration policies common among developed democracies." According to Moore, the high number of participants in the program with a specialization in the life sciences prompted US policymakers to view the TTP as signaling by Beijing of its intention to “mount a full-spectrum challenge to US leadership in the biotechnology sector”, with one US policymaker saying the TTP helped build China's talent pipeline for the sector.
Academics Dongbo Shi, Weichen Liu, and Yanbo Wang conducted an analysis of Young Thousand Talent program participants, and concluded "that China’s YTT program has been successful in recruiting and nurturing high-caliber scientists and that YTT scientists outperform their overseas peers in post-return publication, mainly owing to their access to greater funding and larger research teams. These results show the potential of talent programs as a policy tool for countries to attract expatriate scientists and promote their productivity." References External links Chinese Talent Program Tracker at the Center for Security and Emerging Technology Higher education in China Recruitment Brain drain Academic awards in China 2008 establishments in China Technology transfer
Dangerhouse Records was a punk music record label based in Los Angeles, California. Overview Dangerhouse was one of the first independent labels to document the burgeoning West Coast punk rock scene. Started in 1977 and collapsing by the end of 1980, it was a short-lived enterprise, which nonetheless left an indelible mark on the punk rock history. Established by David Brown and Pat "Rand" Garrett, both members of the punk rock band Black Randy and the Metrosquad, the company operated on a limited budget, supported by the more conventional typesetting and aerospace jobs of the founders. Black Randy himself got a day job in telemarketing and joined the effort as a business partner. Despite its scarce resources, Dangerhouse was notable for its production quality. They released records on many of California's finest first-wave punk bands, including X, The Eyes, The Bags, The Alley Cats, Avengers, the Weirdos, and the Dils. Discord, a lack of financial reward, and big label competition are cited as the reasons for the demise of the company. Discography In its brief existence, Dangerhouse Records put out only 14 7-inch vinyl records, one LP, and one compilation 12-inch EP. See also List of record labels Notes References External links Richardson, Ryan. Dangerhouse Records, in-depth history and complete commented discography (page 1/2). Break My Face. Frontier Records: Dangerhouse compilations. Frontier Records. Dangerhouse Records, cover art. Record Collectors of the World Unite. Dangerhouse Records, discography. Discogs. Dangerhouse Records, discography. Punky Gibbon. American record labels Defunct record labels of the United States Record labels established in 1977 Record labels disestablished in 1980 Punk record labels 1977 establishments in California Companies based in Los Angeles
```lua
-- Demo entry script: prints a start marker, loads the demoA_Image module
-- (run for its side effects; the return value is ignored), then prints an
-- end marker so the module's own output is bracketed.
print("main");
require("demoA_Image");
print("end")
```
```java /** * This file is part of Skript. * * Skript is free software: you can redistribute it and/or modify * (at your option) any later version. * * Skript is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * along with Skript. If not, see <path_to_url * */ package ch.njol.skript.expressions; import org.bukkit.event.Event; import org.bukkit.inventory.meta.ItemMeta; import org.eclipse.jdt.annotation.Nullable; import ch.njol.skript.Skript; import ch.njol.skript.aliases.ItemType; import ch.njol.skript.classes.Changer; import ch.njol.skript.doc.Description; import ch.njol.skript.doc.Examples; import ch.njol.skript.doc.Name; import ch.njol.skript.doc.RequiredPlugins; import ch.njol.skript.doc.Since; import ch.njol.skript.expressions.base.SimplePropertyExpression; import ch.njol.util.coll.CollectionUtils; @Name("Custom Model Data") @Description("Get/set the CustomModelData tag for an item. (Value is an integer between 0 and 99999999)") @Examples({"set custom model data of player's tool to 3", "set {_model} to custom model data of player's tool"}) @RequiredPlugins("1.14+") @Since("2.5") public class ExprCustomModelData extends SimplePropertyExpression<ItemType, Long> { static { if (Skript.methodExists(ItemMeta.class, "hasCustomModelData")) { register(ExprCustomModelData.class, Long.class, "[custom] model data", "itemtypes"); } } @Override public Long convert(ItemType item) { ItemMeta meta = item.getItemMeta(); assert meta != null; if (meta.hasCustomModelData()) return (long) meta.getCustomModelData(); else return 0L; } @Override public Class<? 
extends Long> getReturnType() { return Long.class; } @Override public Class<?>[] acceptChange(Changer.ChangeMode mode) { return CollectionUtils.array(Number.class); } @Override protected String getPropertyName() { return "custom model data"; } @Override public void change(Event e, @Nullable Object[] delta, Changer.ChangeMode mode) { long data = delta == null ? 0 : ((Number) delta[0]).intValue(); if (data > 99999999 || data < 0) data = 0; for (ItemType item : getExpr().getArray(e)) { long oldData = 0; ItemMeta meta = item.getItemMeta(); if (meta.hasCustomModelData()) oldData = meta.getCustomModelData(); switch (mode) { case ADD: data = oldData + data; break; case REMOVE: data = oldData - data; break; case DELETE: case RESET: case REMOVE_ALL: data = 0; } meta.setCustomModelData((int) data); item.setItemMeta(meta); } } @Override public String toString(@Nullable Event e, boolean d) { return "custom model data of " + getExpr().toString(e, d); } } ```
Greens Norton Pocket Park is a Local Nature Reserve in Greens Norton in Northamptonshire. It is owned and managed by Green Norton Parish Council. This former brick pit has a pond, wetland, grassland and woods. There are picnic tables and benches. Fauna include barn owls, grass snakes, great crested newts and green woodpeckers. There is access by a bridleway from Bengal Lane and a footpath from Bury Hill. References Local Nature Reserves in Northamptonshire
John I (Jean I) (died 24 February 1191), Count of Alençon, son of William III Talvas, Count of Ponthieu, and Helie of Burgundy. Recognized as Count of Alençon by Henry II of England, John succeeded his father in 1171. He was a supporter of the Perseigne Abbey and the Abbey of Saint-Martin of Troarn. He married Beatrix of Maine, daughter of Elias II, Count of Maine, and Philippe, Countess of Perche. John and Beatrix had six children: John II (died May 1191), Count of Alençon, succeeded his father Robert (died 8 September 1217), Count of Alençon, succeeded John II. Married Jeanne de Preuilly, widow of Hugues V, Viscount of Châteaudun. Their daughter Mathilde (Maud) married Theobald VI, Count of Blois. Guillaume (died 1203) Ella d’Alençon, married to Hugh II, Viscount of Chatellerault Helie d’Alençon (died after May 1233), married Robert VI FitzErneis Phlippa d’Alençon (died before 1223), married first to William III of Roumare (died 1198; grandson of William de Roumare), second to William Malet, Lord of Graville, and third, to William de Préaux. Near the end of his life, he planned to participate in the Third Crusade, but died February 24, 1191, at the . He was succeeded as Count of Alençon by his son John. References Sources 1191 deaths Counts of Alençon
The 2022 NXT In Your House was the 30th In Your House professional wrestling event produced by WWE, and the third annual held for the promotion's NXT brand division. It took place on Saturday, June 4, 2022, at the WWE Performance Center in Orlando, Florida and aired on WWE's livestreaming platforms. In June 2020, WWE revived In Your House as a subseries of the NXT TakeOver series. In September 2021, however, the TakeOver series was discontinued after NXT reverted to being WWE's developmental territory. With the scheduling of this 2022 event, In Your House became its own event for NXT. Unlike the previous year's event, which also aired on pay-per-view (PPV), the 2022 event was only available via livestreaming, as beginning with Stand & Deliver in April, NXT's major events no longer air on PPV. Eight matches were contested at the event, including two dark matches that were later streamed on Level Up. All five championships exclusive to the NXT brand were contested for; two were lost while the other three were retained. In the main event, Bron Breakker defeated Joe Gacy to retain the NXT Championship. In other prominent matches, The Creed Brothers (Brutus Creed and Julius Creed) defeated Pretty Deadly (Elton Prince and Kit Wilson) to win the NXT Tag Team Championship, Carmelo Hayes defeated Cameron Grimes to win the NXT North American Championship, and in the opening bout, The D'Angelo Family (Tony D'Angelo, Channing "Stacks" Lorenzo, and Troy "Two Dimes" Donovan) defeated Legado Del Fantasma (Santos Escobar, Cruz Del Toro, and Joaquin Wilde). Production Background In Your House was a series of monthly pay-per-view (PPV) shows that were held by WWE from May 1995 to February 1999. They aired when the promotion was not holding one of its then-five major PPVs (WrestleMania, King of the Ring, SummerSlam, Survivor Series, and Royal Rumble), and were sold at a lower cost. The branding was retired following February 1999's St. 
Valentine's Day Massacre: In Your House event, as the company moved to install permanent names for each of its monthly PPVs. After 21 years, In Your House was revived in 2020 for WWE's NXT brand as a subseries of the NXT TakeOver series, being held annually in June. In September 2021, however, the TakeOver series was discontinued after NXT was rebranded as NXT 2.0, reverting the brand to being WWE's developmental territory. Despite TakeOver's discontinuation, the announcement of the 2022 event confirmed that In Your House would continue on as NXT's annual June event. It was scheduled to be held on Saturday, June 4, 2022, at NXT's home base of the WWE Performance Center in Orlando, Florida. It was the 30th event in the In Your House chronology and the third held under the NXT banner. Unlike the previous year, which also aired on traditional pay-per-view, the 2022 event was only available to livestream on Peacock in the United States and the WWE Network in international markets. Storylines The card included matches that resulted from scripted storylines, where wrestlers portrayed heroes, villains, or less distinguishable characters in scripted events that built tension and culminated in a wrestling match or series of matches. Results were predetermined by WWE's writers on the NXT brand, while storylines were produced on the weekly television program, NXT, and the supplementary online streaming show, Level Up. At the NXT special Spring Breakin' on May 3, Bron Breakker defeated Joe Gacy to retain the NXT Championship. Following the match, however, Breakker was attacked by hooded minions under the command of Gacy before being carried out of the building on a stretcher by the henchmen and dumped on the side of a road. The following week, Gacy offered an invitation to Breakker to join his movement. On the May 17 episode of NXT, Breakker declined. 
Gacy then offered Breakker a rematch for the title, adding the stipulation that Breakker would lose the title if he was disqualified due to him not being able to control his anger, making the match official for In Your House. During the NXT Stand & Deliver Kickoff pre-show, Wendy Choo cost Toxic Attraction (Gigi Dolin and Jacy Jayne) the NXT Women's Tag Team Championship. However, on the following episode of NXT, Dolin and Jayne won back the titles, due to NXT Women's Champion and Toxic Attraction leader Mandy Rose preventing interference from Choo. In the coming weeks, Choo kept targeting the stable, including an unsuccessful attempt at winning the NXT Women's Tag Team Championship. On the May 24 episode of NXT, after Choo attacked Rose after the latter's match, Rose accepted Choo's challenge for an NXT Women's Championship match, which was scheduled for In Your House in a Women's Championship Summit the next week. At NXT Stand & Deliver, Carmelo Hayes lost the NXT North American Championship to Cameron Grimes in a five-way ladder match. At Spring Breakin', Hayes failed to win the title in a triple threat match also involving Solo Sikoa, who Grimes pinned to retain the title. On the following episode of NXT, Sikoa wanted another shot at the title, and Grimes said that he would get the match after Grimes defeated Hayes at In Your House. The next day, a match between Grimes and Hayes for the title was made official for In Your House. On May 15, Pretty Deadly (Kit Wilson and Elton Prince) were scheduled to defend the NXT Tag Team Championship against The Creed Brothers (Brutus Creed and Julius Creed) at In Your House. Wilson and Prince last eliminated The Creed Brothers in a gauntlet match to win the vacant titles on the April 12 episode of NXT. On the May 31 episode of NXT, a further stipulation was added in that The Creed Brothers must leave Diamond Mine should they lose. 
In April, Legado Del Fantasma (Santos Escobar, Cruz Del Toro, Elektra Lopez, and Joaquin Wilde) began feuding with Tony D'Angelo, the self-proclaimed "Don of NXT" and his "family". On the April 26 episode of NXT, D'Angelo revealed his associates to be Channing "Stacks" Lorenzo and Troy "Two Dimes" Donovan, after they cost D'Angelo his match. At Spring Breakin', D'Angelo and Escobar agreed to an uneasy truce. The following week, Escobar stated that AJ Galante was fair game in the war due to his interference in the peace talks. D'Angelo responded by kidnapping Toro in his car trunk and leaving the Performance Center. A match between Escobar and D'Angelo was scheduled for the May 17 episode. During the match, a brawl between Toro, Wilde, Lorenzo, and Donovan occurred at ringside, which allowed Escobar to strike D'Angelo with brass knuckles and give Escobar the win. The following week, after Donovan and Lorenzo's match, the two teams brawled. On the May 31 episode, the two teams agreed to a six-man tag team match at In Your House where the losing team would join the winning team's stable. Reception Dave Meltzer rated Cameron Grimes vs Carmelo Hayes and Legado Del Fantasma vs Channing "Stacks", Tony, and Troy both received 3.75 stars, the highest of the night. The lowest rated match of the night was Mandy Rose vs Wendy Choo, which received 1.5 stars. Elsewhere, the women's tag title match received 2.75 stars, Pretty Deadly vs The Creeds received 2.75 stars and the main event received the same rating as the Pretty Deadly match. Aftermath New NXT North American Champion Carmelo Hayes, with Trick Williams, opened the following episode of NXT, but was quickly interrupted by Solo Sikoa, who wanted a shot at the title. Grayson Waller interrupted, stating that nobody likes Sikoa, which is why they called him "Solo". 
Waller, Hayes, and Williams then laid out Sikoa, leading to a 2-on-1 handicap match pitting Sikoa against Waller and Hayes being scheduled for that episode's main event. However, after making his NXT return earlier that night, Apollo Crews teamed with Sikoa to defeat Waller and Hayes. With Legado Del Fantasma (Santos Escobar, Cruz Del Toro, Joaquin Wilde, and Elektra Lopez) joining The D'Angelo Family (Tony D'Angelo, Channing "Stacks" Lorenzo, and Troy "Two Dimes" Donovan), they would accompany each other during matches with them usually losing. On the following episode of NXT, Escobar lost his match after he didn't want to use his crowbar against his opponent. On the June 21 episode (taped June 8), Escobar cost D'Angelo his NXT North American Championship match. This was Donovan's final WWE appearance, as he was released on June 12 due to a policy violation. On the June 28 episode, he was written off television when D'Angelo threw him off the bridge and into the water. D'Angelo thought that Donovan tried to steal his throne. At NXT: The Great American Bash on July 5, D'Angelo revealed that Escobar was hospitalized and the other members of Legado began working with The D'Angelo Family. Escobar returned on the August 2 episode of NXT, where he cost Lorenzo and D'Angelo their NXT Tag Team Championship match, signaling that their alliance has ended. A street fight between D'Angelo and Escobar was later scheduled for Heatwave, where D'Angelo won to end the feud and, as per the match stipulation, Escobar and the rest of Legado agreed to leave NXT if that happened. Results References External links 2022 WWE Network events June 2022 events in the United States WWE NXT In Your House Events in Orlando, Florida Professional wrestling in Orlando, Florida 2022 in professional wrestling in Florida
Perlucidus is a cloud variety that generally appears in only two cloud types: altocumulus and stratocumulus. This cloud variety is easily recognizable by the small gaps that appear in the cloud layer, which let higher clouds be seen. It forms when shallow convection starts in a cloud layer that did not previously have perlucidus characteristics; the gaps that make the sky visible indicate regions where air is sinking. This cloud variety may appear either as a translucent cloud or an opaque cloud.
```css Change the style of the decoration with `text-decoration-style` Using the `font-variant` property to transform text to small caps Load custom fonts on a web page using `@font-face` Comma-separated lists `letter-spacing` property ```
The 1982 UCLA Bruins football team was an American football team that represented the University of California, Los Angeles during the 1982 NCAA Division I-A football season. In their seventh year under head coach Terry Donahue, the Bruins compiled a 10–1–1 record (5–1–1 Pac-10), finished in first place in the Pacific-10 Conference. In the Rose Bowl on New Year's Day, the Bruins defeated Michigan of the Big Ten Conference by ten points and remained at fifth in the final AP Poll. UCLA's offensive leaders in 1982 were quarterback Tom Ramsey with 2,986 passing yards, running back Danny Andrews with 482 rushing yards, and wide receiver Cormac Carney with 779 receiving yards. Prior to this season, UCLA moved its home games to the Rose Bowl in Pasadena; they had played in the Los Angeles Memorial Coliseum since 1928, sharing with the USC Trojans. Schedule Personnel Game summaries Long Beach State JoJo Townsell 5 Rec, 133 Yds At Wisconsin Tom Ramsey completed 17 of 24 passes for 260 yards and rushed 17 times for 56 yards in the game. At Michigan Down 21–0 in the second quarter, Tom Ramsey mounted a 28-point comeback in the second and third quarters to defeat the 20th ranked Michigan team before a capacity crowd of 105,413 fans in Michigan Stadium. Colorado Cormac Carney 6 Rec, 103 Yds Arizona Tom Ramsey 345 pass yards California Tom Ramsey 322 pass yards Cormac Carney 4 Rec, 132 Yds Stanford Tom Ramsey 314 pass yards Danny Andrews 21 rushes, 148 yards Cormac Carney 6 receptions, 137 yards USC Noseguard Karl Morgan rushed in to tackle down USC quarterback Scott Tinsley, preventing him from scoring a two-pont conversion after Tinsley had thrown a pass to bring the Trojans back within a point. Linebacker Neal Dellocono was the most valuable player of the game. With the win (and Washington's loss), UCLA clinched a Rose Bowl berth. Vs. Michigan (Rose Bowl) UCLA took a 10–0 lead in the second quarter. 
A hard hit by UCLA defensive back Don Rogers separated Michigan quarterback Steve Smith's shoulder and knocked him out of the game. Backup David Hall got the Wolverines on the board, making the halftime score 10–7. In the third quarter, Tom Ramsey completed seven straight passes and led UCLA on a drive that was capped by Danny Andrews' nine-yard touchdown run to make the score 17–7. In the fourth quarter, UCLA got an interception inside the Michigan 20-yard line and scored again for a 24–7 lead. Michigan scored late to close the score to 24–14. 1983 NFL Draft The following players were drafted into professional football following the season. References UCLA UCLA Bruins football seasons Pac-12 Conference football champion seasons Rose Bowl champion seasons UCLA Bruins football
```cpp
#ifndef VOXEL_GPU_TASK_RUNNER_H
#define VOXEL_GPU_TASK_RUNNER_H

#include "../../util/containers/span.h"
#include "../../util/containers/std_vector.h"
#include "../../util/godot/core/packed_byte_array.h"
#include "../../util/godot/core/rid.h"
#include "../../util/godot/macros.h"
#include "../../util/macros.h"
#include "../../util/thread/mutex.h"
#include "../../util/thread/semaphore.h"
#include "../../util/thread/thread.h"

#include <atomic>

ZN_GODOT_FORWARD_DECLARE(class RenderingDevice)

#ifdef ZN_GODOT_EXTENSION
using namespace godot;
#endif

namespace zylann::voxel {

class GPUStorageBufferPool;

// Per-batch execution context passed to every IGPUTask during prepare() and
// collect(). Gives tasks access to the rendering device, the storage buffer
// pool, and the slice of the batch's shared output buffer reserved for them.
struct GPUTaskContext {
	RenderingDevice &rendering_device;
	GPUStorageBufferPool &storage_buffer_pool;

	// Buffer shared by multiple tasks in the current batch.
	// It will be downloaded in one go before collection, which is faster than downloading multiple individual buffers,
	// due to Godot's API only exposing blocking calls.
	unsigned int shared_output_buffer_begin = 0; // In bytes
	unsigned int shared_output_buffer_size = 0; // In bytes
	RID shared_output_buffer_rid;
	// Filled once per batch with the contents of the shared buffer; tasks read
	// their own slice of it in collect().
	PackedByteArray downloaded_shared_output_data;

	GPUTaskContext(RenderingDevice &rd, GPUStorageBufferPool &sb_pool) :
			rendering_device(rd), storage_buffer_pool(sb_pool) {}
};

// Interface for a unit of GPU work executed by GPUTaskRunner.
// prepare() records commands, collect() reads back results after the batch
// has been submitted and downloaded.
class IGPUTask {
public:
	virtual ~IGPUTask() {}

	// Number of bytes this task wants reserved in the batch's shared output
	// buffer. 0 means the task does not use the shared buffer.
	virtual unsigned int get_required_shared_output_buffer_size() const {
		return 0;
	}

	virtual void prepare(GPUTaskContext &ctx) = 0;
	virtual void collect(GPUTaskContext &ctx) = 0;
};

// Runs tasks that schedule compute shaders and collect their results.
```
class GPUTaskRunner {
public:
	GPUTaskRunner();
	~GPUTaskRunner();

	// Starts the worker thread. Both pointers must outlive the runner (or its
	// stop()). Call before push().
	void start(RenderingDevice *rd, GPUStorageBufferPool *pool);
	// Stops and joins the worker thread.
	void stop();
	// Queues a task; ownership is transferred to the runner.
	void push(IGPUTask *task);
	// Number of tasks queued or in flight.
	unsigned int get_pending_task_count() const;

private:
	// Worker loop: batches queued tasks, submits them, downloads results.
	void thread_func();

	RenderingDevice *_rendering_device = nullptr;
	GPUStorageBufferPool *_storage_buffer_pool = nullptr;
	// Task queue; guarded by _mutex, signaled via _semaphore.
	StdVector<IGPUTask *> _shared_tasks;
	Mutex _mutex;
	Semaphore _semaphore;
	// Using a thread because so far it looks like the only way to submit and receive data with RenderingDevice is to
	// block the calling thread and wait for the graphics card...
	// Since we already have a thread pool, this thread is supposed to be mostly sleeping or waiting.
	Thread _thread;
	bool _running = false;
	std::atomic_uint32_t _pending_count = 0;
};

} // namespace zylann::voxel

#endif // VOXEL_GPU_TASK_RUNNER_H
```
John McDonnell (July 2, 1938 – June 7, 2021) was a head coach for the University of Arkansas Razorbacks track team. He began as the cross country and track head coach for the university in 1972 and became head track coach in 1978. McDonnell retired after the 2008 NCAA Outdoor Championships. He is considered by many to be the single most successful head coach (any sport) in collegiate athletics history. Early life McDonnell earned his bachelor's degree from Southwestern Louisiana (now Louisiana-Lafayette) in 1969. While competing to become a six-time all-American in track and cross country at USL, he became the 1966–67 AAU 3,000-meter champion, and won the mile at the 1966 British Selection Games. He coached at New Providence (N.J.) High School (1969–70) and Lafayette (La.) High School (1971) before coming to the University of Arkansas. Coaching accomplishments at Arkansas McDonnell was hired as the cross country coach in 1972 and added the entire men's track and field program in 1978. Coach McDonnell led the track team to their first national championship at the 1984 NCAA Indoor Championships while the school was a member of the now-defunct Southwest Conference. Since then, the University of Arkansas has won 40 NCAA championships, including 11 cross country, 19 indoor track and 10 outdoor track. Other schools have won only 24 combined NCAA titles in the three sports during the same period. McDonnell's 40 national championships (which include 19 in indoor track, 10 in outdoor track and 11 in cross country) are more than any coach in any sport in the history of college athletics. The next highest is 31 by Pat Henry, former LSU and current track coach at Texas A&M University. McDonnell also won five national triple crowns (in 1984-85, 1991–92, 1992–93, 1994–95 and 1998–99). Texas-El Paso has won three national triple crowns. No other school has ever won one. 
In addition, McDonnell's team and individual achievements include: 20 conference triple crowns since 1982, including eight straight between 1987 and 1995 25 consecutive conference titles in cross country with indoor track and outdoor track combined from 1987 to 1995 73 conference championships in the last 77 events Arkansas has entered since 1981-1982 84 conference championships overall since 1974 including 38 in the SWC and 46 in the SEC (out of a possible 50, or 90 percent) 12 consecutive NCAA indoor track championships (1984–1995) coached 185 track All-Americans, earning 652 separate All-America honors 34 consecutive league cross country championships, including 17 straight in the SEC (1974–2007) 54 individual national champions 23 Olympians coached spanning three decades and six different Olympic Games including gold, silver and bronze medalists his 1994 indoor track squad won the national championship by the widest margins in the history of the sport as well as scored the most points (94) in the history of the NCAA event his 1994 squad scored a meet record 223 points at the SEC Outdoor Championships has been named national, regional or conference coach of the year a total of 140 times has coached 23 Olympians, including gold, silver and bronze medalists, 105 NCAA individual event champions and 331 individual event conference champions The Razorback outdoor track facility on the campus of the University of Arkansas is named in his honor. McDonnell has been inducted as a member of the United States Track Coaches Hall of Fame, the University of Arkansas Sports Hall of Honor, the Arkansas Sports Hall of Fame, the University of Southwestern Louisiana Sports Hall of Fame and the Mayo Hall of Fame. Personal life McDonnell was granted United States citizenship in 1969, the same year he graduated from the University of Southwestern Louisiana. After retiring, McDonnell enjoyed spending time on his cattle ranch in Pryor, Oklahoma. He owned over 650 head of cattle. 
McDonnell was also involved with several non-profit organizations, including the American Heart Association and the Central Arkansas Radiation Therapy Institute and he worked closely with the University of Arkansas for Medical Sciences to promote prostate cancer awareness. He was married to the former Ellen Elias of Bayonne, New Jersey and has two children, Heather and Sean. McDonnell died in Fayetteville on June 7, 2021. References 1938 births 2021 deaths Irish emigrants to the United States American track and field coaches Arkansas Razorbacks track and field coaches Louisiana Ragin' Cajuns men's track and field athletes People from Pryor Creek, Oklahoma Sportspeople from County Mayo Arkansas Razorbacks cross country coaches Louisiana Ragin' Cajuns men's cross country runners
```javascript var assert = require('assert') var env = require('../').env; console.log('environment: %s', env.type); var col; switch (env.type) { case 'node': col = require('./collection/node'); break; case 'mongo': col = require('./collection/mongo'); case 'browser': col = require('./collection/browser'); default: throw new Error('missing collection implementation for environment: ' + env.type); } module.exports = exports = col; ```
```xml var arr1 = [1, 3, 2]; arr1.reverse(); console.log(arr1); var arr2 = ["def", "abc", "aba", "ced", "meh"]; console.log(arr2.reverse()); var arr3 = ["abc", "def"]; console.log(arr3.reverse()); ```
The Anse Cascon Formation is a geologic formation in Quebec. It preserves fossils dating back to the Silurian period. See also List of fossiliferous stratigraphic units in Quebec References Silurian Quebec Silurian southern paleotemperate deposits Silurian southern paleotropical deposits
```asciidoc xref::overview/apoc.redis/apoc.redis.push.adoc[apoc.redis.push icon:book[]] + `apoc.redis.push(uri, key, values, \{config}) | Execute the 'LPUSH key field values' command, or the 'RPUSH' if config right=true (default)` label:procedure[] label:apoc-full[] ```
```c++
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Original code copyright 2014 Foxit Software Inc. path_to_url

#include "fxjs/cjs_color.h"

#include <algorithm>
#include <vector>

#include "core/fxge/cfx_color.h"
#include "fxjs/cjs_event_context.h"
#include "fxjs/cjs_eventhandler.h"
#include "fxjs/cjs_object.h"
#include "fxjs/cjs_runtime.h"
#include "fxjs/js_define.h"

// JavaScript properties exposed on the global "color" object; each entry maps
// a JS property name to its static getter/setter thunks.
const JSPropertySpec CJS_Color::PropertySpecs[] = {
    {"black", get_black_static, set_black_static},
    {"blue", get_blue_static, set_blue_static},
    {"cyan", get_cyan_static, set_cyan_static},
    {"dkGray", get_dark_gray_static, set_dark_gray_static},
    {"gray", get_gray_static, set_gray_static},
    {"green", get_green_static, set_green_static},
    {"ltGray", get_light_gray_static, set_light_gray_static},
    {"magenta", get_magenta_static, set_magenta_static},
    {"red", get_red_static, set_red_static},
    {"transparent", get_transparent_static, set_transparent_static},
    {"white", get_white_static, set_white_static},
    {"yellow", get_yellow_static, set_yellow_static}};

// JavaScript methods exposed on the "color" object.
const JSMethodSpec CJS_Color::MethodSpecs[] = {{"convert", convert_static},
                                               {"equal", equal_static}};

// Engine-wide object-definition id; stays -1 until DefineJSObjects() runs.
int CJS_Color::ObjDefnID = -1;

// Name under which this object is exposed to document JavaScript.
const char CJS_Color::kName[] = "color";

// static
int CJS_Color::GetObjDefnID() {
  return ObjDefnID;
}

// static
// Registers the static "color" object and its properties/methods with the
// FXJS engine.
void CJS_Color::DefineJSObjects(CFXJS_Engine* pEngine) {
  ObjDefnID = pEngine->DefineObj(CJS_Color::kName, FXJSOBJTYPE_STATIC,
                                 JSConstructor<CJS_Color>, JSDestructor);
  DefineProps(pEngine, ObjDefnID, PropertySpecs);
  DefineMethods(pEngine, ObjDefnID, MethodSpecs);
}

// static
// Serializes |color| into the JS array form used by the Acrobat JS API:
// ["T"], ["G", g], ["RGB", r, g, b] or ["CMYK", c, m, y, k]. An
// unrecognized color type yields an empty (uninitialized) handle, which
// callers translate into an empty array.
v8::Local<v8::Array> CJS_Color::ConvertPWLColorToArray(CJS_Runtime* pRuntime,
                                                       const CFX_Color& color) {
  v8::Local<v8::Array> array;
  switch (color.nColorType) {
    case CFX_Color::kTransparent:
      array = pRuntime->NewArray();
      pRuntime->PutArrayElement(array, 0, pRuntime->NewString("T"));
      break;
    case CFX_Color::kGray:
      array = pRuntime->NewArray();
      pRuntime->PutArrayElement(array, 0, pRuntime->NewString("G"));
      pRuntime->PutArrayElement(array, 1, pRuntime->NewNumber(color.fColor1));
      break;
    case CFX_Color::kRGB:
      array = pRuntime->NewArray();
      pRuntime->PutArrayElement(array, 0, pRuntime->NewString("RGB"));
      pRuntime->PutArrayElement(array, 1, pRuntime->NewNumber(color.fColor1));
      pRuntime->PutArrayElement(array, 2, pRuntime->NewNumber(color.fColor2));
      pRuntime->PutArrayElement(array, 3, pRuntime->NewNumber(color.fColor3));
      break;
    case CFX_Color::kCMYK:
      array = pRuntime->NewArray();
      pRuntime->PutArrayElement(array, 0, pRuntime->NewString("CMYK"));
      pRuntime->PutArrayElement(array, 1, pRuntime->NewNumber(color.fColor1));
      pRuntime->PutArrayElement(array, 2, pRuntime->NewNumber(color.fColor2));
      pRuntime->PutArrayElement(array, 3, pRuntime->NewNumber(color.fColor3));
      pRuntime->PutArrayElement(array, 4, pRuntime->NewNumber(color.fColor4));
      break;
  }
  return array;
}

// static
// Parses a JS color array (the inverse of ConvertPWLColorToArray) back into
// a CFX_Color. Missing numeric components default to 0; an empty array or an
// unrecognized color-space tag yields a default-constructed CFX_Color.
CFX_Color CJS_Color::ConvertArrayToPWLColor(CJS_Runtime* pRuntime,
                                            v8::Local<v8::Array> array) {
  int nArrayLen = pRuntime->GetArrayLength(array);
  if (nArrayLen < 1)
    return CFX_Color();

  // Element 0 is the color-space tag: "T", "G", "RGB" or "CMYK".
  WideString sSpace =
      pRuntime->ToWideString(pRuntime->GetArrayElement(array, 0));
  if (sSpace.EqualsASCII("T"))
    return CFX_Color(CFX_Color::kTransparent);

  float d1 = 0;
  if (nArrayLen > 1) {
    d1 = static_cast<float>(
        pRuntime->ToDouble(pRuntime->GetArrayElement(array, 1)));
  }
  if (sSpace.EqualsASCII("G"))
    return CFX_Color(CFX_Color::kGray, d1);

  float d2 = 0;
  float d3 = 0;
  if (nArrayLen > 2) {
    d2 = static_cast<float>(
        pRuntime->ToDouble(pRuntime->GetArrayElement(array, 2)));
  }
  if (nArrayLen > 3) {
    d3 = static_cast<float>(
        pRuntime->ToDouble(pRuntime->GetArrayElement(array, 3)));
  }
  if (sSpace.EqualsASCII("RGB"))
    return CFX_Color(CFX_Color::kRGB, d1, d2, d3);

  float d4 = 0;
  if (nArrayLen > 4) {
    d4 = static_cast<float>(
        pRuntime->ToDouble(pRuntime->GetArrayElement(array, 4)));
  }
  if (sSpace.EqualsASCII("CMYK"))
    return CFX_Color(CFX_Color::kCMYK, d1, d2, d3, d4);

  return CFX_Color();
}
// Initializes the standard named colors exposed as JS properties. Note that
// black/white and the grays use the Gray color space while the primaries use
// RGB or CMYK, mirroring Acrobat's predefined color objects.
CJS_Color::CJS_Color(v8::Local<v8::Object> pObject, CJS_Runtime* pRuntime)
    : CJS_Object(pObject, pRuntime),
      m_crTransparent(CFX_Color::kTransparent),
      m_crBlack(CFX_Color::kGray, 0),
      m_crWhite(CFX_Color::kGray, 1),
      m_crRed(CFX_Color::kRGB, 1, 0, 0),
      m_crGreen(CFX_Color::kRGB, 0, 1, 0),
      m_crBlue(CFX_Color::kRGB, 0, 0, 1),
      m_crCyan(CFX_Color::kCMYK, 1, 0, 0, 0),
      m_crMagenta(CFX_Color::kCMYK, 0, 1, 0, 0),
      m_crYellow(CFX_Color::kCMYK, 0, 0, 1, 0),
      m_crDKGray(CFX_Color::kGray, 0.25),
      m_crGray(CFX_Color::kGray, 0.5),
      m_crLTGray(CFX_Color::kGray, 0.75) {}

CJS_Color::~CJS_Color() = default;

// Property accessors. Each pair simply delegates to Get/SetPropertyHelper
// with the member backing that named color; getters return the color in JS
// array form, setters parse a JS array into the member.
CJS_Result CJS_Color::get_transparent(CJS_Runtime* pRuntime) {
  return GetPropertyHelper(pRuntime, &m_crTransparent);
}

CJS_Result CJS_Color::set_transparent(CJS_Runtime* pRuntime,
                                      v8::Local<v8::Value> vp) {
  return SetPropertyHelper(pRuntime, vp, &m_crTransparent);
}

CJS_Result CJS_Color::get_black(CJS_Runtime* pRuntime) {
  return GetPropertyHelper(pRuntime, &m_crBlack);
}

CJS_Result CJS_Color::set_black(CJS_Runtime* pRuntime,
                                v8::Local<v8::Value> vp) {
  return SetPropertyHelper(pRuntime, vp, &m_crBlack);
}

CJS_Result CJS_Color::get_white(CJS_Runtime* pRuntime) {
  return GetPropertyHelper(pRuntime, &m_crWhite);
}

CJS_Result CJS_Color::set_white(CJS_Runtime* pRuntime,
                                v8::Local<v8::Value> vp) {
  return SetPropertyHelper(pRuntime, vp, &m_crWhite);
}

CJS_Result CJS_Color::get_red(CJS_Runtime* pRuntime) {
  return GetPropertyHelper(pRuntime, &m_crRed);
}

CJS_Result CJS_Color::set_red(CJS_Runtime* pRuntime, v8::Local<v8::Value> vp) {
  return SetPropertyHelper(pRuntime, vp, &m_crRed);
}

CJS_Result CJS_Color::get_green(CJS_Runtime* pRuntime) {
  return GetPropertyHelper(pRuntime, &m_crGreen);
}

CJS_Result CJS_Color::set_green(CJS_Runtime* pRuntime,
                                v8::Local<v8::Value> vp) {
  return SetPropertyHelper(pRuntime, vp, &m_crGreen);
}

CJS_Result CJS_Color::get_blue(CJS_Runtime* pRuntime) {
  return GetPropertyHelper(pRuntime, &m_crBlue);
}

CJS_Result CJS_Color::set_blue(CJS_Runtime* pRuntime, v8::Local<v8::Value> vp) {
  return SetPropertyHelper(pRuntime, vp, &m_crBlue);
}

CJS_Result CJS_Color::get_cyan(CJS_Runtime* pRuntime) {
  return GetPropertyHelper(pRuntime, &m_crCyan);
}

CJS_Result CJS_Color::set_cyan(CJS_Runtime* pRuntime, v8::Local<v8::Value> vp) {
  return SetPropertyHelper(pRuntime, vp, &m_crCyan);
}

CJS_Result CJS_Color::get_magenta(CJS_Runtime* pRuntime) {
  return GetPropertyHelper(pRuntime, &m_crMagenta);
}

CJS_Result CJS_Color::set_magenta(CJS_Runtime* pRuntime,
                                  v8::Local<v8::Value> vp) {
  return SetPropertyHelper(pRuntime, vp, &m_crMagenta);
}

CJS_Result CJS_Color::get_yellow(CJS_Runtime* pRuntime) {
  return GetPropertyHelper(pRuntime, &m_crYellow);
}

CJS_Result CJS_Color::set_yellow(CJS_Runtime* pRuntime,
                                 v8::Local<v8::Value> vp) {
  return SetPropertyHelper(pRuntime, vp, &m_crYellow);
}

CJS_Result CJS_Color::get_dark_gray(CJS_Runtime* pRuntime) {
  return GetPropertyHelper(pRuntime, &m_crDKGray);
}

CJS_Result CJS_Color::set_dark_gray(CJS_Runtime* pRuntime,
                                    v8::Local<v8::Value> vp) {
  return SetPropertyHelper(pRuntime, vp, &m_crDKGray);
}

CJS_Result CJS_Color::get_gray(CJS_Runtime* pRuntime) {
  return GetPropertyHelper(pRuntime, &m_crGray);
}

CJS_Result CJS_Color::set_gray(CJS_Runtime* pRuntime, v8::Local<v8::Value> vp) {
  return SetPropertyHelper(pRuntime, vp, &m_crGray);
}

CJS_Result CJS_Color::get_light_gray(CJS_Runtime* pRuntime) {
  return GetPropertyHelper(pRuntime, &m_crLTGray);
}

CJS_Result CJS_Color::set_light_gray(CJS_Runtime* pRuntime,
                                     v8::Local<v8::Value> vp) {
  return SetPropertyHelper(pRuntime, vp, &m_crLTGray);
}

// Returns |*var| as a JS color array; an empty handle becomes an empty array.
CJS_Result CJS_Color::GetPropertyHelper(CJS_Runtime* pRuntime, CFX_Color* var) {
  v8::Local<v8::Value> array = ConvertPWLColorToArray(pRuntime, *var);
  if (array.IsEmpty())
    return CJS_Result::Success(pRuntime->NewArray());
  return CJS_Result::Success(array);
}

// Parses |vp| (which must be a JS array) into |*var|; rejects empty or
// non-array values.
CJS_Result CJS_Color::SetPropertyHelper(CJS_Runtime* pRuntime,
                                        v8::Local<v8::Value> vp,
                                        CFX_Color* var) {
  if (vp.IsEmpty())
    return CJS_Result::Failure(JSMessage::kParamError);
  if (!vp->IsArray())
    return CJS_Result::Failure(JSMessage::kTypeError);

  *var = ConvertArrayToPWLColor(pRuntime, pRuntime->ToArray(vp));
  return CJS_Result::Success();
}

// JS method color.convert(colorArray, destSpace): converts a color array to
// the destination color space ("T", "G", "RGB" or "CMYK"). An unknown
// destination string falls back to transparent.
CJS_Result CJS_Color::convert(CJS_Runtime* pRuntime,
                              const std::vector<v8::Local<v8::Value>>& params) {
  if (params.size() < 2)
    return CJS_Result::Failure(JSMessage::kParamError);

  if (params[0].IsEmpty() || !params[0]->IsArray())
    return CJS_Result::Failure(JSMessage::kTypeError);

  WideString sDestSpace = pRuntime->ToWideString(params[1]);
  int nColorType = CFX_Color::kTransparent;
  if (sDestSpace.EqualsASCII("T"))
    nColorType = CFX_Color::kTransparent;
  else if (sDestSpace.EqualsASCII("G"))
    nColorType = CFX_Color::kGray;
  else if (sDestSpace.EqualsASCII("RGB"))
    nColorType = CFX_Color::kRGB;
  else if (sDestSpace.EqualsASCII("CMYK"))
    nColorType = CFX_Color::kCMYK;

  CFX_Color color =
      ConvertArrayToPWLColor(pRuntime, pRuntime->ToArray(params[0]));
  v8::Local<v8::Value> array =
      ConvertPWLColorToArray(pRuntime, color.ConvertColorType(nColorType));
  if (array.IsEmpty())
    return CJS_Result::Success(pRuntime->NewArray());
  return CJS_Result::Success(array);
}

// JS method color.equal(c1, c2): compares two color arrays for equality
// after converting both into the "richer" of the two color spaces.
CJS_Result CJS_Color::equal(CJS_Runtime* pRuntime,
                            const std::vector<v8::Local<v8::Value>>& params) {
  if (params.size() < 2)
    return CJS_Result::Failure(JSMessage::kParamError);
  if (params[0].IsEmpty() || !params[0]->IsArray() || params[1].IsEmpty() ||
      !params[1]->IsArray()) {
    return CJS_Result::Failure(JSMessage::kTypeError);
  }

  CFX_Color color1 =
      ConvertArrayToPWLColor(pRuntime, pRuntime->ToArray(params[0]));
  CFX_Color color2 =
      ConvertArrayToPWLColor(pRuntime, pRuntime->ToArray(params[1]));

  // Relies on higher values having more components.
  int32_t best = std::max(color1.nColorType, color2.nColorType);
  return CJS_Result::Success(pRuntime->NewBoolean(
      color1.ConvertColorType(best) == color2.ConvertColorType(best)));
}
```
```go // // // path_to_url // // Unless required by applicable law or agreed to in writing, software // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. package core import ( "github.com/pingcap/kvproto/pkg/pdpb" "github.com/tikv/pd/pkg/movingaverage" "github.com/tikv/pd/pkg/utils/syncutil" "github.com/tikv/pd/pkg/utils/typeutil" ) type storeStats struct { mu syncutil.RWMutex rawStats *pdpb.StoreStats // avgAvailable is used to make available smooth, aka no sudden changes. avgAvailable *movingaverage.HMA } func newStoreStats() *storeStats { return &storeStats{ rawStats: &pdpb.StoreStats{}, avgAvailable: movingaverage.NewHMA(60), // take 10 minutes sample under 10s heartbeat rate } } func (ss *storeStats) updateRawStats(rawStats *pdpb.StoreStats) { ss.mu.Lock() defer ss.mu.Unlock() ss.rawStats = rawStats if ss.avgAvailable == nil { return } ss.avgAvailable.Add(float64(rawStats.GetAvailable())) } // GetStoreStats returns the statistics information of the store. func (ss *storeStats) GetStoreStats() *pdpb.StoreStats { ss.mu.RLock() defer ss.mu.RUnlock() return ss.rawStats } // CloneStoreStats returns the statistics information cloned from the store. func (ss *storeStats) CloneStoreStats() *pdpb.StoreStats { ss.mu.RLock() stats := typeutil.DeepClone(ss.rawStats, StoreStatsFactory) ss.mu.RUnlock() return stats } // GetCapacity returns the capacity size of the store. func (ss *storeStats) GetCapacity() uint64 { ss.mu.RLock() defer ss.mu.RUnlock() return ss.rawStats.GetCapacity() } // GetAvailable returns the available size of the store. func (ss *storeStats) GetAvailable() uint64 { ss.mu.RLock() defer ss.mu.RUnlock() return ss.rawStats.GetAvailable() } // GetUsedSize returns the used size of the store. func (ss *storeStats) GetUsedSize() uint64 { ss.mu.RLock() defer ss.mu.RUnlock() return ss.rawStats.GetUsedSize() } // GetBytesWritten returns the bytes written for the store during this period. 
func (ss *storeStats) GetBytesWritten() uint64 { ss.mu.RLock() defer ss.mu.RUnlock() return ss.rawStats.GetBytesWritten() } // GetBytesRead returns the bytes read for the store during this period. func (ss *storeStats) GetBytesRead() uint64 { ss.mu.RLock() defer ss.mu.RUnlock() return ss.rawStats.GetBytesRead() } // GetKeysWritten returns the keys written for the store during this period. func (ss *storeStats) GetKeysWritten() uint64 { ss.mu.RLock() defer ss.mu.RUnlock() return ss.rawStats.GetKeysWritten() } // GetKeysRead returns the keys read for the store during this period. func (ss *storeStats) GetKeysRead() uint64 { ss.mu.RLock() defer ss.mu.RUnlock() return ss.rawStats.GetKeysRead() } // IsBusy returns if the store is busy. func (ss *storeStats) IsBusy() bool { ss.mu.RLock() defer ss.mu.RUnlock() return ss.rawStats.GetIsBusy() } // GetSendingSnapCount returns the current sending snapshot count of the store. func (ss *storeStats) GetSendingSnapCount() uint32 { ss.mu.RLock() defer ss.mu.RUnlock() return ss.rawStats.GetSendingSnapCount() } // GetReceivingSnapCount returns the current receiving snapshot count of the store. func (ss *storeStats) GetReceivingSnapCount() uint32 { ss.mu.RLock() defer ss.mu.RUnlock() return ss.rawStats.GetReceivingSnapCount() } // GetAvgAvailable returns available size after the spike changes has been smoothed. func (ss *storeStats) GetAvgAvailable() uint64 { ss.mu.RLock() defer ss.mu.RUnlock() if ss.avgAvailable == nil { return ss.rawStats.Available } return climp0(ss.avgAvailable.Get()) } func climp0(v float64) uint64 { if v <= 0 { return 0 } return uint64(v) } ```
```smalltalk
namespace Amazon.Lambda.SimpleEmailEvents.Actions
{
    /// <summary>
    /// Common contract for a receipt action attached to a Simple Email
    /// Service (SES) event record.
    /// </summary>
    public interface IReceiptAction
    {
        /// <summary>
        /// Discriminator identifying which concrete receipt action this is.
        /// </summary>
        string Type { get; set; }
    }
}
```
```xml import * as React from 'react'; import { Popover, PopoverTrigger, PopoverSurface, Button, makeStyles, Checkbox, SpinButton, Label, } from '@fluentui/react-components'; const useStyles = makeStyles({ boundary: { border: '2px dashed red', padding: '20px', width: '300px', height: '300px', display: 'flex', flexDirection: 'column', alignItems: 'end', }, trigger: { display: 'block', width: '150px', marginTop: '60px', }, }); export const OverflowBoundaryPadding = () => { const styles = useStyles(); const [boundaryRef, setBoundaryRef] = React.useState<HTMLDivElement | null>(null); const [open, setOpen] = React.useState(false); const [padding, setPadding] = React.useState(8); return ( <> <div> <Checkbox labelPosition="before" label="Open" checked={open} onChange={(e, data) => setOpen(data.checked as boolean)} /> </div> <div> <Label style={{ marginRight: 4, marginLeft: 8 }} htmlFor="boundary-padding"> Padding </Label> <SpinButton id="boundary-padding" value={padding} onChange={(e, { value }) => value && setPadding(value)} /> </div> <div ref={setBoundaryRef} className={styles.boundary}> <Popover open={open} positioning={{ overflowBoundary: boundaryRef, overflowBoundaryPadding: padding, position: 'below', align: 'start', }} > <PopoverTrigger disableButtonEnhancement> <Button className={styles.trigger}>Shorthand padding</Button> </PopoverTrigger> <PopoverSurface>10px padding from boundary</PopoverSurface> </Popover> <Popover open={open} positioning={{ overflowBoundary: boundaryRef, overflowBoundaryPadding: { end: padding, top: 0, start: 0, bottom: 0 }, position: 'below', align: 'start', }} > <PopoverTrigger disableButtonEnhancement> <Button className={styles.trigger}>Longhand padding</Button> </PopoverTrigger> <PopoverSurface>10px padding from boundary end</PopoverSurface> </Popover> </div> </> ); }; OverflowBoundaryPadding.parameters = { docs: { description: { story: [ 'The `overflowBoundaryPadding` property sets the padding between the positioned element and the', 
'chosen boundary. The padding can be a shorthand number which applies for all sides, or an object', 'That explicitly sets the padding for each side.', '', '> _Design guidance recommenends using **8px** or **4px** if a padding is required._', '_Custom values are also possible but should stay within a 4px grid, please consult your_', '_designer if a custom padding is required._', ].join('\n'), }, }, }; ```
The kotsovato (Paretroplus kieneri) is a species of cichlid from northwestern Madagascar. As presently defined its range spans several river basins, but this could possibly include more than one species. It is threatened by habitat loss and competition from introduced species. This relatively elongate Paretroplus reaches about in length and is closely related to P. gymnopreopercularis, which it resembles. The specific name honours the French fisheries scientist André Kiener. References Kotsovato Freshwater fish of Madagascar Fish described in 1960 Taxonomy articles created by Polbot
Events from the year 1834 in Denmark. Incumbents Monarch – Frederick VI Prime minister – Otto Joachim Events 29 November P. Hertz is founded by Peter Hertz in Copenhagen. Undated Fædrelandet is established as a weekly journal by and Johannes Dam Hage. Henning Smith's Iron Foundry is established in Aalborg. Holmen Canal is filled out with soil. Sports Date unknown Ajax København is founded. Births 23 May Carl Bloch, painter (died 1890) 15 July – Hans Peter Johan Lyngbye (died 1920) 15 October Poul Borum, writer (died 1996) 1 December Valdemar Oldenburg, jurist and politician (died 1918) 4 December – Carl Lange, physician and psychologist (died 1900) Deaths 4 September – Peter Erasmus Müller, bishop (born 1776) References 1830s in Denmark Denmark Years of the 19th century in Denmark
Anthony of Weert was a Franciscan friar and priest who was martyred during the Dutch Revolt. Eighteen other men were martyred alongside him; they are known as the Martyrs of Gorkum. The Martyrs of Gorkum were all beatified in 1675 and then canonised by Pope Pius IX in 1867. The martyrs share a feast day on 9 July. History Anthony of Weert was a Franciscan friar and priest who belonged to the Convent of Gorcum. Anthony was taken by Calvinists alongside 18 other individuals, 11 of those were his Franciscan brothers. On 9 July 1572, they were martyred. See also Martyrs of Gorkum Franciscan Friars Dutch Revolt References Saints 16th-century Roman Catholic martyrs 16th-century Dutch Roman Catholic priests 1523 births Beatifications by Pope Clement X Canonizations by Pope Pius IX
Margaret Aull is a New Zealand painter, art manager, and curator. She works in mixed media using canvas, installations and sculpture to contribute to and comment on Māori and Pacific artistic discourse. Her work has been exhibited both nationally and internationally, showcasing work at the Casablanca Biennale, Morocco in 2018. Along with her own art practice she is an advocate for Māori arts, serving as an advisor on Te Atinga Māori Visual Arts Board, Creative New Zealand, Hamilton City Council Arts Advisory Forum and Creative Waikato's Māori Arts Advisory Group. Biography Aull studied Māori and Pacific arts at Te Wānanga o Aotearoa. In 2008 she completed her Bachelor of Media Arts at Waikato Institute of Technology. For her Masters she studied at Whitecliffe College of Arts and Design. Her thesis investigated "the notion of tapu/tabu (sacredness) in relation to objects as visual representations of ancestors and gods." Her first solo exhibition in 2008 was titled Na Kena Yali and was held at the Chartwell Gallery in Hamilton. She has produced two further solo exhibitions in 2013 and 2014. Her second solo exhibition was titled Concealed Ancestors and was shown at Papakura Art Gallery. Her third was held at OREXART in Auckland. In 2017 she exhibited alongside fellow artist-curator Nigel Borell in their joint exhibition Karanga Hokianga which was shown at Village Arts Gallery, Hokianga. In 2018 she exhibited her work in a show called “A Maternal Lens” which was held at Casablanca Biennale, Morocco. This exhibition was curated by Ema Tavola and also included the works of Julia Mage’au Gray, Leilani Kake, Kolokesa Māhina-Tuai and Vaimaila Urale. Her work in this exhibition was titled 'Tai Aroha''' and was made from Pāua. The title references a waiata that describes love like a tide that ebbs and flows. Aull describes it as "when one tide is out, the other is full; it is a synthesising relationship of how I acknowledge the two cultural foundations as a body of water." 
Aull was appointed to Te Ātinga in 2016, a contemporary Māori visual arts committee. As part of this committee she serves as an advocate for Māori artists. She is also part of The Veiqia Project, a creative research project investigating the practice of Veiqia that was founded in 2015. The work of the Veiqia Project includes shared research, public events and exhibitions. One of these exhibitions was iLakolako ni weniqia: a Veiquia project'' which was shown at the Physics Room in Christchurch. Aull is of Te Rarawa, Tūwharetoa and Fijian descent. References New Zealand Māori artists Te Rarawa people Ngāti Tūwharetoa people Fijian artists Living people Year of birth missing (living people)
Wōdejebato (formerly known as Sylvania) is a Cretaceous guyot or tablemount in the northern Marshall Islands, Pacific Ocean. Wōdejebato is probably a shield volcano and is connected through a submarine ridge to the smaller Pikinni Atoll southeast of the guyot; unlike Wōdejebato, Pikinni rises above sea level. The seamount rises for to depth and is formed by basaltic rocks. The name Wōdejebato refers to a sea god of Pikinni. It was probably formed by a hotspot in what is present-day French Polynesia before plate tectonics moved it to its present-day location. The Macdonald, Rarotonga, Rurutu and Society hotspots may have been involved in its formation. The first volcanic phase took place in the Cenomanian and was followed by the formation of a carbonate platform that quickly disappeared below the sea. A second volcanic episode between 85 and 78.4 million years ago (in the Campanian) led to the formation of an island. This island was eventually eroded and rudist reefs generated an atoll or atoll-like structure, covering the former island with carbonates and thus a second carbonate platform. The second carbonate platform drowned about 68 million years ago (in the Maastrichtian), perhaps because at that time it was moving through the equatorial area which may have been too hot or too nutrient-rich to support the growth of a coral reef. Thermal subsidence lowered the drowned seamount to its present depth. After a hiatus, sedimentation commenced on the seamount and led to the deposition of manganese crusts and pelagic sediments, some of which were later modified by phosphate. Name and research history Wōdejebato is also written as Wodejebato. The name of the seamount comes from Wōdejebato, the name of the most feared and respected sea god of Pikinni Atoll. Wōdejebato was formerly called Sylvania, after the , a ship which was involved in its first mapping in 1946. 
The seamount was discovered in 1944, and was first investigated, using mainly seismic data, during Operation Crossroads (a nuclear bomb test). Later, several times rocks were dredged from the seamount and drill cores were taken; cores 873–877 of the Ocean Drilling Program are from Wōdejebato. Geography and geology Local setting Wōdejebato lies within the Ralik Chain of islands and seamounts in the northern Marshall Islands, which consist of about three northwest-trending groups of islands of volcanic origin. Pikinni Atoll (formerly named Bikini) is located about southeast of the seamount. The seamount lies at a depth of and is about long with a flat top that narrows southeastward from over to less than . The surface of the flat top slopes inward and is covered by small depressions and knobs with an average relief of about as well as ripple marks. The flat top is surrounded by a ridge, which has a width of and an average height of . On its northern and northeastern side, this ridge is in turn surrounded by another wide slightly raised ridge. The flat top has been interpreted as a lagoon surrounded by reefs which form the inner ridge; the outer ridge appears to be a pile of skeletal sand rather than a reef and may be a spit formed by reworked material. Small mounds, probably of biological origin, are found at the margins of the seamount. The seamount is high above the sea floor and has an irregular shape, with spurs projecting from its circumference. These spurs have widths of and surface features that are distinct from those on the main flat top. The spurs appear to be rift zones, similar to these formed on Hawaii by dyke injection although some of the ridges at Wōdejebato may have a different origin. Wōdejebato appears to have four such ridges, which is more than is observed at Hawaii. 
One explanation is that the northwestern ridge is another seamount; another that Wōdejebato consists of more than one volcano although the relatively small size of the seamount would argue against this view. Wōdejebato's slopes descend rather steeply until, at depth, where they become more gentle, they are decorated with forms resembling cones and channels. Part of its southern flank, where there is a downdropped terrace, seems to have collapsed in the past. Another satellite volcanic cone lies north of Wōdejebato at a depth of . Wōdejebato contains a volcanic structure within a superficial sediment cap, and a free-air gravity anomaly has been observed on the seamount. Wōdejebato is connected to Pikinni by a wide, long and high submarine ridge and both volcanoes share a pedestal; Wōdejebato is the bigger of the two and its flat top has a larger surface than Pikinni's. Magnetic anomalies are also found on both volcanoes, with Wōdejebato featuring the more extensive one. Debris from these two volcanoes has formed an apron on their southwestern foot that is up to thick. The seafloor beneath Wōdejebato was formed during the Jurassic Quiet Zone over 156.9 million years ago. Farther north from Wōdejebato lies Lōjabōn-Bar seamount, and Look Guyot is due east. Wōdejebato appears to be one source of turbidites in the Nauru Basin. Regional setting The Pacific Ocean seafloor, especially the Mesozoic seafloor, contains most of the world's guyots (also known as tablemounts). These are submarine mountains which are characterized by steep slopes, a flat top and usually the presence of corals and carbonate platforms. While there are some differences to present-day reef systems, many of these seamounts were formerly atolls. Some atolls still exist, for example at Pikinni. All these structures originally formed as volcanoes in the Mesozoic ocean. Fringing reefs may have developed on the volcanoes, which then became barrier reefs as the volcano subsided and turned into an atoll. 
The crust underneath these seamounts tends to subside as it cools, and thus the islands and seamounts sink. Continued subsidence balanced by upward growth of the reefs led to the formation of thick carbonate platforms. Sometimes volcanic activity continued even after the formation of the atoll or atoll-like structure, and during episodes where the carbonate platforms rose above sea level, erosional features such as channels and blue holes developed. The formation of many such seamounts has been explained with the hotspot theory, which describes the formation of chains of volcanoes which get progressively older along the length of the chain, with an active volcano only at one end of the system. Seamounts and islands in the Marshall Islands do not appear to have originated from such simple age-progressive hotspot volcanism as the age progressions in the individual island and seamount chains are often inconsistent with a hotspot origin. One explanation for this contradiction may be that more than one hotspot passed through the Marshall Islands, and it is also possible that hotspot volcanism was affected by extensional deformation of the lithosphere. In the case of Wōdejebato, candidate present-day hotspots are the Macdonald hotspot which passed close to the seamount during the Aptian and Albian ages, between 115 and 94 million years ago in the early Cretaceous, and the Society hotspot and Rarotonga hotspot which approached the seamount in the late Cretaceous 85-80 million years ago, both time periods where volcanism occurred on Wōdejebato. A third hotspot which interacted with Wōdejebato is the Rurutu hotspot. The last two are the hotspots most likely to be long-lived, while many others, such as the Marquesas hotspot, were probably active discontinuously or only for brief time intervals. Based on plate motion reconstructions, the region of the Marshall Islands was located in the region of present-day French Polynesia during the time of active volcanism. 
Both regions have numerous island chains, anomalously shallow ocean floors and the presence of volcanoes. About eight hotspots have generated a large number of islands and seamounts in that region, with disparate geochemistries. Composition The rocks at Wōdejebato include basalt, breccia, carbonates, clay, claystone, limestone, manganese, manganese phosphate, peloid, shale and tuff; with an unusually large amount of pyroclastic rocks present. Organic material such as kerogen, peat and woody material has also been found. Ferromanganese crusts have been found on the seamount. The crusts are composed of asbolane, birnessite and buserite and contain iron and cobalt. Wōdejebato has been evaluated as a possible mining site for its mineral deposits. The limestones appear in several forms such as floatstone, grainstone, micrite, packstone, peloid and wackestone. Some grainstones and rudstones appear to be derived from algal and animal fossils. Many carbonate rocks have been altered, for example by cementation and leaching of their components and the dissolution of aragonite; in some samples up to half of all the rock has been altered. These processes are collectively known as diagenesis. Basalts at Wōdejebato mostly form an alkali basalt suite but also include ankaramite and hawaiite. The rocks contain clinopyroxene, olivine, plagioclase and pyroxene phenocrysts. Alteration has led to the formation of calcite, chabazite, chlorite, hydromica, pyrite, serpentine and smectite, and gaps and cavities in the rock have been filled by sediments. The element geochemistry of lavas from Wōdejebato resembles that of South Central Pacific islands such as Marotiri and Rarotonga and is consistent with magma sources of intraplate volcanism. Isotope ratios show affinities to those of volcanic rocks from the Macdonald, Rurutu, Rarotonga and Society hotspots; differences between isotope ratios of various stages of volcanism may reflect the passage of Wōdejebato over more than one "plumelet". 
Geologic history Wōdejebato formed either before or during the Santonian age (86.3 ± 0.5 – 83.6 ± 0.2 million years ago), with the Albian age (about 113 to 100.5 million years ago) being a likely candidate. Wōdejebato originated in the Southern Hemisphere and was moved by plate tectonics into the Northern Hemisphere, and paleomagnetism indicates that the seamount was located at 10 degrees southern latitude when the most recent lavas erupted. It subsequently underwent several episodes of uplift and subsidence and eventually drowned, forming the present-day seamount. Ruwitūntūn is another seamount in the Marshall Islands with a similar history. Volcanism and first biotic phenomena Volcanism at Wōdejebato appears to have occurred during two phases over a timespan of about 20 million years. The first phase took place during the Cenomanian (100.5 – 93.9 million years ago); it was characterized by explosive eruptions and may be the source of 93.9–96.3 million year old volcanic debris found in the surroundings of Wōdejebato. The second phase occurred during the Campanian between 78.4 and 85 million years ago during chron 33R; it appears to be part of a volcanic event that affected a number of other islands and seamounts in the Marshall Islands and at Wōdejebato lasted for at least four million years. The second stage appears to have been a secondary volcanic episode. Volcanic rocks sampled at Wōdejebato all belong to the second stage, probably due to sampling bias as the samples all come from the summit region. Tectonic evidence indicates that Pikinni formed at the same time as Wōdejebato, while the northern parasitic cone may be less than 80 million years old and reefs have been covered by volcanic rocks of Campanian (80 - 70 million years ago) age. An earlier proposal by Schlanger et al. 1987 envisaged Eocene (56 – 33.9 million years ago) eruptions at Wōdejebato but today the older ages are considered to be correct. 
The volcanic activity produced breccia and lava flows, probably first generating a shield volcano. Volcanic activity occurred both in shallow water and submarine forming hyaloclastite and highly vesicular rocks during phreatomagmatic eruptions, and above sea level as indicated by the presence of basaltic pebbles. Some early volcanic deposits were buried by later activity. There are conflicting reports about whether hydrothermal activity took place. Vegetation including ferns and fungi grew on the exposed island during the Campanian, leaving abundant wood remnants. Weathering of basaltic rocks produced clay sediments and soils thick have been obtained in drill cores. Platform carbonates and reefs After volcanic activity ceased, environmental processes transformed Wōdejebato into a flat-topped platform, equivalent to a present-day atoll, as the crust beneath Wōdejebato seamount subsided. Erosion and subsidence lowered the volcanic pile until seawater flooded it and marine sedimentation commenced. This platform phase lasted only about 10 million years and took place in at least two stages, in line with the generally short duration of such platform phases; they do not generally last longer than 20 million years. The growth of the platform was not continuous and was probably interrupted by one drowning event between the Albian and Campanian ages, similar to other seamounts in the Pacific Ocean which also drowned during this time. Limestones and carbonates forming a platform accumulated on Wōdejebato, with drill cores showing total thicknesses of . Compositionally, it consists mainly of sandy carbonates that are often leached and cemented by calcitic material. These deposits eventually covered the entire upper area of the volcanic high and formed the inner ridge. 
Variations in sea level occasionally led to parts of the platform either emerging above sea level or submerging, leading to erosion that generated the outer ridge and to the development of characteristic sequences within the deposits. Such carbonate platforms look like present-day atolls but unlike the biogenic frameworks of modern atolls they were formed by biogenic sediments; at Wōdejebato sandy shoals appear to have been a principal component. These carbonate deposits would then have been surrounded by a barrier reef and the redeposition, followed by stabilization, of eroded material had a role in the development of the surrounding rim. Reef mounds grew to several tens of metres in height. Foraminiferal fossil data imply that lagoonal environments existed on Wōdejebato. The central part of the guyot surface and its margins feature different platform structures, and the platform has been subdivided into several different assemblages on the basis of foraminifera stages. Environmental conditions on the platform were characterized by tropical influences. Wōdejebato was probably located in equatorial waters with temperatures likely exceeding , with temperature ranges of during the Maastrichtian. The platform was sometimes affected by storms that reworked the rock material. Soil properties imply that precipitation on Wōdejebato was less than , but erosion by precipitation water and dissolution of parts of the carbonate platform have been inferred from dissolution traces in the rocks. Sea level variations induced the formation of step-like reef tracts on Wōdejebato's carbonate platform. Much of the reefbuilding was carried out by corals, rudists and stromatoporoids. 
Unlike present-day coral reefs, reef building in the Cretaceous was carried out mainly by rudists which probably started appearing at Wōdejebato in the Albian; rudist taxa active at Wōdejebato included caprinids and radiolitids, such as Antillocaprina, Coralliochama, Distefanella, Mitrocaprina and Plagioptychus. Furthermore, benthic foraminifers were active from the Campanian to the Maastrichtian; they include Asterorbis, Pseudorbitoides trechmanni, Omphalocyclus macroporus and Sulcoperculina as well as other discorbids, lituolids, miliolids, opthalmiids, orbitoids, peneroplids, placopsilinids, rotaliids and textulariids. Other lifeforms that were fossilized in the carbonate reefs were algae including green algae (codiaceans and dasycladaceans) and red algae (corallinaceans, peyseonneliaceans and solenoporaceans); some algae formed rhodoliths. In addition there were bivalves (inoceramids and pycnodonts), bryozoans, corals, gastropods, echinoderms, echinoids, ostracods and sponges. Drowning and post-drowning evolution It is likely that Wōdejebato drowned during the Maastrichtian age around 68 million years ago, probably accompanied by a sea level rise of about . Before the terminal drowning, Wōdejebato's carbonate platform emerged from the sea, leading to the development of karst features; two separate emersion events took place 68 and 71 million years ago. Sea level rise on its own probably does not explain the drowning. Various paleoenvironmental stressors have been invoked to explain the drowning such as short-term climate fluctuations during the Maastrichtian and the passage of the seamount through the equatorial upwelling zone. The water in this region may have been too hot for the reef to survive: Other guyots in the Pacific Ocean such as Limalok, Lo-En and Takuyo-Daisan also drowned when they were within ten degrees from the equator on the Southern Hemisphere, implying that this region of the Pacific Ocean was in some way harmful to shallow water reefs. 
The subsidence that occurred after Wōdejebato moved away from the influence of the Rurutu hotspot may have also played a role. Pikinni was probably higher than Wōdejebato at this time and hence escaped drowning. After the drowning had taken place, thermal subsidence of the crust beneath Wōdejebato occurring at a rate of lowered the platform of Wōdejebato to a depth of about below sea level. Between the Maastrichtian and the Eocene, manganese crusts formed on the exposed limestones and gravels formed by erosion; in turn they were subject to alteration processes such as phosphatization during three different episodes in the Eocene. Approximately 40 million years passed between the drowning and subsequent deposition events. Pelagic sedimentation took place, which formed an ooze consisting of foraminiferal and nannofossil deposits between the Miocene and Pleistocene, with a Miocene unconformity. In one drill core, this sediment layer is thick. Currents affected mid- to late Pleistocene sedimentation. Among the foraminifera deposited here are Florisphaera, Gephyrocapsa, Globigerina, Globorotalia, Helicosphaera, Pseudoemiliania and potentially Sphaeroidinella species. Foraminifera taken from Wōdejebato usually belong to pelagic species. Ostracods have also been identified; common taxa are cytherurids as well as Bradleya, Cytheralison and Krithe species. Presently, Wōdejebato lies below the thermocline and the temperature of the water washing over the seamount is about . Circumstantial evidence indicates that deep seawater dissolved large amounts of carbonate rocks including aragonite after Wōdejebato was submerged; the seamount is located below the aragonite saturation depth and that causes the aragonite to dissolve. Some of the dissolved aragonite has precipitated again in the form of calcite, and sediments have partially filled cavities within the carbonate rocks. 
Notes References Sources Seamounts of the Pacific Ocean Extinct volcanoes Landforms of the Marshall Islands Mesozoic volcanoes Ralik Chain
Diadelia leucovittata is a species of beetle in the family Cerambycidae. It was described by Breuning in 1970. References Diadelia Beetles described in 1970
Archimede Mischi (Forlì, 26 March 1885 – 15 August 1970) was an Italian Blackshirt general during World War II. Biography He was born in Forlì on March 26, 1885, the son of Ulisse Mischi and Rosa Silvagni. Having enlisted in the Royal Italian Army in November 1894, he was admitted to attend the Royal Military Academy of Infantry and Cavalry of Modena, graduating with the rank of infantry second lieutenant, assigned to the 1st Grenadiers of Sardinia Regiment. In 1908 he received an honorable mention for his participation in the rescue effort after the Messina earthquake. He was promoted to lieutenant in July 1909, and starting from September 1912 he fought in Libya with the grenadiers battalion, returning to Italy in July 1913. In September of the same year he entered service at the 6th Infantry Regiment of the Aosta Infantry Brigade, and on the following 28 December he married Miss Michela Vitrano, with whom he had two children. He was then assigned to the 19th Infantry Regiment and promoted to captain in December 1914, and in the following January he was transferred to the 142nd Infantry Regiment of the Catanzaro Infantry Brigade, where he was when the Kingdom of Italy entered the First World War on May 24, 1915. On July 26 he distinguished himself in the fighting on the Karst plateau, being awarded his first Silver Medal of Military Valor and being temporarily given command of his battalion; he was awarded another Silver Medal for his role in the fighting in Bosco Cappuccio between 21 October and 1 November 1915, during the Third Battle of the Isonzo, where he was again wounded and promoted to major for war merit. On August 6, 1916 he was wounded for the third time on Monte San Michele, thus earning a third Silver Medal; in October of the same year, for having distinguished himself in the defense of Monte Cengio during the Battle of Asiago, he was awarded the Knight's Cross of the Military Order of Savoy. 
On January 26, 1917 he was transferred to the 90th Infantry Regiment of the Salerno Brigade, commanding a reinforced battalion during the attack on the enemy trenches located at Hudi Log (Monte Ermada), on the Karst plateau, being again wounded (in his left arm, which was left permanently disabled) and awarded a fourth Silver Medal for military valor. He was hospitalized and on 25 November he was promoted to lieutenant colonel; after recovering, on 3 July 1917 he was posted in Genoa, but his precarious health conditions forced him to a prolonged stay at the physiotherapy center of the VII Army Corps, which lasted from 14 August 1918 to 6 December 1919. From December 1918 to April 1919 he was part of a commission tasked with questioning repatriated prisoners of war in Ferrara. On December 6, 1919 he was assigned to the 81st Infantry Regiment "Torino" in Rome, temporarily made available to the Ministry of the Interior, after which he was temporarily transferred to the 226th Infantry Regiment on 19 October 1920 and, ten days later, to the 6th Infantry Regiment stationed in Palermo. In Palermo on 7 July 1921 he was appointed alternate judge at the Special Military Court, maintaining this position until 4 May 1924. On 22 October 1922 he was assigned to the local school for reserve officers and non-commissioned officers, where he remained until 14 March 1926, when he was placed on leave at his request. He was promoted to colonel on 2 February 1927, and on 27 April 1927 he joined the Volunteer Militia for National Security with the rank of console (equivalent to Army colonel), assuming command of the 171st Blackshirt Legion "Vespri di Palermo". He requested to be sent to the front in Libya to participate in the operations against the Senussi rebels, but his request was rejected. In 1928 he met Benito Mussolini for the first time in Rome, who tasked him with reorganizing the 82nd Blackshirt Legion "Benito Mussolini" of Forlì. 
In January 1929 he joined the Federal Directory, and in September of the same year he became president of the Provincial Veterans' Federation. In March 1930 he was received again by Mussolini, to whom he again expressed the desire to leave for Libya, but the Duce instead gave him command the 80th Blackshirt Legion "Alessandro Farnese" of Parma. In October 1932 he was temporarily recalled into service with the Royal Italian Army at the disposal of the Ministry of Colonies, being sent to Cyrenaica at the command of the 2nd Libyan MVSN Legion "Berenice". Military operations for the pacification of the colony were however over, and Mischi’s troops were only given garrison tasks; in September 1934 he was promoted to MVSN console generale, and on June 20, 1935 he was promoted to brigadier general of the Royal Italian Army for exceptional merits. In anticipation of the outbreak of the war with Ethiopia, on 3 June 1935 he was appointed deputy commander of the 3rd CC.NN. Division "21 Aprile", landing in Eritrea on 12 September of the same year. During the war he distinguished himself on February 29, 1936 in an action in Ahab Saat, for which he was awarded his fifth Silver Medal for military valor. After the end of hostilities he remained in Italian East Africa, taking command, on 25 September 1936, of the 6th CC.NN. Division "Tevere", participating in operations against Ethiopian troops loyal to Ras Desta Damtew. He was later promoted to luogotenente generale (Major General) of the MVSN on 21 June 1937, and Major General of the Army in November; in December 1937 he assumed the position of Inspector of the Blackshirts in Italian East Africa while also holding the command of the 6th Blackshirt Mixed Brigade "Tevere". During his stay in East Africa he also carried out political and administrative duties as commissioner, first in Mojo and then in Dessie. 
Returning to Italy on 1 September 1938, in May of the following year he was appointed commander of the Border Militia, the MVSN branch tasked with the surveillance of the land borders of the Kingdom of Italy in order to prevent clandestine entries and expatriations, with headquarters in Turin and four legions stationed throughout the Alps, with a total of 120 officers and 2,200 non-commissioned officers and soldiers. Except for a period between January and October 1941, when he was seconded attached to the 11th Army on the Greek-Albanian front as MVSN liaison officer, he was commander of the Border Militia until 1943. On 1 January 1942 he was promoted to Lieutenant General; during the final phase of his period of command, the border militia was engaged in numerous clashes with Yugoslav partisans in the Julian March and in the annexed areas around Fiume and Ljubljana. Shortly after the fall of Fascism, on 9 August 1943, Mischi resigned from his post as commander of the Border Militia and was placed at the disposal of the general command of the MVSN. After the Armistice of Cassibile, the German occupation of Italy and the birth of the Italian Social Republic in September 1943, he presented himself in Rome to Marshal of Italy Rodolfo Graziani, Minister of National Defense of the RSI, who on 3 October appointed him Commander of the Carabinieri. He then left the capital to talk with Mussolini, who was at the Rocca delle Caminate, and was thus absent from Rome when on 6 October the German command imposed the disarmament and internment in Germany of the Carabinieri of Rome. The news of this incident, which soon spread throughout Italy, greatly hampered his attempts to reorganize the Carabinieri units of central and northern Italy as a force loyal to the Italian Social Republic. Finally, on 8 December 1943 the Carabinieri were formally dissolved and merged with the MVSN into the new Republican National Guard. 
On January 2, 1944 Mischi was appointed chairman of the commission tasked with reviewing the former officers of the Royal Italian Army who wanted to pass through the newly established National Republican Army (ENR). On May 14, when the Chief of Staff of the ENR, General Gastone Gambara, fell ill, Mischi was chosen to replace him; however, having never attended the Army War School or any other specialization course, he felt unprepared to fill this position, in addition to resenting the subordinate position of the RSI armed forces towards the German commands (the ENR General Staff had no autonomy over the operational plans and the use of the troops on the front against the Allies, and its efforts were mainly oriented to the contrast of the Italian Resistance). At the end of July, while retaining the position of Chief of Staff, he received direct orders from Graziani to "normalize" Piedmont, where partisan activity was rampant, employing a division that would have been specially set up for this purpose. He thus spent the remainder of the war fighting the partisans in Piedmont, while continuing to maintain relations with the commanders of the other military areas, and trying to maintain the unity of direction of the various ENR units through a series of inspections, the last of which he held at the end of March 1945, inspecting the troops deployed along the Adriatic coast. Mischi opposed the idea of the Valtellina Redoubt, and in the days of the collapse of the Italian Social Republic he retreated from Milan to Lecco, where on 25 April 1945 he attempted suicide by cutting his wrists at the Albergo Moderno. Admitted to hospital in a state of coma, as soon as he recovered he was imprisoned in the Coltano prisoner-of-war camp and then handed over by the Allies to the Italian authorities. 
He was tried by the Special Court of Assizes of Turin for a series of war crimes related to his activity in Italian Social Republic, committed by troops under his command in Piedmont; he however successfully raised doubts about the impartiality of that tribunal and on 3 June 1947 he obtained the transfer of his case to the Court of Assizes of Rome. The trial was held between November 13 and December 3, 1947, with the prosecutor requesting the death penalty; he was acquitted of some charges and sentenced to eighteen years in prison for the others, of which six were pardoned. He was then imprisoned in the military prison of Forte Boccea until January 1950, when he was released. On 3 June 1952 he was fully rehabilitated by the Court of Appeal of Rome, and on 18 October 1955 the Court of Cassation overturned the 1947 sentence "for not having committed the crime". Having settled in Forlì, he joined the local section of the Italian Social Movement, dying in his hometown on August 15, 1970. References Bibliography 1885 births 1970 deaths Italian military personnel of World War I Italian military personnel of World War II Italian military personnel of the Second Italo-Ethiopian War Italian generals Italian people convicted of war crimes Blackshirts People of the Italian Social Republic Recipients of the Silver Medal of Military Valor People convicted of treason against Italy
```xml
<?xml version="1.0" encoding="utf-8"?>
<!-- NOTE(review): "red_600_red_300" resolves to red_300 here — presumably this file is
     the dark/night variant of a light/dark color-pair set; confirm against the
     corresponding day (values/) resource file. -->
<resources>
    <color name="red_600_red_300">@color/red_300</color>
    <color name="components_interactive">@color/red_400</color>
</resources>
```
```ts
// NOTE(review): this block was fenced as "xml" in the source document, but the content
// is a TypeScript (vitest) unit-test file; the fence language has been corrected.
//
// Unit tests for the Form drawing transform: verifies that Form serializes to an
// "a:xfrm" element containing a zeroed offset ("a:off") and the given EMU extents
// ("a:ext"), and that the optional flip settings map to the flipH/flipV attributes.
import { describe, expect, it } from "vitest";

import { Formatter } from "@export/formatter";

import { Form } from "./form/form";

describe("Form", () => {
    describe("#constructor()", () => {
        it("should create", () => {
            const tree = new Formatter().format(
                new Form({
                    pixels: {
                        x: 100,
                        y: 100,
                    },
                    emus: {
                        x: 100,
                        y: 100,
                    },
                }),
            );

            expect(tree).to.deep.equal({
                "a:xfrm": [
                    {
                        _attr: {},
                    },
                    {
                        "a:off": {
                            _attr: {
                                x: 0,
                                y: 0,
                            },
                        },
                    },
                    {
                        "a:ext": {
                            _attr: {
                                cx: 100,
                                cy: 100,
                            },
                        },
                    },
                ],
            });
        });

        it("should create with flip", () => {
            const tree = new Formatter().format(
                new Form({
                    pixels: {
                        x: 100,
                        y: 100,
                    },
                    emus: {
                        x: 100,
                        y: 100,
                    },
                    flip: {
                        vertical: true,
                        horizontal: true,
                    },
                }),
            );

            expect(tree).to.deep.equal({
                "a:xfrm": [
                    {
                        _attr: {
                            flipH: true,
                            flipV: true,
                        },
                    },
                    {
                        "a:off": {
                            _attr: {
                                x: 0,
                                y: 0,
                            },
                        },
                    },
                    {
                        "a:ext": {
                            _attr: {
                                cx: 100,
                                cy: 100,
                            },
                        },
                    },
                ],
            });
        });
    });
});
```
```java package utilities; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.util.Arrays; import java.util.logging.Level; import java.util.logging.Logger; public class SubprocessUttility { private static final Logger LOGGER = Logger.getLogger(SubprocessUttility.class.getName()); /** * Execute a command in the runtime environment * @param command The command to execute * @param cwd directory in which the command should be executed. Set null or empty string to execute in the current directory * @return stdout and stderr of the command * @throws ExecutionException if there is any exception encountered. */ public static String[] execute(String command, String cwd) throws ExecutionException { final File dir; if (cwd != null && !cwd.isEmpty()) { dir = new File(cwd); } else { dir = null; } return execute(command, new ExceptableFunction<Void, Process, IOException>() { @Override public Process apply(Void d) throws IOException { return Runtime.getRuntime().exec(command, null, dir); } }); } /** * Execute a command in the runtime environment * @param command The command to execute * @param cwd directory in which the command should be executed. Set null or empty string to execute in the current directory * @return stdout and stderr of the command * @throws ExecutionException if there is any exception encountered. */ public static String[] execute(String[] command, String cwd) throws ExecutionException { final File dir; if (cwd != null && !cwd.isEmpty()) { dir = new File(cwd); } else { dir = null; } return execute(String.join(" ", Arrays.asList(command)), new ExceptableFunction<Void, Process, IOException>() { @Override public Process apply(Void d) throws IOException { return Runtime.getRuntime().exec(command, null, dir); } }); } private static String[] execute(String command, ExceptableFunction<Void, Process, IOException> processSupplier) throws ExecutionException { // 0 for stdout, 1 for stderr. 
final boolean[] fail = new boolean[2]; try { StringBuffer stdout = new StringBuffer(); StringBuffer stderr = new StringBuffer(); // Process process = Runtime.getRuntime().exec(command, null, dir); Process process = processSupplier.apply(null); BufferedReader bufferStdout = new BufferedReader(new InputStreamReader(process.getInputStream())); BufferedReader bufferStderr = new BufferedReader(new InputStreamReader(process.getErrorStream())); Thread t1 = new Thread() { @Override public void run() { try { readFromStream(bufferStdout, stdout); } catch (Exception e) { LOGGER.log(Level.WARNING, "Exception encountered reading stdout of command $" + command, e); fail[0] = true; } } }; t1.start(); Thread t2 = new Thread() { @Override public void run() { try { readFromStream(bufferStderr, stderr); } catch (Exception e) { LOGGER.log(Level.WARNING, "Exception encountered reading stderr of command $" + command, e); fail[1] = true; } } }; t2.start(); t1.join(); t2.join(); process.waitFor(); if (fail[0] || fail[1]) { LOGGER.log(Level.WARNING, "Exception encountered when executing command $" + command); throw new ExecutionException(); } return new String[] {stdout.toString(), stderr.toString()}; } catch (Exception e) { LOGGER.log(Level.WARNING, "Exception encountered while running command " + command, e); throw new ExecutionException(); } } private static void readFromStream(BufferedReader reader, StringBuffer output) throws IOException { String line; while ((line = reader.readLine()) != null) { String trimmed = line.trim(); if (trimmed.length() == 0) { continue; } output.append(trimmed); output.append("\n"); } } /** * Execute a command in the runtime environment * @param command The command to execute * @param cwd directory in which the command should be executed. Set null to execute in the current directory * @return stdout of the command, or empty string if there is any exception encountered. 
*/ public static String execute(String command, File cwd) throws ExecutionException { String path = null; if (cwd != null) { path = cwd.getPath(); } return execute(command, path)[0]; } /** * Execute a command in the runtime environment * @param command The command to execute * @return stdout of the command, or empty string if there is any exception encountered. */ public static String execute(String command) throws ExecutionException { return execute(command, "")[0]; } public static class ExecutionException extends Exception { private static final long serialVersionUID = 6688739122137565700L; private ExecutionException() {} } private SubprocessUttility() {} } ```
Turbonilla farroupilha is a species of sea snail, a marine gastropod mollusk in the family Pyramidellidae, the pyrams and their allies. Description The shell grows to a length of 7.8 mm. Distribution This species occurs in the Atlantic Ocean off Brazil at depths between 42 m and 56 m. References External links To Encyclopedia of Life To World Register of Marine Species farroupilha Gastropods described in 2004
The South Somerset Hydropower Group (SSHG) is a group of 10 owners of former watermills in the South Somerset area of England who are installing micro-hydro turbines for electricity generation. The Group was founded as a result of an initiative by South Somerset District Council, and was the first of its kind in the United Kingdom. The Group won one of the 2005 Ashden Awards, and the concept has been adopted by a number of other similar groups. The South Somerset Hydropower Project was begun in 2001 and the first turbine, at Gants Mill (Pitcombe), was commissioned in 2003. It now produces up to 12 kW of electricity from a 300 mm cross-flow turbine. Other sites in the Project include Clapton Mill (Clapton), Cole Manor (Bruton), Cary's Mill (Martock) and Cutterne Mill (Evercreech). When all the mills are in operation it is expected that they will collectively generate around 600 MWh per year, sufficient to supply about 150 homes and avoid the production of 260 tonnes of carbon dioxide emissions. The SSHG estimates that there are about 40,000 mill sites that might be suitable for micro-hydropower in the United Kingdom. It has been calculated that harnessing the power from all the streams and rivers in the UK could generate 10,000 GWh per year, enough to supply 3% of national generating capacity. See also :Category:Community electricity generation in the United Kingdom Energy use and conservation in the United Kingdom Energy policy of the United Kingdom Energy conservation Renewable energy References External links Gant's Mill (Mill's own site) Clapton Mill (Mill's own site) Hydro Generation - Micro-hydro System Supplier Community electricity generation in the United Kingdom Electric power companies of the United Kingdom Hydroelectricity in the United Kingdom South Somerset Companies based in Somerset
```javascript
/*
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 *
 * path_to_url
 *
 * Unless required by applicable law or agreed to in writing,
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * specific language governing permissions and limitations
 */

/**
 * Controller backing the "register user" form: exposes the available roles,
 * the tenant hierarchy visible to the current user, and the registration action.
 *
 * @param {*} $scope
 * @param {import("../../../../service/utils/FormUtils")} formUtils
 * @param {import("../../../../service/utils/TenantUtils")} tenantUtils
 * @param {import("../../../../service/utils/LocationUtils")} locationUtils
 * @param {import("../../../../api/RoleService")} roleService
 * @param {import("../../../../api/TenantService")} tenantService
 * @param {import("../../../../api/UserService")} userService
 * @param {import("../../../../models/UserModel")} userModel
 */
var FormRegisterUserController = function($scope, formUtils, tenantUtils, locationUtils, roleService,
        tenantService, userService, userModel) {

    // Fetch all roles and expose them on the scope, ordered by name.
    var loadRoles = function() {
        roleService.getRoles().then(function(roles) {
            $scope.roles = _.sortBy(roles, 'name');
        });
    };

    // Resolve the logged-in user's tenant first, then build the sorted,
    // level-annotated tenant hierarchy rooted at that tenant's parent.
    var loadTenants = function() {
        tenantService.getTenant(userModel.user.tenantId).then(function(currentTenant) {
            tenantService.getTenants().then(function(allTenants) {
                var grouped = tenantUtils.groupTenantsByParent(allTenants);
                $scope.tenants = tenantUtils.hierarchySort(grouped, currentTenant.parentId, []);
                tenantUtils.addLevels($scope.tenants);
            });
        });
    };

    // Model bound to the registration form fields.
    $scope.registration = {};

    // Submit the completed registration to the backend.
    $scope.register = function(registration) {
        userService.registerUser(registration);
    };

    // Display label for a role entry in the role picker.
    $scope.roleLabel = function(role) {
        return role.name;
    };

    $scope.navigateToPath = (path, unsavedChanges) => locationUtils.navigateToPath(path, unsavedChanges);

    $scope.hasError = formUtils.hasError;
    $scope.hasPropertyError = formUtils.hasPropertyError;

    // Initial data load.
    loadRoles();
    loadTenants();
};

FormRegisterUserController.$inject = ['$scope', 'formUtils', 'tenantUtils', 'locationUtils', 'roleService',
    'tenantService', 'userService', 'userModel'];
module.exports = FormRegisterUserController;
```
Praveen Singh Aron is a politician belonging to the Indian National Congress party. He was a member of the Lok Sabha, the lower house of the Parliament of India from the Bareilly Lok Sabha constituency between 2009 and 2014. He had also served as MLA twice from Bareilly between 1989 and 1991 and 1993 and 1995. He has also served as minister of state in the government of Uttar Pradesh. Positions held 1. 1989-91 Member, Uttar Pradesh Legislative Assembly from Bareilly Cantonment Janata Dal 2. 1993-95 Member, Uttar Pradesh Legislative Assembly (second term) from Bareilly Cantonment Samajwadi Party 3. Minister of State, Science and Information Technology, Government of Uttar Pradesh 4. from 1995 Minister of State, Health and Family Welfare, Government of Uttar Pradesh 5. 2009 Elected to 15th Lok Sabha from Bareilly References 1959 births Living people Indian National Congress politicians People from Bareilly district India MPs 2009–2014 Lok Sabha members from Uttar Pradesh United Progressive Alliance candidates in the 2014 Indian general election Samajwadi Party politicians from Uttar Pradesh Janata Dal politicians Indian National Congress politicians from Uttar Pradesh
James Otis Purdy (July 17, 1914 – March 13, 2009) was an American novelist, short-story writer, poet, and playwright who, from his debut in 1956, published over a dozen novels, and many collections of poetry, short stories, and plays. His work has been translated into more than 30 languages and in 2013 his short stories were collected in The Complete Short Stories of James Purdy. He has been praised by writers as diverse as Edward Albee, James M. Cain, Lillian Hellman, Francis King, Marianne Moore, Dorothy Parker, Dame Edith Sitwell, Terry Southern, Gore Vidal (who described Purdy as "an authentic American genius"), Jonathan Franzen (who called him, in Farther Away, "one of the most undervalued and underread writers in America"), A.N. Wilson, and both Jane Bowles and Paul Bowles. Purdy was the recipient of the Morton Dauwen Zabel Fiction Award from the American Academy of Arts and Letters (1993) and was nominated for the 1985 PEN/Faulkner Award for his novel On Glory's Course (1984). In addition, he won two Guggenheim Fellowships (1958 and 1962), and grants from the Ford Foundation (1961), and Rockefeller Foundation. He worked as an interpreter, and lectured in Europe with the United States Information Agency. Early life, education and early career Purdy was born in Hicksville, Ohio, in 1914. His family moved to Findlay, Ohio, when he was about five years old, where he graduated from Findlay High School in 1932. Purdy's parents went through a separation and then a bitter divorce in 1930 after his father lost large sums of money in investments gone bad. His mother then converted their home in Findlay to a boarding house of which she was proprietress. Purdy earned a Bachelor of Arts teaching degree in French from Bowling Green State College in 1935, and taught French at the Greenbrier Military School in West Virginia. Then he studied at the University of Chicago, where he earned a master's degree in English in 1937. He joined the United States Army in May 1941. 
After serving in the Army, he studied Spanish at the University of Chicago (1944–45). He spent the summer of 1945 at the University of Puebla, Mexico, and taught English at the Ruston Academy in Havana, Cuba, in 1945–1946. For the next nine and a half years, he taught Spanish at Lawrence College, in Appleton, Wisconsin. In the mid-1950s, with encouragement and support from Miriam and Osborn Andreas and the Andreas Foundation (Archer Daniels Midland), Purdy returned to Chicago to pursue writing. Artistic scenes and influences In 1935, soon after his arrival in Chicago to attend the University of Chicago, Purdy, broke and without friends, met the painter Gertrude Abercrombie. She was nicknamed the "Queen of the Bohemian Artists". His vast body of work includes many works inspired by his close relationship with Abercrombie and her underground salon (which had its roots in the salon of Gertrude Stein). During the 1930s, Purdy was one of Abercrombie's closest friends. This American incarnation of the creative parlour had at the center those who were to become the jazz greats: Percy Heath, Sonny Rollins, Erroll Garner, Dizzie Gillespie, Charlie Parker, Max Roach, Miles Davis and Sarah Vaughan. Purdy attended the all-night, weekend gatherings where bebop and jazz were improvised by these greats (many times with Abercrombie at the piano). The concerts impressed him deeply. "Through these jazz singers and musicians, who would often stay with Abercrombie, young Purdy received an intensive education in African American music and culture." Indeed, the high incidence of black figures in Purdy's work went unnoticed by critics and reviewers because they were so thoroughly integrated. Equally important was his intensive study as a young boy of the Old Testament in the King James Version of the Bible as well as the Complete Works of William Shakespeare. All were key in making Purdy the writer he became. 
For quite some time during his Chicago years, Purdy lived in Abercrombie's "ruined" mansion, with members of the Modern Jazz Quartet. The music and lives these jazz musicians were able to create from their own humble origins inspired Purdy to realize that he could create a uniquely individual voice in literature, using his American small-town speech patterns and his worlds of poverty and neglect. Abercrombie and those in her "circle" had done the same with painting. They had "taken the essence of our music and transported it to another form", according to her friend and fellow artist Dizzie Gillespie. Purdy's associations with these jazz artists and especially his meeting with Billie Holiday gave him the insight as well as the confidence to move from an upstart and lost boy, prone to running wild, to a world-class writer and artist. His relationship to the painters in Abercrombie's circle of magic realists Ivan Albright, Dudley Huppler, Karl Priebe, Julia Thecla, and John Wilde helped develop the strokes of imagery he would use to create his own version of an American "magic realism" in literature. Writing The influence of Chicago's jazz scene and the experience of the "New Negro Renaissance" is reflected in all his early work. It begins with the short story "Eventide" printed first in the private collection Don't Call Me by My Right Name and then commercially in the collection Color of Darkness (Teeboy who would never be coming home again, played the tenor saxophone at The Music Box and had his hair made straight), to the novella 63 Dream Palace (63rd Street is home to the Chicago jazz scene), then to Children is All, Cabot Wright Begins, and Eustace Chisholm and the Works. Even his small-town Ohio novel The Nephew echoes the story of the boy who would never be coming home again. "Eventide" was the pivotal story which led to his becoming a published writer. 
His final novel Gertrude of Stony Island Avenue harks back to a remembrance of painter Abercrombie and others in her circle of artists. Narrow Rooms (1977) is, at an initial level, a personal communication looking back some 25 years to Wendell Wilcox, a failed writer in the Abercrombie circle. Wilcox, who had once enjoyed a degree of success, stopped publishing at the very moment Purdy began commercial publication. Always of major significance was jazz both in Chicago and New York City. Shortly after his move to New York City, Carl Van Vechten and the Harlem Renaissance circle became a lens for his work. The comic novels I am Elijah Thrush, Out with the Stars and Garments the Living Wear are the New York incarnations of this reflection. Abercrombie also introduced the young student to others in her circle, to Miriam Bomberger Andreas and to the industrialist and literary essayist, Osborn Andreas, both of whom would become extremely significant in Purdy's life and work. His first book, Don't Call Me by My Right Name and Other Stories, was privately published by Osborn with the Andreas Foundation. The title story is based on Andreas' wife, Miriam. His first five books, with the exception of The Nephew, were inspired by his association with Miriam and Osborn Andreas. His first novel, which set forth his own developing style of American magic realism, was praised lavishly by Dorothy Parker and others of great literary merit. It was for decades a staple of the undergraduate American Literature curriculum of many American colleges and universities. If Abercrombie and the Andreases inspired Purdy to become a writer, then Dame Edith Sitwell made him a known one. When she received the privately printed edition, which Purdy had on a hunch sent to her, of Don't Call Me by My Right Name and Other Stories, she was convinced she had discovered a great black writer from the story "Eventide", which she felt only a black man could write. 
After she had asked Purdy to supply more instances of his work, Purdy sent her his newly published private edition of 63: Dream Palace. Both books were designed by Purdy with his own unique drawings. Upon the additional basis of this new work, Sitwell had become convinced that he was "a writer of genius" (her words); and she obtained a serious commercial publisher for his work in England. She would later write the prefaces for the publication of both these works. Her reviews, pronouncements, and assessments of his further works helped him create a coterie of supporters (notably Parker and Angus Wilson) both in England and the U.S. Purdy felt he would never have been a known writer without her: "My stories were always returned with angry, peevish, indignant rejections from the New York slick magazines and they earned if possible even more hostile comments from the little magazines. All editors were insistent that I would never be a published writer." Obstacles to wider acceptance Through all his work, Purdy dealt primarily with outsiders: women, blacks, Native Americans (his maternal grandmother was 1/8 Ojibway), homosexuals (living far outside the conventional gay community) – anyone who could be seen to be outside the circle of "normal" acceptability. His final short story, Adeline, written at age 92, is a tale of transgender acceptance. Much of his early work takes place in extreme poverty, and is located in a small-town, heightened American vernacular. In the beginning of her assessment of him, Sitwell felt he was always writing the black experience without necessarily mentioning race. Purdy's association with the American black experience is paramount to understanding him as an artist. In addition to his beginnings with Abercrombie, Van Vechten took him up when he arrived in New York City and introduced him to his own important New York City circle of black artists, boxers and activists. 
Langston Hughes praised Purdy as "the last of the [n-word] writers" for his use of the vernacular. He was seen as a master of different kinds of American vernacular as well. In addition to his knowledge of modern European languages, Purdy knew Latin and ancient Greek, and maintained an extensive classical library. His novel In a Shallow Grave has overt classical references running throughout, as do many others. His final novel Gertrude of Stony Island Avenue echoes the story of Demeter descending into Hades in search of her daughter Persephone. The novels that beleaguered his reputation, such as Eustace Chisholm & the Works and Narrow Rooms, merely restate in a modern context the psychology of Dionysus set forth in The Bacchae by Euripides. The outer texture of his work is realistic while the deeper and more elusive interior reveals a mythic, almost archetypal trail. Its great age is apparent; its history is clearly rooted in the classics and in the Old Testament. Thus his work can be very American but it can be appreciated by a western reader familiar with these literatures. In his compressed dialogue structure, Purdy was ahead of his time. Much-later writers like David Mamet, Harold Pinter, and Samuel Beckett (also an admirer) paved the way to the acceptance of works in this "distilled" style which has now become the sine qua non of the modern audience with its very different attention span. His early stories from the 1940s and 1930s were, because of their brevity, not even considered short stories at all at the time. Now this brevity of conveying a fullness and richness of experience in what Sitwell called a "marrow of form" has almost become a necessary standard. Both his "distilled" style and his reliance on dialogue to tell his story eluded the normal contemporary reader of his early days. There was an ingrained custom towards a much longer, more expository experience. His roots were in drama. 
Purdy started writing plays as a child, crafting them to win his elder brother's approval. Purdy would act all the characters in the plays, and play them out using stick-figures, which is consistent with the early origins of Federico García Lorca. Purdy became known as a "homosexual writer" after the publication of Eustace Chisholm and the Works. Gore Vidal indicates that one obstacle to his more widespread recognition was the impossibility of reconciling his work that was labeled and published as "gay" to some of his other works and especially to the Faulkneresque novels based on his ancestors. Even today, as Vidal asserts, it is a problem that needs a solution. Sitwell had recognized this when she stated that Purdy "has enormous variety". Cutting edge From the start, his work had often been at the edge of what was printable under American censorship. The major US publishing houses rejected his two early books 63: Dream Palace (1956), and Colour of Darkness (1961), which had to be printed privately abroad. The publishers, according to Purdy, believed that he was insane. In 1972, the supposedly liberal New York literary establishment was outraged by his I am Elijah Thrush. Although his work was appreciated in Europe, Purdy encountered censorship there too. Victor Gollancz could not bring himself to print the word "motherfucker" in the 1957 UK edition of 63: Dream Palace. As late as May 1990, the German government tried to ban Narrow Rooms, but received the ruling that it was a "work of the literary imagination which had no business in the courts". Although many readers were scandalized, a solid cadre of distinguished critics and scholars embraced his work from the start, including John Cowper Powys, Dame Edith, Dorothy Parker, and Susan Sontag, who warmly defended him against puritanical critics. Tennessee Williams was also an early admirer of Purdy's work. 
Cabot Wright Begins and Eustace Chisholm & the Works In January 1966, an incendiary manifesto by Stanley Kauffmann set forth a bluntly damning and prejudicial way of criticizing works by homosexual writers. The article stirred the arts community. This finger in the wind of the so-called liberal critical establishment actually reflected the deep nature of an institutionalized prejudice throughout the media. Soon afterwards, Purdy set out to write a novel of what he experienced in Abercrombie's Chicago scene of the 1930s. This time it was to reflect his fitfully terminated friendship with Wendell Wilcox, a writer of minor achievement in their circle. It would also include a scathing portrait of the department store heir Norman Macleish of the noted Chicago family. All of Purdy's work after Eustace Chisholm would subsequently be met with both great praise on the one side, and stern, vehement condemnation and misunderstanding on the other. In 1967, a year after the publication of the treatise to limit homosexual artists, his Eustace Chisholm and the Works, his "undisguised" bisexual work, was put forth. The novel is dedicated to Albee. Several high-profile critics were extremely hostile to the book, with its violent and explicitly homoerotic content. Purdy recalled in 1993 that he was "burned at the stake" in the New York Times review of Eustace Chisholm. Critically it was thrown to an interpretation of and by this new Kauffmann assessment (quoted as the source in the review) and was vehemently condemned on all grounds including moral ones. The "noble" hatchet type review followed exactly the policy which had been set forth two years earlier. The attack surrounding the book chilled Purdy's growing popularity though the book sold more copies than any of his other works. 
Combined with this critical reception (and its effect on Purdy) of both Cabot Wright Begins and Eustace Chisholm & the Works was the fact that, by the time of publication of these novels, all his immediate family, his friends and his supporters had died. This included Sitwell, Van Vechten, Parker, Powys, and Purdy's brother who had been a noteworthy actor in New York City and very important to his development in literature. This eliminated all the defenders of both him as a writer and the two novels themselves. Osborn Andreas, his patron, had also died. All these deaths occurred within a two-year period between 1965 and 1967, devastating Purdy's basis of support financially, critically and personally. "I soon realized that if my life up to then had been a series of pitched battles, it was to be in the future a kind of endless open warfare", Purdy wrote in an autobiographical sketch in 1984. Later works At a loss to know how to proceed and with his career seemingly shattered, Purdy began looking at pictures of his long-dead relatives for solace and validation. He began to remember ever more vividly the stories his grandmother and great-grandmother told him when he was a child, about eminent people, mostly women, and most often on the outside of a hidebound code of acceptance in the long-ago towns of the hill country of Ohio. In 1968, he began a series of independent but interconnected books (and plays) about the characters who populated these tales from his childhood, Sleepers in Moon Crowned Valleys. In his hands, they were to become the voices and journeys of an almost mythic people of a uniquely different and undiscovered America. He would follow them in their navigation through life and circumstance. The narratives were something that could be found perhaps in the archives of a historical society in the towns set into the farm country and rolling hills of the Midwest. 
Through these memories there began to flow also the remembrance of the country vernacular and way of speaking of his great-grandparents. He began to create, in association with these individuals and their stories, a voice that Paul Bowles would call "the closest thing we have to a classical American colloquial". Regarding Sleepers in Moon Crowned Valleys, Gore Vidal stated in his New York Times essay, "Each novel stands entire by itself while the whole awaits archeology and constitution of a work that is already like no other." As part of the series in 1974 he published The House of the Solitary Maggot, which is often regarded as his most ambitious work. It was largely ignored. In 1978, he published Narrow Rooms (a set of violent and obsessive homosexual relationships, based in West Virginia). This was nearly developed into a film directed by Derek Jarman in 1992 for Channel Four, but Purdy objected to the casting of Kevin Collins. Jarman refused any other actor, so the film stalled. Breakthrough in evaluation The 1997 publication of Purdy's final novel, Gertrude of Stony Island Avenue, reflected a reappraisal of his work. A New York Times review assessed him as a "singular American visionary". On the last reprints of several of his books, a further essay by Gore Vidal in The New York Times, entitled "The Novelist As Outlaw," framed him as "an authentic American genius". In 2005, the novel that had held Purdy's reputation at bay for decades, Eustace Chisholm and the Works, received the Clifton Fadiman Award at the Mercantile Library. Given to overlooked novels, the prize was presented to Purdy by Jonathan Franzen, who declared in his speech, "Mr. Purdy’s novel is so good that almost any novel you read immediately after it will seem at least a little bit posturing, or dishonest, or self-admiring, in comparison." 
Following several reissues of previously out-of-print novels, as well as Vidal's appreciation in The New York Times Book Review, Purdy's work again enjoyed a brief small renaissance in the first decade of the 2000s, including among younger writers. As Albee wrote, "there is a Purdy renaissance every ten years, like clockwork". Shortly after his death in 2009, a book of plays, Selected Plays of James Purdy, including Brice, Ruthanna Elder, Where Quentin Goes and The Paradise Circus, was published by Ivan R. Dee. It focuses on Purdy's playwriting as being his first form of writing since childhood, when he wrote plays for his brother to perform. John Waters contributed the following blurb on the cover: "James Purdy's Selected Plays will break your damaged little heart." Further evidence of the twenty-first-century revival of Purdy's reputation is Oxford University Press's publication of his biography by Michael Snyder. Snyder says that "Purdy got under the skin of America to something deep, universal and macabre." Later life For nearly 50 years he lived and wrote in a small apartment in a Brooklyn Heights landmarked building surrounded by dozens of framed boxing prints from the turn of the 20th century, bare-knuckled champs in the makeshift outside rings of their day. To the end of his life, he continued to dictate to a small team of devoted friends, and ascribed his continued intellectual vigor to the drinking of green tea and the avoidance of alcohol and tobacco. His advice to young writers was to "banish shame". Purdy died aged 94, in a nursing home in Englewood, New Jersey, on March 13, 2009. In accordance with his wishes, he was cremated and his ashes transported to Northamptonshire, England, to be buried next to his benefactor Sitwell. Legacy Purdy wrote anonymous letters from the age of nine: his first was written to his mother's landlady, whom Purdy disliked. 
Subsequently he wrote countless thousands, many now owned by persons who have no idea of their provenance or value, although the style is inimitable. They feature some of Purdy's drawings, which have attracted some attention. The American composer Robert Helps, a close friend of Purdy's, used Purdy's texts in two of his works, The Running Sun and Gossamer Noons, both of which have been recorded by the soprano Bethany Beardslee. The American song composer Richard Hundley composed many songs to poems of Purdy, also his friend of several decades in New York City. Some of his works set to Purdy's poetry, like "Come Ready and See Me", have been praised as true classics in the medium of the American song. In an autobiographical sketch in 1984, Purdy stated, "My work has been compared to an underground river which is flowing often undetected through the American landscape". He received the Bill Whitehead Award for Lifetime Achievement from Publishing Triangle in 1991. Bibliography 63: Dream Palace (short stories) (1956) Malcolm (1959) Colour of Darkness (short stories) (1961) The Nephew (1961) Children Is All (1963) Cabot Wright Begins (1965) Eustace Chisholm and the Works (1967) Jeremy's Version (1970) I Am Elijah Thrush (1972) Color of Darkness & Malcolm (1974) The House of the Solitary Maggot (1974) In a Shallow Grave (1976) A Day After the Fair: A Collection of Plays and Short Stories (1977) Narrow Rooms (1978) Lessons and Complaints (poems) (1978) Dream Palaces: Three Novels (omnibus) (1980) Proud Flesh: Four Short Plays (1980) Mourners Below (1981) Scrap of Paper & The Berry-Picker: Two Plays by James Purdy (1981), published by Sylvester & Orphanos On Glory's Course (1984) The Brooklyn Branding Parlors (poems) (1986) In the Hollow of His Hand (1986) The Candles of Your Eyes (1988) Garments the Living Wear (1989) Collected Poems (1990) Out with the Stars (1992) In the Night of Time and Four Other Plays (1992) Dream Palace: Selected Stories, 1956–87 (1992) Reaching Rose 
(1994)Epistles of Care (1995)Gertrude of Stony Island Avenue (1996)Moe's Villa and Other Stories (short stories) (2000, 2005)James Purdy: Selected Plays (2009) The Complete Short Stories of James Purdy (2013) LETTERS "The Correspondence of James Purdy and John Cowper Powys 1956–1963", edited with an introduction by Michael Ballin and Charles Lock. Powys Journal, Vol. XXIII (August 2013). References Cited sources Snyder, Michael E. (2009) Mixedblood Metaphors: Allegories of Native America in the Fiction of James Purdy'', Doctoral Thesis. University of Oklahoma External links The James Purdy Society The James Purdy Papers at The Ohio State University's Rare Books & Manuscripts Library A compendium of New York Times' reviews and articles on James Purdy and his work The Literary Encyclopedia entry on James Purdy Who Is James Purdy? – an appreciation from Edward Albee A conversation with James Purdy Hyperion: On the Future of Aesthetics, Vol. VI, No. 1. This special issue devoted to James Purdy features essays, interviews, a bibliography, and more. An Autobiographical Sketch by James Purdy James Purdy: Memento Mori (jamespurdy.org) James Purdy manuscripts collection held by Special Collections, University of Delaware Library Robert A. Wilson collection related to James Purdy held by Special Collections, University of Delaware Library James Purdy Papers. Yale Collection of American Literature, Beinecke Rare Book and Manuscript Library. James Purdy Papers at the Harry Ransom Center. 
1914 births 2009 deaths 20th-century American novelists 20th-century American dramatists and playwrights 20th-century American educators 20th-century American poets American expatriates in Cuba American expatriates in Mexico American LGBT military personnel American male novelists Schoolteachers from Ohio Bowling Green State University alumni People from Brooklyn Heights English-language poets American gay writers Interpreters Lawrence University faculty Language teachers Military personnel from New York City Writers from Brooklyn Novelists from New York City People from Hicksville, Ohio People from Findlay, Ohio University of Chicago alumni American LGBT dramatists and playwrights United States Army soldiers Novelists from Ohio American LGBT poets American LGBT novelists American male short story writers American male poets American male dramatists and playwrights 20th-century American short story writers PEN/Faulkner Award for Fiction winners 20th-century American male writers Findlay High School alumni 20th-century translators 20th-century American LGBT people Gay poets
Kankan Region is a region of Guinea located in the east of the country. It is the largest region of Guinea by area. The region has five (5) prefectures (Kankan, Kérouané, Kouroussa, Mandiana and Siguiri). It is bordered by the countries of Mali and Côte d'Ivoire and the Guinean regions of Nzérékoré and Faranah. Administrative divisions Kankan Region is divided into five prefectures; which are further sub-divided into sub-prefectures: References Regions of Guinea
```c // Code generated by gioui.org/cpu/cmd/compile DO NOT EDIT. //go:build linux && (arm64 || arm || amd64) // +build linux // +build arm64 arm amd64 #include <stdint.h> #include <stddef.h> #include "abi.h" #include "runtime.h" #include "binning_abi.h" const struct program_info binning_program_info = { .has_cbarriers = 1, .min_memory_size = 100000, .desc_set_size = sizeof(struct binning_descriptor_set_layout), .workgroup_size_x = 128, .workgroup_size_y = 1, .workgroup_size_z = 1, .begin = binning_coroutine_begin, .await = binning_coroutine_await, .destroy = binning_coroutine_destroy, }; ```
Joel Herschel Williams (March 18, 1926 – March 10, 1997) was an American football center in the National Football League (NFL) for the Baltimore Colts. He also played in the All-America Football Conference (AAFC) for the San Francisco 49ers. Williams played college football at the University of Texas and was drafted in the 22nd round of the 1948 NFL Draft by the Washington Redskins. External links Just Sports Stats 1926 births 1997 deaths Sportspeople from San Angelo, Texas Players of American football from Texas American football centers Canadian football offensive linemen American players of Canadian football Texas Longhorns football players San Francisco 49ers (AAFC) players Baltimore Colts (1947–1950) players Edmonton Elks players Hamilton Tiger-Cats players San Francisco 49ers players
Daluwakgoda is a small town in Sri Lanka. It is located within Southern Province. See also List of towns in Southern Province, Sri Lanka External links Populated places in Southern Province, Sri Lanka
```java /* * Use of this source code is governed by the GPL v3 license * that can be found in the LICENSE file. */ package de.neemann.digital; /** * Helper to create a list of strings. * Takes care of adding a separator at the right places in between the items. */ public class StringList { private final StringBuilder builder; private boolean first; private String sep; /** * Create a new instance */ public StringList() { this(new StringBuilder()); } /** * Creates a new instance * * @param builder the StringBuilder to use */ public StringList(StringBuilder builder) { this.builder = builder; this.first = true; this.sep = " "; } /** * Sets the separator. * The default value is a blank. * * @param sep the separator * @return this for chained calls */ public StringList separator(String sep) { this.sep = sep; return this; } /** * Adds a item to the list. * Adds a separator if needed. * * @param item the item to add. * @return this for chained calls */ public StringList add(String item) { if (first) first = false; else builder.append(sep); builder.append(item); return this; } @Override public String toString() { return builder.toString(); } } ```
```javascript The difference between null, undefined and NaN Detect an error type `console.*` in JavaScript Inaccuracy of binary floating-point format Round numbers to `N` decimals ```
The list of ship launches in 1925 includes a chronological list of some ships launched in 1925. References Sources 1925 Ship launches
Billy Lau Nam Kwong (born 3 April 1954) is a Hong Kong film actor. He is best known for playing the Police Captain in Mr. Vampire (1985), and went on to be cast in similar roles. He has appeared in many comedy and horror films. Personal life Before acting Lau was an optician for some time and treated Bey Logan's wife. After the closure of Digital Broadcasting Corporation on 10 October 2012, Lau, activists and radio hosts began a three-day sit-in protest in front of the government headquarters due to freedom of speech concerns. Lau went on hunger strike for more than 130 hours and eventually went to hospital. An Internet radio station D100 was established due to the closure of Digital Broadcasting Corporation. Filmography TV Series Film TV ads 1984–1986: Ocean Park Hong Kong Nomination Lau was nominated at the Hong Kong Film Awards for Best Supporting Actor and Best Newcomer but lost to Maggie Cheung – Behind the Yellow Line References External links Billy Lau at Hong Kong Movie Database 1954 births Living people 20th-century Hong Kong male actors 21st-century Hong Kong male actors Hong Kong male film actors Hong Kong film directors Hong Kong film presenters Hong Kong film producers Hong Kong screenwriters
```ruby require_relative '../../spec_helper' require_relative '../../shared/file/grpowned' describe "File.grpowned?" do it_behaves_like :file_grpowned, :grpowned?, File it "returns false if file the does not exist" do File.grpowned?("i_am_a_bogus_file").should == false end end ```
```tsx
import { useCallback, useEffect, useRef } from 'react'
import { EventSourcePolyfill } from 'event-source-polyfill'
import { mockBackend, sseUrl } from '../lib/consts'
import { useNumber } from 'react-use'
import {
  ResourcesIdSortedByWorkspaceIds,
  SerializedAppEvent,
} from '../interfaces/db/appEvents'
import { getMapFromEntityArray } from '../../design/lib/utils/array'
import { getResources } from '../api/teams/resources'
import { SerializedWorkspace } from '../interfaces/db/workspace'
import { SerializedTag } from '../interfaces/db/tag'
import { useGlobalData } from '../lib/stores/globalData'
import { useNav } from '../lib/stores/nav'
import { usePage } from '../lib/stores/pageStore'
import { SerializedTeam } from '../interfaces/db/team'
import { getTemplate } from '../api/teams/docs/templates'
import { getUniqueFolderAndDocIdsFromResourcesIds } from '../lib/utils/patterns'
import { getAccessToken, useElectron } from '../lib/stores/electron'
import { useNotifications } from '../../design/lib/stores/notifications'
import { useComments } from '../lib/stores/comments'
import { SerializedView } from '../interfaces/db/view'

interface EventSourceProps {
  teamId: string
}

const defaultReconnectionDelay = 500 // 500 ms initial backoff
const maxReconnectionDelay = 600000 // 10 min cap

/**
 * Subscribes to the team's server-sent-event stream and dispatches each
 * incoming app event to the matching store updater. Reconnects with
 * exponential backoff on stream errors. Renders nothing.
 */
const EventSource = ({ teamId }: EventSourceProps) => {
  const eventSourceRef = useRef<EventSource | undefined>()
  const [eventSourceSetupCounter, { inc }] = useNumber(0)
  const reconnectionDelayRef = useRef<number>(defaultReconnectionDelay)
  const { usingElectron, sendToElectron } = useElectron()
  const {
    team,
    removeUserInPermissions,
    updateUserInPermissions,
    updateTeamSubscription,
    updateSinglePermission,
    removeSinglePermission,
    setPartialPageData,
  } = usePage()
  const {
    removeFromTagsMap,
    updateTagsMap,
    updateWorkspacesMap,
    removeFromWorkspacesMap,
    docsMap,
    foldersMap,
    removeFromDocsMap,
    removeFromFoldersMap,
    updateDocsMap,
    updateFoldersMap,
    updateTemplatesMap,
    removeFromTemplatesMap,
    updateSmartViewsMap: updateSmartViewFoldersMap,
    removeFromSmartViewsMap: removeFromSmartViewFoldersMap,
    updateAppEventsMap,
    removeFromViewsMap,
    updateViewsMap,
  } = useNav()
  const {
    setPartialGlobalData,
    globalDataRef,
    globalData: { currentUser, teams },
  } = useGlobalData()
  const { commentsEventListener } = useComments()
  const { notificationsEventListener } = useNotifications()

  // Opens (or re-opens) the SSE connection; schedules a reconnect with
  // doubled delay whenever the stream errors out.
  const setupEventSource = useCallback(
    (url: string) => {
      if (mockBackend) {
        return
      }
      if (eventSourceRef.current != null) {
        eventSourceRef.current.close()
      }
      const accessToken = getAccessToken()
      const newEventSource = new EventSourcePolyfill(url, {
        withCredentials: true,
        headers:
          accessToken != null
            ? {
                ['Authorization']: `Bearer ${accessToken}`,
              }
            : {},
      })
      newEventSource.onerror = () => {
        newEventSource.close()
        setTimeout(() => {
          setupEventSource(eventSourceRef.current!.url)
          // exponential backoff: double the delay each failed attempt, capped
          reconnectionDelayRef.current = Math.min(
            reconnectionDelayRef.current * 2,
            maxReconnectionDelay
          )
        }, reconnectionDelayRef.current)
      }
      eventSourceRef.current = newEventSource
      inc(1)
    },
    [inc]
  )

  useEffect(() => {
    setupEventSource(`${sseUrl}/events/${teamId}`)
    return () => {
      try {
        eventSourceRef.current!.close()
      } catch (error) {}
    }
  }, [teamId, setupEventSource])

  // Strips the removed user from every cached team's permission list.
  const userRemovalEventHandler = useCallback(
    (event: SerializedAppEvent) => {
      if (event.data.userId === null) {
        return
      }
      // global
      setPartialGlobalData({
        teams: globalDataRef.current.teams.map((team) => {
          return {
            ...team,
            permissions: team.permissions.filter(
              (p) => p.user.id !== event.data.userId
            ),
          }
        }),
      })
      //page
      removeUserInPermissions(event.data.userId)
    },
    [setPartialGlobalData, globalDataRef, removeUserInPermissions]
  )

  const userUpdateEventHandler = useCallback(
    (event: SerializedAppEvent) => {
      if (event.userId != null && event.data.displayName != null) {
        updateUserInPermissions({
          id: event.userId,
          displayName: event.data.displayName,
        })
      }
    },
    [updateUserInPermissions]
  )

  // Mirrors subscription changes into the page store and, when embedded in
  // the desktop app, notifies the Electron shell as well.
  const subscriptionChangeEventHandler = useCallback(
    (event: SerializedAppEvent) => {
      if (event.data.subscription.status === 'inactive') {
        updateTeamSubscription(undefined)
        if (usingElectron) {
          sendToElectron('subscription-delete', event.data.subscription)
        }
      } else {
        updateTeamSubscription(event.data.subscription)
        if (usingElectron) {
          sendToElectron('subscription-update', event.data.subscription)
        }
      }
    },
    [updateTeamSubscription, usingElectron, sendToElectron]
  )

  const permissionsUpdateEventHandler = useCallback(
    (event: SerializedAppEvent) => {
      if (event.data.userPermissions != null) {
        updateSinglePermission(event.data.userPermissions)
      }
    },
    [updateSinglePermission]
  )

  const permissionsRemoveEventHandler = useCallback(
    (event: SerializedAppEvent) => {
      if (event.data.userPermissionsId != null) {
        removeSinglePermission(event.data.userPermissionsId)
      }
    },
    [removeSinglePermission]
  )

  // Merges a partial team update into both the page store and the global
  // team list.
  const teamUpdateHandler = useCallback(
    (event: SerializedAppEvent) => {
      const eventTeam = event.data.team as Partial<SerializedTeam>
      if (eventTeam != null && team != null) {
        setPartialPageData({ team: { ...team, ...eventTeam } })
        const updatedTeams = teams.map((t) => {
          if (t.id === eventTeam.id) {
            return { ...t, ...eventTeam }
          }
          return t
        })
        setPartialGlobalData({ teams: updatedTeams })
      }
      if (usingElectron) {
        sendToElectron('team-update', { ...team, ...eventTeam })
      }
    },
    [
      setPartialGlobalData,
      setPartialPageData,
      teams,
      team,
      usingElectron,
      sendToElectron,
    ]
  )

  const tagChangeEventHandler = useCallback(
    (event: SerializedAppEvent) => {
      const tag = event.data.tag as SerializedTag
      if (event.type === 'tagCreate') {
        updateTagsMap([tag.id, tag])
        return
      }
      removeFromTagsMap(tag.id)
    },
    [removeFromTagsMap, updateTagsMap]
  )

  // Handles workspace create/update/removal; on removal, also evicts every
  // cached doc and folder that belonged to the workspace.
  const workspaceChangeEventHandler = useCallback(
    async (event: SerializedAppEvent) => {
      if (event.type !== 'workspaceRemoval') {
        const workspace = event.data.workspace as SerializedWorkspace
        updateWorkspacesMap([workspace.id, workspace])
        const addedUsers = event.data.added as string[]
        if (currentUser != null && addedUsers.includes(currentUser.id)) {
          // the current user gained access: fetch the workspace's contents
          const { docs, folders, workspaces } = await getResources(
            event.teamId!,
            {
              resourcesIds: [],
              workspacesIds: [workspace.id],
            }
          )
          const changedWorkspaces = getMapFromEntityArray(workspaces)
          updateWorkspacesMap(...changedWorkspaces)
          const changedFolders = getMapFromEntityArray(folders)
          updateFoldersMap(...changedFolders)
          const changedDocs = getMapFromEntityArray(docs)
          updateDocsMap(...changedDocs)
        }
        return
      }
      if (typeof event.data.workspace !== 'string') {
        return
      }
      removeFromWorkspacesMap(event.data.workspace)
      removeFromDocsMap(
        ...[...docsMap.values()]
          .filter((doc) => doc.workspaceId === event.data.workspace)
          .map((doc) => doc.id)
      )
      removeFromFoldersMap(
        ...[...foldersMap.values()]
          .filter((folder) => folder.workspaceId === event.data.workspace)
          .map((folder) => folder.id)
      )
    },
    [
      removeFromWorkspacesMap,
      updateWorkspacesMap,
      docsMap,
      removeFromDocsMap,
      foldersMap,
      removeFromFoldersMap,
      currentUser,
      updateDocsMap,
      updateFoldersMap,
    ]
  )

  const templateChangeEventHandler = useCallback(
    async (event: SerializedAppEvent) => {
      if (event.type === 'templateDelete') {
        if (typeof event.data.template === 'string') {
          removeFromTemplatesMap(event.data.template)
        }
        return
      }
      try {
        if (typeof event.data.template === 'string') {
          const { template } = await getTemplate(event.data.template)
          updateTemplatesMap([template.id, template])
        } else {
          updateTemplatesMap([event.data.template.id, event.data.template])
        }
      } catch (error) {}
    },
    [removeFromTemplatesMap, updateTemplatesMap]
  )

  // Re-fetches the resources named by the event and reconciles the local
  // workspace/folder/doc maps, removing entries the server no longer returns.
  const eventSourceResourceUpdateHandler = useCallback(
    async (event: SerializedAppEvent) => {
      try {
        if (event.teamId == null) {
          return
        }
        const resourcesIds: string[] = []
        const workspacesIds: string[] = []
        if (event.data != null) {
          if (
            event.type === 'resourcesUpdate' &&
            event.data['resources'] != null
          ) {
            const data = event.data[
              'resources'
            ] as ResourcesIdSortedByWorkspaceIds
            const idSet = new Set<string>()
            Object.keys(data).forEach((workspaceId) => {
              workspacesIds.push(workspaceId)
              ;(data[workspaceId] || []).forEach((resourceId) => {
                idSet.add(resourceId)
              })
            })
            resourcesIds.push(...idSet.values())
          }
          if (event.data['resource'] != null) {
            resourcesIds.push(event.data['resource'])
          }
          if (event.data['workspaceId'] != null) {
            workspacesIds.push(event.data['workspaceId'])
          }
        }
        const { docs, folders, workspaces } = await getResources(event.teamId, {
          resourcesIds,
          workspacesIds,
          minimal: true,
        })
        /** -- update -- **/
        const changedWorkspaces = getMapFromEntityArray(workspaces)
        updateWorkspacesMap(...changedWorkspaces)
        workspacesIds.forEach((id) => {
          if (!changedWorkspaces.has(id)) {
            removeFromWorkspacesMap(id)
          }
        })
        const changedFolders = getMapFromEntityArray(folders)
        updateFoldersMap(...changedFolders)
        const changedDocs = getMapFromEntityArray(docs)
        updateDocsMap(...changedDocs)
        /** check removals **/
        const { uniqueFoldersIds, uniqueDocsIds } =
          getUniqueFolderAndDocIdsFromResourcesIds(resourcesIds)
        uniqueFoldersIds.forEach((folderId) => {
          if (!changedFolders.has(folderId)) {
            removeFromFoldersMap(folderId)
          }
        })
        uniqueDocsIds.forEach((docId) => {
          if (!changedDocs.has(docId)) {
            removeFromDocsMap(docId)
          }
        })
      } catch (error) {}
    },
    [
      updateDocsMap,
      removeFromDocsMap,
      updateFoldersMap,
      updateWorkspacesMap,
      removeFromFoldersMap,
      removeFromWorkspacesMap,
    ]
  )

  const smartViewUpdateHandler = useCallback(
    (event: SerializedAppEvent) => {
      updateSmartViewFoldersMap([event.data.smartView.id, event.data.smartView])
    },
    [updateSmartViewFoldersMap]
  )

  const smartViewDeleteHandler = useCallback(
    (event: SerializedAppEvent) => {
      removeFromSmartViewFoldersMap(event.data.smartViewId)
    },
    [removeFromSmartViewFoldersMap]
  )

  const viewChangeEventHandler = useCallback(
    (event: SerializedAppEvent) => {
      const view = event.data.view as SerializedView
      if (event.type === 'viewDeleted') {
        removeFromViewsMap(view.id)
        return
      }
      updateViewsMap([view.id, view])
    },
    [removeFromViewsMap, updateViewsMap]
  )

  /// re-assign handler on change
  useEffect(() => {
    if (eventSourceRef.current != null && eventSourceSetupCounter > 0) {
      // a successful open resets the reconnection backoff
      eventSourceRef.current.onopen = () =>
        (reconnectionDelayRef.current = defaultReconnectionDelay)
      eventSourceRef.current.onmessage = (eventData: MessageEvent) => {
        const event = JSON.parse(eventData.data) as SerializedAppEvent
        switch (event.type) {
          case 'teamUpdate':
            teamUpdateHandler(event)
            break
          case 'permissionsCreate':
            permissionsUpdateEventHandler(event)
            break
          case 'permissionsRemoval':
            permissionsRemoveEventHandler(event)
            break
          case 'subscriptionUpdate':
          case 'subscriptionCreate':
            subscriptionChangeEventHandler(event)
            break
          case 'createDoc':
          case 'contentUpdate':
          case 'resourcesUpdate':
          case 'archiveDoc':
          case 'unarchiveDoc':
            eventSourceResourceUpdateHandler(event)
            break
          case 'userRemoval':
            userRemovalEventHandler(event)
            break
          case 'userUpdate':
            userUpdateEventHandler(event)
            break
          case 'tagCreate':
          case 'tagRemoval':
            tagChangeEventHandler(event)
            break
          case 'templateDelete':
          case 'templateUpdate':
            templateChangeEventHandler(event)
            break
          case 'workspaceCreate':
          case 'workspaceRemoval':
          case 'workspaceUpdate':
            workspaceChangeEventHandler(event)
            break
          case 'commentThreadCreated':
          case 'commentThreadUpdated':
          case 'commentThreadDeleted':
          case 'commentCreated':
          case 'commentUpdated':
          case 'commentDeleted':
            commentsEventListener(event)
            break
          case 'smartViewCreate':
          case 'smartViewUpdate':
            smartViewUpdateHandler(event)
            break
          case 'smartViewDelete':
            smartViewDeleteHandler(event)
            break
          case 'notificationCreated':
          case 'notificationViewed':
            notificationsEventListener(event)
            break
          case 'viewCreated':
          case 'viewUpdated':
          case 'viewDeleted':
            viewChangeEventHandler(event)
            break
        }
        updateAppEventsMap([event.id, event])
      }
    }
    return
  }, [
    eventSourceResourceUpdateHandler,
    eventSourceSetupCounter,
    userRemovalEventHandler,
    userUpdateEventHandler,
    subscriptionChangeEventHandler,
    permissionsRemoveEventHandler,
    permissionsUpdateEventHandler,
    workspaceChangeEventHandler,
    tagChangeEventHandler,
    teamUpdateHandler,
    templateChangeEventHandler,
    commentsEventListener,
    smartViewUpdateHandler,
    smartViewDeleteHandler,
    updateAppEventsMap,
    notificationsEventListener,
    viewChangeEventHandler,
  ])

  return null
}

export default EventSource
```
Events from the year 1663 in Sweden Incumbents Monarch – Charles XI Events The Swedish Collegium medicum is founded to supervise the medical professions. The pirate Gustav Skytte is exposed, tried and executed. Births August - Amalia Königsmarck, painter, actress and poet (died 1740) unknown - Gävle Boy, notorious witch finder (died 1676) 16 April - Maria Elizabeth of Pfalz-Zweibrücken, princess (died 1748) Deaths 8 March - Hans Christoff von Königsmarck, soldier (born 1600) Johan Björnsson Printz, governor (born 1592) Gustav Skytte, pirate (born 1637) References Years of the 17th century in Sweden Sweden
This is a list of rivers in Niger. This list is arranged by drainage basin, with respective tributaries indented under each larger stream's name. Gulf of Guinea Niger River Sokoto River (Nigeria) Rima River (Nigeria) Goulbi de Maradi River Mékrou River Dallol Maouri Dallol Bosso Vallée de l'Azaouak Vallée de l'Ahzar Oued Ti-n-Amzi Tapoa River Goroubi River Sirba River Dargol River Béli River (Gorouol River) Lake Chad Yobe River Dilia River References Prentice-Hall, Inc., American World Atlas 1985 GEOnet Names Server Niger Rivers
The 1996 Rhythmic Gymnastics European Championships was the 12th edition of the Rhythmic Gymnastics European Championships, which took place from 29 May to 2 June in Asker, Norway. Medal winners Medal table References 1996 in gymnastics Rhythmic Gymnastics European Championships
Claude De Bruyn (1 September 1943 – 27 August 2020) was a Belgian serviceman and television presenter. Biography Lieutenant-Colonel De Bruyn was born in Uccle in 1943. He spent three years at the École royale des cadets before being admitted to the Royal Military Academy. Lastly, he attended the École royale de Gendarmerie. In 1966, he graduated from the University of Liège with a degree in criminology. In 1970, De Bruyn was hired by RTBF to host a weekly television program on road safety, titled Contacts. In 1978, the manual Feu vert pour le permis de conduire was published, intending to prepare drivers for their upcoming exams. De Bruyn left the program in 1980. He was promoted to the rank of Major in the Belgian Armed Forces and became District Commander of Seraing. In 1984, he was promoted to Lieutenant-Colonel. From 1986 to 1988, he presented the program Trafic, similar to Contacts, on the channel RTL-TVI. In 1996, the publishing house De Boeck took over the production of Feu vert pour le permis de conduire. De Bruyn continued his work for the manual with De Boeck until his retirement in 1999. However, he remained passionate about driving safety and fervently defended driving protocols. In 2007, his son, Cedric, began working for road safety. Claude De Bruyn died on 27 August 2020 at the age of 76. Publications Feu vert pour le permis de conduire Feu vert pour le guide References Belgian television presenters People from Uccle 1943 births 2020 deaths
Lalić is a surname. Notable people with the surname include: Aleksandra Lalić, Serbian fashion designer Bogdan Lalić, Croatian chess grandmaster Dražen Lalić, Croatian sociologist Gojko Lalić, Serbian American chemistry professor Ivan V. Lalić, Serbian poet Luka Lalić, Serbian football coach Maja Vidaković Lalić, Serbian architect Maria Lalić, British artist Marin Lalić, Croatian football player Mihailo Lalić, novelist of Serbian and Montenegrin literature Nataša Lalić, Serbian politician Slobodan Lalić, Serbian football player Susan Lalic, British chess grandmaster Veljko Lalić, Serbian journalist, editor and publicist Vik Lalić, Croatian football player Žanamari Lalić, Croatian pop singer See also Lalich, anglicized version Surnames of Croatian origin Surnames of Serbian origin
```prolog # The syslogd binds UDP socket on ::1. # The client writes a message into a ::1 UDP socket. # The syslogd writes it into a file and through a pipe. # The syslogd passes it via UDP to the loghost. # The server receives the message on its UDP socket. # Find the message in client, file, pipe, syslogd, server log. # Check that the file log contains the localhost name. # Check that fstat contains a bound UDP socket. use strict; use warnings; use Socket; our %args = ( client => { connect => { domain => AF_INET6, addr => "::1", port => 514 }, }, syslogd => { options => ["-U", "[::1]"], fstat => { qr/^root .* internet/ => 0, qr/ internet6 dgram udp \[::1\]:514$/ => 1, }, }, file => { loggrep => qr/ localhost /. get_testgrep(), }, ); 1; ```
The Lakeman Brothers were an English folk music trio, consisting of Sean Lakeman, Sam Lakeman and Seth Lakeman. They released one album, Three Piece Suite, in 1994 before forming the band Equation with Kate Rusby and Kathryn Roberts. Seth has gone on to have success as a solo artist, particularly with his Mercury Music Prize-nominated second album Kitty Jay (2004). Sean has recorded two albums with Kathryn Roberts and tours with Seth. Sam performs with his wife Cara Dillon. References Further reading English folk musical groups English musical trios Sibling musical trios
```c /* Duplicate a file descriptor result, avoiding clobbering STD{IN,OUT,ERR}_FILENO, with specific flags. This program is free software: you can redistribute it and/or modify (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the along with this program. If not, see <path_to_url */ /* Written by Paul Eggert and Eric Blake. */ #include <config.h> /* Specification. */ #include "unistd-safer.h" #include <fcntl.h> #include <unistd.h> /* Like dup, but do not return STDIN_FILENO, STDOUT_FILENO, or STDERR_FILENO. If FLAG contains O_CLOEXEC, behave like fcntl(F_DUPFD_CLOEXEC) rather than fcntl(F_DUPFD). */ int dup_safer_flag (int fd, int flag) { return fcntl (fd, (flag & O_CLOEXEC) ? F_DUPFD_CLOEXEC : F_DUPFD, STDERR_FILENO + 1); } ```
```java /* * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * * path_to_url * * Unless required by applicable law or agreed to in writing, software * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ package org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert; import lombok.Getter; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.AbstractExpectedSQLSegment; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.dml.InsertStatementTestCase; import javax.xml.bind.annotation.XmlElement; import java.util.LinkedList; import java.util.List; /** * Expected multi table conditional into else clause. */ @Getter public final class ExpectedMultiTableConditionalIntoElseClause extends AbstractExpectedSQLSegment { @XmlElement(name = "insert-statement") private final List<InsertStatementTestCase> insertTestCases = new LinkedList<>(); } ```
A grass court is one of the types of tennis court on which the sport of tennis, originally known as "lawn tennis", is played. Grass courts are made of grasses in different compositions depending on the tournament. Although grass courts are more traditional than other types of tennis courts, maintenance costs of grass courts are higher than those of hard courts and clay courts. Grass courts (in the absence of suitable covers) must be left for the day if rain appears, as the grass becomes very slippery when wet and will not dry for many hours. This is a disadvantage on outdoor courts compared to using hard and clay surfaces, where play can resume in 30 to 120 minutes after the end of rain (clay courts even remain playable in light rain). Grass courts are most common in the United Kingdom and Australia, although the Northeastern United States also has some private grass courts. Play style Because grass courts tend to be slippery, the ball often skids and bounces low while retaining most of its speed, rarely rising above knee height. In addition, there are often bad bounces. As a result, players must reach the ball faster relative to other surfaces, and rallies are likely to be comparatively brief; therefore, speed and power are rewarded on grass. On grass, the serve and return play a major part in determining the outcome of the point, increasing the importance of serving effectively, and maintaining focus in exchanges which can be heavily influenced by lapses in concentration. A grass court favours a serve and volley style of play. Players The most successful singles players on grass in the Open Era have been Martina Navratilova, Roger Federer, Margaret Court, Billie Jean King, Pete Sampras, Steffi Graf, Serena Williams, Novak Djokovic, Rod Laver, John Newcombe, Evonne Goolagong Cawley, Björn Borg, Chris Evert, and Venus Williams. 
All have won at least five major singles titles on grass: Navratilova won twelve, Federer and Court eight each, King, Sampras, Graf, Serena Williams, and Djokovic seven each. Other players who have been relatively successful on grass during the Open Era are Arthur Ashe, Ken Rosewall, Boris Becker, John McEnroe, Stefan Edberg, Virginia Wade, Rafael Nadal, Petra Kvitová and Andy Murray. Among men, Sampras is lauded by many tennis analysts as one of the greatest grass-court players of all time. He won seven Wimbledon singles titles in eight years from 1993 through 2000, with his only loss in that span coming in the 1996 quarterfinals. Roger Federer is statistically the most successful male grass court player of the Open Era: he has won an Open Era-record 19 grass court titles, including ten Halle Open titles, an all-time record eight Wimbledon singles titles, and a Stuttgart Open title. Federer has contested an all-time record twelve Wimbledon singles finals, and has the longest grass court winning streak in the Open Era, when he won 65 consecutive matches on grass between 2003 and 2008, until he was beaten by Rafael Nadal in the 2008 Wimbledon final. The most successful active female grass-court player is Venus Williams, with five Wimbledon singles titles. Venus has won five out of her nine Wimbledon finals appearances (losing three to her sister, Serena), and the pair have won six titles in the ladies' doubles together. Professional tournaments played on grass Compared to clay and hard courts, the professional grass court season is much shorter. Until 2014 it consisted only of Wimbledon, two weeks of tournaments in Britain and continental Europe leading up to it, and the Hall of Fame Tennis Championships at Newport, Rhode Island, United States the week after. In 2015 it was extended, with an extra week between the French Open and Wimbledon. On the ATP Tour, the Stuttgart Open became a grass court tournament that year. 
In 2017 a new ATP 250 tournament in Antalya, Turkey, was played a week before Wimbledon. On the WTA Tour, Mallorca, Spain, began hosting a grass court tournament in 2016. Summer grass season See also Clay court Hardcourt Carpet court References External links LTA – Grass Court Guidance Tennis court surfaces Grass field surfaces
```xml import { Component, OnInit } from '@angular/core'; import { MenuItem, MessageService } from 'primeng/api'; import { Code } from '@domain/code'; @Component({ selector: 'linear-doc', template: ` <app-docsectiontext> <p>SpeedDial items are defined with the <i>model</i> property based on MenuModel API. Default orientation of the items is linear and <i>direction</i> property is used to define the position of the items related to the button.</p> </app-docsectiontext> <div class="card"> <div style="height: 500px; position: relative;" class="speeddial-linear-demo"> <p-toast /> <p-speedDial [model]="items" direction="up" /> <p-speedDial [model]="items" direction="down" /> <p-speedDial [model]="items" direction="left" /> <p-speedDial [model]="items" direction="right" /> </div> </div> <app-code [code]="code" selector="speed-dial-linear-demo"></app-code> `, providers: [MessageService] }) export class LinearDoc implements OnInit { items: MenuItem[] | undefined; constructor(private messageService: MessageService) {} ngOnInit() { this.items = [ { icon: 'pi pi-pencil', command: () => { this.messageService.add({ severity: 'info', summary: 'Add', detail: 'Data Added' }); } }, { icon: 'pi pi-refresh', command: () => { this.messageService.add({ severity: 'success', summary: 'Update', detail: 'Data Updated' }); } }, { icon: 'pi pi-trash', command: () => { this.messageService.add({ severity: 'error', summary: 'Delete', detail: 'Data Deleted' }); } }, { icon: 'pi pi-upload', routerLink: ['/fileupload'] }, { icon: 'pi pi-external-link', target: '_blank', url: 'path_to_url } ]; } code: Code = { basic: `<p-speedDial [model]="items" direction="up" /> <p-speedDial [model]="items" direction="down" /> <p-speedDial [model]="items" direction="left" /> <p-speedDial [model]="items" direction="right" />`, html: `<div class="card"> <div style="height: 500px; position: relative;" class="speeddial-linear-demo"> <p-toast /> <p-speedDial [model]="items" direction="up" /> <p-speedDial 
[model]="items" direction="down" /> <p-speedDial [model]="items" direction="left" /> <p-speedDial [model]="items" direction="right" /> </div> </div>`, typescript: `import { Component, OnInit } from '@angular/core'; import { MenuItem, MessageService } from 'primeng/api'; import { SpeedDialModule } from 'primeng/speeddial'; import { ToastModule } from 'primeng/toast'; @Component({ selector: 'speed-dial-linear-demo', templateUrl: './speed-dial-linear-demo.html', styles: [ \`:host ::ng-deep { .speeddial-linear-demo { .p-speeddial-direction-up { left: calc(50% - 2rem); bottom: 0; } .p-speeddial-direction-down { left: calc(50% - 2rem); top: 0; } .p-speeddial-direction-left { right: 0; top: calc(50% - 2rem); } .p-speeddial-direction-right { left: 0; top: calc(50% - 2rem); } } }\` ], standalone: true, imports: [SpeedDialModule, ToastModule], providers: [MessageService] }) export class SpeedDialLinearDemo implements OnInit { items: MenuItem[] | undefined; constructor(private messageService: MessageService) {} ngOnInit() { this.items = [ { icon: 'pi pi-pencil', command: () => { this.messageService.add({ severity: 'info', summary: 'Add', detail: 'Data Added' }); } }, { icon: 'pi pi-refresh', command: () => { this.messageService.add({ severity: 'success', summary: 'Update', detail: 'Data Updated' }); } }, { icon: 'pi pi-trash', command: () => { this.messageService.add({ severity: 'error', summary: 'Delete', detail: 'Data Deleted' }); } }, { icon: 'pi pi-upload', routerLink: ['/fileupload'] }, { icon: 'pi pi-external-link', target:'_blank', url: 'path_to_url } ]; } }`, scss: `:host ::ng-deep { .speeddial-linear-demo { .p-speeddial-direction-up { left: calc(50% - 2rem); bottom: 0; } .p-speeddial-direction-down { left: calc(50% - 2rem); top: 0; } .p-speeddial-direction-left { right: 0; top: calc(50% - 2rem); } .p-speeddial-direction-right { left: 0; top: calc(50% - 2rem); } } }` }; } ```
```go // This file is part of the go-ethereum library. // // The go-ethereum library is free software: you can redistribute it and/or modify // (at your option) any later version. // // The go-ethereum library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // // along with the go-ethereum library. If not, see <path_to_url package core import ( "github.com/ethereum/go-ethereum/consensus/istanbul" istanbulcommon "github.com/ethereum/go-ethereum/consensus/istanbul/common" ibfttypes "github.com/ethereum/go-ethereum/consensus/istanbul/ibft/types" ) func (c *core) handleRequest(request *istanbul.Request) error { logger := c.logger.New("state", c.state, "seq", c.current.sequence) if err := c.checkRequestMsg(request); err != nil { if err == istanbulcommon.ErrInvalidMessage { logger.Warn("invalid request") return err } logger.Warn("unexpected request", "err", err, "number", request.Proposal.Number(), "hash", request.Proposal.Hash()) return err } logger.Trace("handleRequest", "number", request.Proposal.Number(), "hash", request.Proposal.Hash()) c.current.pendingRequest = request if c.state == ibfttypes.StateAcceptRequest { c.sendPreprepare(request) } return nil } // check request state // return errInvalidMessage if the message is invalid // return errFutureMessage if the sequence of proposal is larger than current sequence // return errOldMessage if the sequence of proposal is smaller than current sequence func (c *core) checkRequestMsg(request *istanbul.Request) error { if request == nil || request.Proposal == nil { return istanbulcommon.ErrInvalidMessage } if c := c.current.sequence.Cmp(request.Proposal.Number()); c > 0 { return istanbulcommon.ErrOldMessage } else if c < 0 { return istanbulcommon.ErrFutureMessage } else { return nil } } func (c *core) storeRequestMsg(request *istanbul.Request) { logger := c.logger.New("state", c.state) 
logger.Trace("Store future request", "number", request.Proposal.Number(), "hash", request.Proposal.Hash()) c.pendingRequestsMu.Lock() defer c.pendingRequestsMu.Unlock() c.pendingRequests.Push(request, float32(-request.Proposal.Number().Int64())) } func (c *core) processPendingRequests() { c.pendingRequestsMu.Lock() defer c.pendingRequestsMu.Unlock() for !(c.pendingRequests.Empty()) { m, prio := c.pendingRequests.Pop() r, ok := m.(*istanbul.Request) if !ok { c.logger.Warn("Malformed request, skip", "msg", m) continue } // Push back if it's a future message err := c.checkRequestMsg(r) if err != nil { if err == istanbulcommon.ErrFutureMessage { c.logger.Trace("Stop processing request", "number", r.Proposal.Number(), "hash", r.Proposal.Hash()) c.pendingRequests.Push(m, prio) break } c.logger.Trace("Skip the pending request", "number", r.Proposal.Number(), "hash", r.Proposal.Hash(), "err", err) continue } c.logger.Trace("Post pending request", "number", r.Proposal.Number(), "hash", r.Proposal.Hash()) go c.sendEvent(istanbul.RequestEvent{ Proposal: r.Proposal, }) } } ```
```smalltalk namespace Npgsql.EntityFrameworkCore.PostgreSQL.Metadata; /// <summary> /// Defines strategies to use when generating values for database columns. /// </summary> /// <remarks> /// See <see href="path_to_url">Model building conventions</see>. /// </remarks> public enum NpgsqlValueGenerationStrategy { /// <summary> /// No Npgsql-specific strategy. /// </summary> None, /// <summary> /// <para> /// A sequence-based hi-lo pattern where blocks of IDs are allocated from the server and /// used client-side for generating keys. /// </para> /// <para> /// This is an advanced pattern--only use this strategy if you are certain it is what you need. /// </para> /// </summary> SequenceHiLo, /// <summary> /// <para> /// Selects the serial column strategy, which is a regular column backed by an auto-created index. /// </para> /// <para> /// If you are creating a new project on PostgreSQL 10 or above, consider using <see cref="IdentityByDefaultColumn" /> instead. /// </para> /// </summary> SerialColumn, /// <summary> /// <para>Selects the always-identity column strategy (a value cannot be provided).</para> /// <para>Available only starting PostgreSQL 10.</para> /// </summary> IdentityAlwaysColumn, /// <summary> /// <para>Selects the by-default-identity column strategy (a value can be provided to override the identity mechanism).</para> /// <para>Available only starting PostgreSQL 10.</para> /// </summary> IdentityByDefaultColumn, /// <summary> /// A pattern that uses a database sequence to generate values for the column. /// </summary> Sequence } /// <summary> /// Extension methods over <see cref="NpgsqlValueGenerationStrategy" />. /// </summary> public static class NpgsqlValueGenerationStrategyExtensions { /// <summary> /// Whether the given strategy is either <see cref="NpgsqlValueGenerationStrategy.IdentityByDefaultColumn" /> or /// <see cref="NpgsqlValueGenerationStrategy.IdentityAlwaysColumn" />. 
/// </summary> public static bool IsIdentity(this NpgsqlValueGenerationStrategy strategy) => strategy is NpgsqlValueGenerationStrategy.IdentityByDefaultColumn or NpgsqlValueGenerationStrategy.IdentityAlwaysColumn; /// <summary> /// Whether the given strategy is either <see cref="NpgsqlValueGenerationStrategy.IdentityByDefaultColumn" /> or /// <see cref="NpgsqlValueGenerationStrategy.IdentityAlwaysColumn" />. /// </summary> public static bool IsIdentity(this NpgsqlValueGenerationStrategy? strategy) => strategy is NpgsqlValueGenerationStrategy.IdentityByDefaultColumn or NpgsqlValueGenerationStrategy.IdentityAlwaysColumn; } ```
```xml /* * Wire * * This program is free software: you can redistribute it and/or modify * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * along with this program. If not, see path_to_url * */ import React from 'react'; import {fireEvent, render, screen, act} from '@testing-library/react'; import {EntropyCanvas} from './EntropyCanvas'; import {EntropyData} from '../../util/Entropy'; import {withIntl, withTheme} from '../util/test/TestUtil'; require('jest-canvas-mock'); describe('EntropyCanvas', () => { afterEach(() => { jest.clearAllMocks(); }); const setStateMock = jest.fn(); const useStateMock: any = (useState: any) => [useState, setStateMock]; jest.spyOn(React, 'useState').mockImplementation(useStateMock); const setErrorMock = jest.fn(); const useErrorMock: any = (useState: any) => [useState, setErrorMock]; jest.spyOn(React, 'useState').mockImplementation(useErrorMock); const [, setEntropy] = useStateMock([]); const [, setError] = useErrorMock(null); const mockOnProgress = jest.fn((entropyData: EntropyData, percent: number, pause: boolean) => { setEntropy(entropyData); setError(pause); }); it('reacts to drawing', async () => { render( withTheme( withIntl( <EntropyCanvas onProgress={mockOnProgress} sizeX={256} sizeY={256} minEntropyBits={3000} minFrames={300} />, ), ), ); const canvas = screen.getByTestId('element-entropy-canvas'); await act(async () => { fireEvent.mouseEnter(canvas); fireEvent.mouseMove(canvas); fireEvent.mouseLeave(canvas); }); expect(setEntropy).toHaveBeenCalledTimes(1); expect(setError).toHaveBeenCalledTimes(7); }); it('starts drawing again after leaving canvas', async () => { render( withTheme( withIntl( <EntropyCanvas onProgress={mockOnProgress} sizeX={256} sizeY={256} minEntropyBits={3000} minFrames={300} />, ), ), ); const canvas = 
screen.getByTestId('element-entropy-canvas'); await act(async () => { fireEvent.mouseEnter(canvas); fireEvent.mouseMove(canvas); fireEvent.mouseLeave(canvas); fireEvent.mouseEnter(canvas); fireEvent.mouseMove(canvas); fireEvent.mouseLeave(canvas); }); expect(setEntropy).toHaveBeenCalledTimes(2); expect(setError).toHaveBeenCalledTimes(14); }); }); ```
```javascript /** * Basic status bar component that can be used as the bottom toolbar of any {@link Ext.Panel}. In addition to * supporting the standard {@link Ext.toolbar.Toolbar} interface for adding buttons, menus and other items, the StatusBar * provides a greedy status element that can be aligned to either side and has convenient methods for setting the * status text and icon. You can also indicate that something is processing using the {@link #showBusy} method. * * Ext.create('Ext.Panel', { * title: 'StatusBar', * // etc. * bbar: Ext.create('Ext.ux.StatusBar', { * id: 'my-status', * * // defaults to use when the status is cleared: * defaultText: 'Default status text', * defaultIconCls: 'default-icon', * * // values to set initially: * text: 'Ready', * iconCls: 'ready-icon', * * // any standard Toolbar items: * items: [{ * text: 'A Button' * }, '-', 'Plain Text'] * }) * }); * * // Update the status bar later in code: * var sb = Ext.getCmp('my-status'); * sb.setStatus({ * text: 'OK', * iconCls: 'ok-icon', * clear: true // auto-clear after a set interval * }); * * // Set the status bar to show that something is processing: * sb.showBusy(); * * // processing.... * * sb.clearStatus(); // once completeed * */ Ext.define('Ext.ux.statusbar.StatusBar', { extend: 'Ext.toolbar.Toolbar', alternateClassName: 'Ext.ux.StatusBar', alias: 'widget.statusbar', requires: ['Ext.toolbar.TextItem'], /** * @cfg {String} statusAlign * The alignment of the status element within the overall StatusBar layout. When the StatusBar is rendered, * it creates an internal div containing the status text and icon. Any additional Toolbar items added in the * StatusBar's {@link #cfg-items} config, or added via {@link #method-add} or any of the supported add* methods, will be * rendered, in added order, to the opposite side. The status element is greedy, so it will automatically * expand to take up all sapce left over by any other items. 
Example usage: * * // Create a left-aligned status bar containing a button, * // separator and text item that will be right-aligned (default): * Ext.create('Ext.Panel', { * title: 'StatusBar', * // etc. * bbar: Ext.create('Ext.ux.statusbar.StatusBar', { * defaultText: 'Default status text', * id: 'status-id', * items: [{ * text: 'A Button' * }, '-', 'Plain Text'] * }) * }); * * // By adding the statusAlign config, this will create the * // exact same toolbar, except the status and toolbar item * // layout will be reversed from the previous example: * Ext.create('Ext.Panel', { * title: 'StatusBar', * // etc. * bbar: Ext.create('Ext.ux.statusbar.StatusBar', { * defaultText: 'Default status text', * id: 'status-id', * statusAlign: 'right', * items: [{ * text: 'A Button' * }, '-', 'Plain Text'] * }) * }); */ /** * @cfg {String} [defaultText=''] * The default {@link #text} value. This will be used anytime the status bar is cleared with the * `useDefaults:true` option. */ /** * @cfg {String} [defaultIconCls=''] * The default {@link #iconCls} value (see the iconCls docs for additional details about customizing the icon). * This will be used anytime the status bar is cleared with the `useDefaults:true` option. */ /** * @cfg {String} text * A string that will be <b>initially</b> set as the status message. This string * will be set as innerHTML (html tags are accepted) for the toolbar item. * If not specified, the value set for {@link #defaultText} will be used. */ /** * @cfg {String} [iconCls=''] * A CSS class that will be **initially** set as the status bar icon and is * expected to provide a background image. 
* * Example usage: * * // Example CSS rule: * .x-statusbar .x-status-custom { * padding-left: 25px; * background: transparent url(images/custom-icon.gif) no-repeat 3px 2px; * } * * // Setting a default icon: * var sb = Ext.create('Ext.ux.statusbar.StatusBar', { * defaultIconCls: 'x-status-custom' * }); * * // Changing the icon: * sb.setStatus({ * text: 'New status', * iconCls: 'x-status-custom' * }); */ /** * @cfg {String} cls * The base class applied to the containing element for this component on render. */ cls : 'x-statusbar', /** * @cfg {String} busyIconCls * The default {@link #iconCls} applied when calling {@link #showBusy}. * It can be overridden at any time by passing the `iconCls` argument into {@link #showBusy}. */ busyIconCls : 'x-status-busy', /** * @cfg {String} busyText * The default {@link #text} applied when calling {@link #showBusy}. * It can be overridden at any time by passing the `text` argument into {@link #showBusy}. */ busyText : 'Loading...', /** * @cfg {Number} autoClear * The number of milliseconds to wait after setting the status via * {@link #setStatus} before automatically clearing the status text and icon. * Note that this only applies when passing the `clear` argument to {@link #setStatus} * since that is the only way to defer clearing the status. This can * be overridden by specifying a different `wait` value in {@link #setStatus}. * Calls to {@link #clearStatus} always clear the status bar immediately and ignore this value. */ autoClear : 5000, /** * @cfg {String} emptyText * The text string to use if no text has been set. If there are no other items in * the toolbar using an empty string (`''`) for this value would end up in the toolbar * height collapsing since the empty string will not maintain the toolbar height. * Use `''` if the toolbar should collapse in height vertically when no text is * specified and there are no other items in the toolbar. 
*/ emptyText : '&#160;', // private activeThreadId : 0, // private initComponent : function(){ var right = this.statusAlign === 'right'; this.callParent(arguments); this.currIconCls = this.iconCls || this.defaultIconCls; this.statusEl = Ext.create('Ext.toolbar.TextItem', { cls: 'x-status-text ' + (this.currIconCls || ''), text: this.text || this.defaultText || '' }); if (right) { this.cls += ' x-status-right'; this.add('->'); this.add(this.statusEl); } else { this.insert(0, this.statusEl); this.insert(1, '->'); } }, /** * Sets the status {@link #text} and/or {@link #iconCls}. Also supports automatically clearing the * status that was set after a specified interval. * * Example usage: * * // Simple call to update the text * statusBar.setStatus('New status'); * * // Set the status and icon, auto-clearing with default options: * statusBar.setStatus({ * text: 'New status', * iconCls: 'x-status-custom', * clear: true * }); * * // Auto-clear with custom options: * statusBar.setStatus({ * text: 'New status', * iconCls: 'x-status-custom', * clear: { * wait: 8000, * anim: false, * useDefaults: false * } * }); * * @param {Object/String} config A config object specifying what status to set, or a string assumed * to be the status text (and all other options are defaulted as explained below). A config * object containing any or all of the following properties can be passed: * * @param {String} config.text The status text to display. If not specified, any current * status text will remain unchanged. * * @param {String} config.iconCls The CSS class used to customize the status icon (see * {@link #iconCls} for details). If not specified, any current iconCls will remain unchanged. * * @param {Boolean/Number/Object} config.clear Allows you to set an internal callback that will * automatically clear the status text and iconCls after a specified amount of time has passed. 
If clear is not * specified, the new status will not be auto-cleared and will stay until updated again or cleared using * {@link #clearStatus}. If `true` is passed, the status will be cleared using {@link #autoClear}, * {@link #defaultText} and {@link #defaultIconCls} via a fade out animation. If a numeric value is passed, * it will be used as the callback interval (in milliseconds), overriding the {@link #autoClear} value. * All other options will be defaulted as with the boolean option. To customize any other options, * you can pass an object in the format: * * @param {Number} config.clear.wait The number of milliseconds to wait before clearing * (defaults to {@link #autoClear}). * @param {Boolean} config.clear.anim False to clear the status immediately once the callback * executes (defaults to true which fades the status out). * @param {Boolean} config.clear.useDefaults False to completely clear the status text and iconCls * (defaults to true which uses {@link #defaultText} and {@link #defaultIconCls}). * * @return {Ext.ux.statusbar.StatusBar} this */ setStatus : function(o) { var me = this; o = o || {}; Ext.suspendLayouts(); if (Ext.isString(o)) { o = {text:o}; } if (o.text !== undefined) { me.setText(o.text); } if (o.iconCls !== undefined) { me.setIcon(o.iconCls); } if (o.clear) { var c = o.clear, wait = me.autoClear, defaults = {useDefaults: true, anim: true}; if (Ext.isObject(c)) { c = Ext.applyIf(c, defaults); if (c.wait) { wait = c.wait; } } else if (Ext.isNumber(c)) { wait = c; c = defaults; } else if (Ext.isBoolean(c)) { c = defaults; } c.threadId = this.activeThreadId; Ext.defer(me.clearStatus, wait, me, [c]); } Ext.resumeLayouts(true); return me; }, /** * Clears the status {@link #text} and {@link #iconCls}. Also supports clearing via an optional fade out animation. * * @param {Object} [config] A config object containing any or all of the following properties. 
If this * object is not specified the status will be cleared using the defaults below: * @param {Boolean} config.anim True to clear the status by fading out the status element (defaults * to false which clears immediately). * @param {Boolean} config.useDefaults True to reset the text and icon using {@link #defaultText} and * {@link #defaultIconCls} (defaults to false which sets the text to '' and removes any existing icon class). * * @return {Ext.ux.statusbar.StatusBar} this */ clearStatus : function(o) { o = o || {}; var me = this, statusEl = me.statusEl; if (o.threadId && o.threadId !== me.activeThreadId) { // this means the current call was made internally, but a newer // thread has set a message since this call was deferred. Since // we don't want to overwrite a newer message just ignore. return me; } var text = o.useDefaults ? me.defaultText : me.emptyText, iconCls = o.useDefaults ? (me.defaultIconCls ? me.defaultIconCls : '') : ''; if (o.anim) { // animate the statusEl Ext.Element statusEl.el.puff({ remove: false, useDisplay: true, callback: function() { statusEl.el.show(); me.setStatus({ text: text, iconCls: iconCls }); } }); } else { me.setStatus({ text: text, iconCls: iconCls }); } return me; }, /** * Convenience method for setting the status text directly. For more flexible options see {@link #setStatus}. * @param {String} text (optional) The text to set (defaults to '') * @return {Ext.ux.statusbar.StatusBar} this */ setText : function(text) { var me = this; me.activeThreadId++; me.text = text || ''; if (me.rendered) { me.statusEl.setText(me.text); } return me; }, /** * Returns the current status text. * @return {String} The status text */ getText : function(){ return this.text; }, /** * Convenience method for setting the status icon directly. For more flexible options see {@link #setStatus}. * See {@link #iconCls} for complete details about customizing the icon. 
* @param {String} iconCls (optional) The icon class to set (defaults to '', and any current icon class is removed) * @return {Ext.ux.statusbar.StatusBar} this */ setIcon : function(cls) { var me = this; me.activeThreadId++; cls = cls || ''; if (me.rendered) { if (me.currIconCls) { me.statusEl.removeCls(me.currIconCls); me.currIconCls = null; } if (cls.length > 0) { me.statusEl.addCls(cls); me.currIconCls = cls; } } else { me.currIconCls = cls; } return me; }, /** * Convenience method for setting the status text and icon to special values that are pre-configured to indicate * a "busy" state, usually for loading or processing activities. * * @param {Object/String} config (optional) A config object in the same format supported by {@link #setStatus}, or a * string to use as the status text (in which case all other options for setStatus will be defaulted). Use the * `text` and/or `iconCls` properties on the config to override the default {@link #busyText} * and {@link #busyIconCls} settings. If the config argument is not specified, {@link #busyText} and * {@link #busyIconCls} will be used in conjunction with all of the default options for {@link #setStatus}. * @return {Ext.ux.statusbar.StatusBar} this */ showBusy : function(o){ if (Ext.isString(o)) { o = { text: o }; } o = Ext.applyIf(o || {}, { text: this.busyText, iconCls: this.busyIconCls }); return this.setStatus(o); } }); ```
"Djomb" is a song performed by French singer Bosh, released in 2020. Commercially, it peaked at number one in France and Wallonia. On 9 August, Italian rapper J-Ax released a remix of the song with Fabri Fibra. Charts Weekly charts Year-end charts Certifications References 2020 singles 2020 songs SNEP Top Singles number-one singles Ultratop 50 Singles (Wallonia) number-one singles French-language songs Pop-rap songs
Svetlana Stepanovna Semyonova (; born 11 May 1958) is a Russian former rower who competed in the 1980 Summer Olympics. References 1958 births Living people Russian female rowers Soviet female rowers Olympic rowers for the Soviet Union Rowers at the 1980 Summer Olympics Olympic bronze medalists for the Soviet Union Olympic medalists in rowing Medalists at the 1980 Summer Olympics World Rowing Championships medalists for the Soviet Union Sportspeople from Pskov
```objective-c #include "DoublyLinkedList.h" template <class T> DoublyLinkedList<T>::DoublyLinkedList() { } template <class T> void DoublyLinkedList<T>::insert(Node* previousNode, Node* newNode) { if (previousNode == nullptr) { // Is the first node if (head != nullptr) { // The list has more elements newNode->next = head; newNode->next->previous = newNode; }else { newNode->next = nullptr; } head = newNode; head->previous = nullptr; } else { if (previousNode->next == nullptr){ // Is the last node previousNode->next = newNode; newNode->next = nullptr; }else { // Is a middle node newNode->next = previousNode->next; if (newNode->next != nullptr){ newNode->next->previous = newNode; } previousNode->next = newNode; newNode->previous = previousNode; } } } template <class T> void DoublyLinkedList<T>::remove(Node* deleteNode) { if (deleteNode->previous == nullptr){ // Is the first node if (deleteNode->next == nullptr){ // List only has one element head = nullptr; }else { // List has more elements head = deleteNode->next; head->previous = nullptr; } }else { if (deleteNode->next == nullptr){ // Is the last node deleteNode->previous->next = nullptr; }else { // Middle node deleteNode->previous->next = deleteNode->next; deleteNode->next->previous = deleteNode->previous; } } } ```
Cratia is an extinct genus of frog that lived in Brazil during the Early Cretaceous (Aptian). Fossils were discovered in the Crato Formation, after which the genus is named. It was named by Ana M. Báez, Geraldo J.B. Moura and Raúl O. Gómez in 2009, and the type species is Cratia gracilis. References Early Cretaceous frogs Aptian life Early Cretaceous animals of South America Cretaceous Brazil Fossils of Brazil Fossil taxa described in 2009
Shockoe Bottom Station, located at the corner of Main and 24th Streets in Richmond, Virginia, is a bus station on the GRTC bus rapid transit route. Station layout References External links Shockoe Bottom station Buildings and structures in Richmond, Virginia GRTC Pulse stations 2018 establishments in Virginia Bus stations in Virginia Transport infrastructure completed in 2018
```python
# Lint fixture: exercises checks for mutable or call-produced ContextVar
# defaults (analogous to flake8-bugbear's B006/B008 for function arguments).
# The trailing "# bad" / "# good" markers indicate which lines a checker is
# expected to report; do not "fix" these lines — they are the test data.
import contextvars
import time
from contextvars import ContextVar

ContextVar("cv", default=[])  # bad
ContextVar("cv", default=list())  # bad
ContextVar("cv", default=set())  # bad
ContextVar("cv", default=time.time())  # bad (B008-like)

contextvars.ContextVar("cv", default=[])  # bad

# good
ContextVar("cv", default=())
contextvars.ContextVar("cv", default=())
ContextVar("cv", default=tuple())

# see tests/b006_b008.py for more comprehensive tests
```
```c
/* Implicit-int function (pre-C99 style).  Divides x by 0.5 (i.e. doubles it)
 * into a local and clamps results below 0.1 up to 1.0, but the value is never
 * returned or stored anywhere observable, so a call has no effect and the
 * implicit return value is unspecified.
 * NOTE(review): looks like a reduced compiler/analyzer test case — confirm
 * before assigning it any real semantics. */
f(double x)
{
    double scaled;

    scaled = x / 0.5;
    if (scaled < 0.1) {
        scaled = 1.0;
    }
}
```
```objective-c
/*
 *
 */
#ifndef _MEC_PWM_H
#define _MEC_PWM_H

#include <stdint.h>
#include <stddef.h>

/* NOTE(review): BIT() and SHLU32() are not defined in this header or its
 * includes — presumably supplied by a shared SoC utility header; verify the
 * include order at each usage site. */

/* Byte stride between consecutive PWM instances (and its log2). */
#define MCHP_PWM_INST_SPACING 0x10u
#define MCHP_PWM_INST_SPACING_P2 4u

/* PWM Count On register */
#define MCHP_PWM_COUNT_ON_REG_OFS 0u
#define MCHP_PWM_COUNT_ON_MASK 0xffffu

/* PWM Count Off register */
#define MCHP_PWM_COUNT_OFF_REG_OFS 4u
#define MCHP_PWM_COUNT_OFF_MASK 0xffffu

/* PWM Configuration Register */
#define MCHP_PWM_CONFIG_REG_OFS 8u
#define MCHP_PWM_CONFIG_MASK 0x7fu

/*
 * Enable and start PWM. Clearing this bit resets internal counters.
 * COUNT_ON and COUNT_OFF registers are not affected by enable bit.
 */
#define MCHP_PWM_CFG_ENABLE_POS 0
#define MCHP_PWM_CFG_ENABLE BIT(MCHP_PWM_CFG_ENABLE_POS)

/* Clock select */
#define MCHP_PWM_CFG_CLK_SEL_POS 1u
#define MCHP_PWM_CFG_CLK_SEL_48M 0u
#define MCHP_PWM_CFG_CLK_SEL_100K BIT(MCHP_PWM_CFG_CLK_SEL_POS)

/*
 * ON state polarity.
 * Default ON state is High.
 */
#define MCHP_PWM_CFG_ON_POL_POS 2u
#define MCHP_PWM_CFG_ON_POL_HI 0u
#define MCHP_PWM_CFG_ON_POL_LO BIT(MCHP_PWM_CFG_ON_POL_POS)

/*
 * Clock pre-divider
 * Clock divider value = pre-divider + 1
 */
#define MCHP_PWM_CFG_CLK_PRE_DIV_POS 3u
#define MCHP_PWM_CFG_CLK_PRE_DIV_MASK0 0x0fU
#define MCHP_PWM_CFG_CLK_PRE_DIV_MASK \
	SHLU32(0x0fu, MCHP_PWM_CFG_CLK_PRE_DIV_POS)
/* Encode pre-divider n (low 4 bits used) into its CONFIG field position. */
#define MCHP_PWM_CFG_CLK_PRE_DIV(n) \
	SHLU32((n) & MCHP_PWM_CFG_CLK_PRE_DIV_MASK0, \
	       MCHP_PWM_CFG_CLK_PRE_DIV_POS)

/* PWM input frequencies selected in configuration register. */
#define MCHP_PWM_INPUT_FREQ_HI 48000000u
#define MCHP_PWM_INPUT_FREQ_LO 100000u

/*
 * PWM Frequency =
 * (1 / (pre_div + 1)) * PWM_INPUT_FREQ / ((COUNT_ON+1) + (COUNT_OFF+1))
 *
 * PWM Duty Cycle =
 * (COUNT_ON+1) / ((COUNT_ON+1) + (COUNT_OFF + 1))
 */

/** @brief PWM controller
 * Memory-mapped register block; field order matches the REG_OFS offsets
 * above (COUNT_ON @ 0, COUNT_OFF @ 4, CONFIG @ 8).
 */
struct pwm_regs {
	volatile uint32_t COUNT_ON;
	volatile uint32_t COUNT_OFF;
	volatile uint32_t CONFIG;
};

#endif	/* #ifndef _MEC_PWM_H */
```
Ramón Mellado Parsons (October 31, 1904 - June 7, 1985) was an educator, writer, politician and a former Puerto Rico Secretary of Education. Early days He earned a bachelor's degree in Science from the University of Puerto Rico in 1927. In 1940 he earned a Master of Arts from Teachers College, Columbia University in New York, and he completed his doctorate in Education in 1947. Education career He worked as a science teacher at Julio L. Vizcarrondo Coronado High School in Carolina, Puerto Rico. He was subsequently appointed Superintendent of Schools (1932–34), General Supervisor of Sciences, Deputy Commissioner of Public Instruction, and Secretary of Education of Puerto Rico (1969-1971). His university career began in 1943, serving as Professor and Director of the Department of Pedagogy of the University of Puerto Rico, Río Piedras Campus until 1948. At this institution he also held the positions of Dean of Administration (1948–56) and Professor of the Graduate School of Pedagogy (1957–68). In 1948, he published "Culture and education in Puerto Rico". Politics He was a delegate to the Constitutional Convention of Puerto Rico in 1952. In 1972 he was elected Senator At-large for the New Progressive Party. Legacy An elementary school in Carolina, Puerto Rico was named after him. References 1904 births 1985 deaths Teachers College, Columbia University alumni Members of the Senate of Puerto Rico People from Carolina, Puerto Rico Puerto Rican educators Puerto Rican writers Secretaries of Education of Puerto Rico University of Puerto Rico alumni University of Puerto Rico faculty 20th-century American politicians
Marcelo Fromer (December 3, 1961 – June 13, 2001) was the guitarist of Brazilian rock band Titãs. One of the founding members and also the band's manager, he died in 2001, after being hit by a motorcycle while jogging. Early life and youth Marcelo Fromer grew up in São Paulo in a house frequently visited by friends like Branco Mello, whom he met in 1974, in the Hugo Sarmento school. When he was 15 years old, he discovered The Beatles, Chico Buarque and the Tropicália, and started having guitar lessons with Luiz Tati, of the group Rumo. While studying at Equipe school, he met Tony Bellotto and formed the Trio Mamão with him and Mello. As he hated singing, he was the only one dedicated exclusively to the acoustic guitar. Also at Equipe school, he, Mello and other classmates created the "Papagaio" magazine, featuring comics, poetry and texts about the internal decisions of the school. But he would never stop composing and playing. Soccer would also play an important role on his life. Trusty supporter of São Paulo, he managed to train for the young team of the club. Once he finished school, he entered Linguistics at USP together with Mello, but both quit it two years later. Works with Titãs In 1981, Titãs made their first performance (under the name Titãs do Iê-Iê) at the event “A Idade da Pedra Jovem”. Fromer became a little excited and decided to play the electric guitar like an acoustic one, without a pick. At the end of the night, his fingers were seriously injured and there was blood all over his white Giannini. In 1984, with the releasing of Titãs first album, Fromer showed that he could work as a businessman, and he was chosen to be the manager the band. Another passion of Fromer, which appeared mostly during the first tours of the band, was the gastronomy. He would always choose the restaurants were the group should eat throughout the country. 
This passion would be the subject of the song "As Aventuras do Guitarrista Gourmet Atrás da Refeição Ideal", featured at Como Estão Vocês?. His "business-gourmet" lives would join each other at Rock Dog (word play with Hot Dog), a snack bar specialized in hot dogs that he opened in 1989, in São Paulo, in a partnership of his brothers Thiago and Cuca, and the band members Mello and Bellotto. In 2000, he became a partner of the Campana, a Pizza shop also located in São Paulo. A year before, he released the book Você Tem Fome de Quê?, in which he listed cookery recipes from many restaurants of Brazil, along with tablatures and curiosities of Titãs main hits. Other works Fromer left unfinished a biography of former football player and TV commentator Walter Casagrande, his personal friend. The project was resumed in 2008 by Gilvan Ribeiro and released in 2013. Death On June 11, 2001, a day before the start of the recording of the band's 13th album,A Melhor Banda de Todos os Tempos da Última Semana, Fromer was crossing Europa Avenue in southern São Paulo when a red Honda CG125 motorcycle, driven by Erasmo Castro da Costa Jr., struck him. Erasmo called an ambulance, but fled the scene when the police arrived, as his driving license was no longer valid. Two days later, Fromer died in hospital. Presumably, his head struck Erasmo's helmet. In July 2002, Erasmo was found by the police. He stated he did not know the man he struck was Fromer until his death. He also alleged that Fromer was to blame for the accident, as there was a crosswalk 15 meters away; Fromer decided to cross the avenue between the cars, making it impossible for Erasmo to see Fromer. Fromer's family authorized the donation of his organs. His heart, liver, pancreas, kidneys, and his corneas were all donated. On September 7, 2001, a skyway named after him was inaugurated over Avenida Juscelino Kubitschek, in São Paulo. Personal life Fromer lived in São Paulo. 
By the end of the 1980s, he was in a relationship with actress Betty Gofman. His first marriage with Martha Locatelli Fromer brought along his first child, Susy. His second marriage to Ana Cristina Martinelli, also known as "Tina", resulted in the birth of children Alice and Max. References Fromer’s page at Titãs official web site Folha’s news about Fromer’s death External links Titãs official website 1961 births 2001 deaths Brazilian composers Brazilian guitarists Brazilian male guitarists Brazilian Jews Brazilian rock musicians Brazilian male songwriters Pedestrian road incident deaths Musicians from São Paulo Road incident deaths in Brazil Titãs members Acoustic guitarists University of São Paulo alumni Rhythm guitarists 20th-century guitarists 20th-century male musicians
```xml
import React, { useCallback, useState } from "react";
import APP_CONSTANTS from "config/constants";
import { BiArrowBack } from "@react-icons/all-files/bi/BiArrowBack";
import { RQButton } from "lib/design-system/components";
import { AuthFormInput } from "../AuthFormInput";
import { isEmailValid } from "utils/FormattingHelper";
import { toast } from "utils/Toast";
import { handleForgotPasswordButtonOnClick } from "../../actions";
import "./index.scss";
import { isDisposableEmail } from "utils/AuthUtils";

interface Props {
  email: string;
  setAuthMode: (authMode: string) => void;
  setEmail: (userEmail: string) => void;
  toggleModal: () => void;
}

/**
 * Returns the first applicable validation error for the given email, or null
 * when it is acceptable. Check order matters: presence, then format, then the
 * disposable-domain check.
 */
const getEmailValidationError = (value: string): string | null => {
  if (!value) {
    return "Please enter your email";
  }
  if (!isEmailValid(value)) {
    return "Please enter a valid email";
  }
  if (isDisposableEmail(value)) {
    return "Please enter a valid email address. Temporary or disposable email addresses are not allowed.";
  }
  return null;
};

/**
 * Screen that lets the user request a password-reset email. Validates the
 * entered address client-side and then delegates to the shared
 * handleForgotPasswordButtonOnClick action.
 */
export const RequestPasswordResetForm: React.FC<Props> = ({ setAuthMode, email, setEmail, toggleModal }) => {
  const [isLoading, setIsLoading] = useState(false);

  const handleRequestPasswordReset = useCallback(
    (event: React.MouseEvent<HTMLButtonElement, MouseEvent>) => {
      const validationError = getEmailValidationError(email);
      if (validationError) {
        toast.error(validationError);
        return;
      }
      handleForgotPasswordButtonOnClick(event, email, setIsLoading, toggleModal);
    },
    [email, toggleModal]
  );

  return (
    <>
      <button
        className="auth-screen-back-btn request-new-password-back-btn"
        onClick={() => setAuthMode(APP_CONSTANTS.AUTH.ACTION_LABELS.SIGN_UP)}
      >
        <BiArrowBack />
        <span>Back</span>
      </button>
      <div className="text-bold text-white header mt-16">Forgot your password?</div>
      <div className="request-new-password-screen-text">
        Enter your email address to reset your password. You may need to check your spam folder or unblock{" "}
        <strong>no-reply@requestly.io</strong>.
      </div>
      <div className="mt-24">
        <AuthFormInput
          id="email"
          value={email}
          onValueChange={(updatedEmail) => setEmail(updatedEmail)}
          placeholder="E.g., you@company.com"
          label="Your email"
        />
      </div>
      <RQButton
        block
        size="large"
        type="primary"
        className="request-new-password-btn mt-16"
        loading={isLoading}
        onClick={handleRequestPasswordReset}
      >
        Send reset link
      </RQButton>
    </>
  );
};
```
```java Uses of the `final` keyword Connecting to FTP using Java Use strings in a `switch` statement Inheriting a constructor from a superclass Calling one constructor from another ```
Meike Babel and Laurence Courtois won in the final 6–0, 6–2 against Åsa Carlsson and Florencia Labat in the doubles of the 1998 ENKA Open. Seeds Champion seeds are indicated in bold text while text in italics indicates the round in which those seeds were eliminated. Sabine Appelmans / Els Callens (quarterfinals) Meike Babel / Laurence Courtois (champions) Radka Bobková / Caroline Schneider (first round) Olga Barabanschikova / Tina Križan (quarterfinals) Draw External links 1998 ENKA Open Doubles Draw ENKA Open 1998 WTA Tour
```javascript
/**
 *
 *
 * path_to_url
 *
 * Unless required by applicable law or agreed to in writing, software
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 */
define(["jquery", "data/Parts", "part/Part", "part.scss", "data/Notes",
	"Tone/core/Transport", "part/SnapScroll", "part/TimelineIndicator"],
function ($, PartsData, Part, partStyle, Notes, Transport, SnapScroll, TimelineIndicator) {

	// container element that hosts every Part view
	var partsContainer = $("<div>", {
		"id" : "Parts"
	}).appendTo("body");

	// re-apply the current chord whenever the window (and thus the
	// container) changes size
	$(window).on("resize", function(){
		Parts.setChord(activeNotes);
	});

	/**
	 * THE PARTS
	 */
	// one Part view per entry in PartsData, all initially disabled
	var partViews = PartsData.parts.map(function(partDescription){
		var view = new Part(partsContainer, partDescription);
		view.enabled = false;
		return view;
	});

	// index of the part currently shown
	var activePartIndex = 0;

	// the chord currently applied to every part
	var activeNotes = [];

	/**
	 * The return object
	 */
	var Parts = {
		// broadcast a new chord to every part and remember it for resizes
		setChord : function(notes){
			activeNotes = notes;
			partViews.forEach(function(view){
				view.setChord(notes);
			});
		},
		// reset each part's chord to its initial state
		initChord : function(){
			partViews.forEach(function(view){
				view.initChord();
			});
		},
		// disable the outgoing part and enable the incoming one
		setPart : function(currentIndex, nextIndex){
			partViews[currentIndex].enable(false);
			activePartIndex = nextIndex;
			partViews[nextIndex].enable(true);
		}
	};

	// start out on a C major chord
	Parts.setChord(Notes.major.C);
	// and show the first part
	partViews[activePartIndex].enable(true);

	// make the parts scrollable; page changes route through Parts.setPart
	SnapScroll(partsContainer, Parts.setPart);

	// swap the slick arrows for the icon-font versions
	partsContainer.find(".slick-prev").addClass("icon-svg_left_arrow");
	partsContainer.find(".slick-next").addClass("icon-svg_right_arrow");

	// loop the transport over the configured phrase length
	Transport.loop = true;
	Transport.loopEnd = PartsData.loopDuration;

	// the timeline indicator
	TimelineIndicator(partsContainer);

	return Parts;
});
```
```ruby
# Homebrew formula for autobrr: builds the web UI with pnpm, then compiles the
# Go server (autobrr) and companion CLI (autobrrctl).
# NOTE(review): url/sha256 values are sanitized placeholders in this copy.
class Autobrr < Formula
  desc "Modern, easy to use download automation for torrents and usenet"
  homepage "path_to_url"
  url "path_to_url"
  sha256 your_sha256_hash
  license "GPL-2.0-or-later"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_sonoma: your_sha256_hash
    sha256 cellar: :any_skip_relocation, arm64_ventura: your_sha256_hash
    sha256 cellar: :any_skip_relocation, arm64_monterey: your_sha256_hash
    sha256 cellar: :any_skip_relocation, sonoma: your_sha256_hash
    sha256 cellar: :any_skip_relocation, ventura: your_sha256_hash
    sha256 cellar: :any_skip_relocation, monterey: your_sha256_hash
    sha256 cellar: :any_skip_relocation, x86_64_linux: your_sha256_hash
  end

  # Build-only dependencies: Go for the binaries, node/pnpm for the web assets.
  depends_on "go" => :build
  depends_on "node" => :build
  depends_on "pnpm" => :build

  def install
    # Build the frontend first; the Go build embeds the compiled web assets.
    system "pnpm", "install", "--dir", "web"
    system "pnpm", "--dir", "web", "run", "build"

    # Stamp version/commit into the binaries via -X linker flags.
    ldflags = "-s -w -X main.version=#{version} -X main.commit=#{tap.user}"
    system "go", "build", *std_go_args(output: bin/"autobrr", ldflags:), "./cmd/autobrr"
    system "go", "build", *std_go_args(output: bin/"autobrrctl", ldflags:), "./cmd/autobrrctl"
  end

  def post_install
    # Config/data directory used by the service definition below.
    (var/"autobrr").mkpath
  end

  service do
    run [opt_bin/"autobrr", "--config", var/"autobrr/"]
    keep_alive true
    log_path var/"log/autobrr.log"
  end

  test do
    assert_match version.to_s, shell_output("#{bin}/autobrrctl version")

    # Boot the server with a minimal config and probe its liveness endpoint.
    port = free_port
    (testpath/"config.toml").write <<~EOS
      host = "127.0.0.1"
      port = #{port}
      logLevel = "INFO"
      checkForUpdates = false
      sessionSecret = "secret-session-key"
    EOS

    pid = fork do
      exec bin/"autobrr", "--config", "#{testpath}/"
    end
    sleep 4

    begin
      system "curl", "-s", "--fail", "path_to_url#{port}/api/healthz/liveness"
    ensure
      Process.kill("TERM", pid)
      Process.wait(pid)
    end
  end
end
```
```css
/* Fade an element in by animating opacity from fully transparent to opaque.
 * 0%/100% are equivalent to the from/to keyframe selectors. */
@keyframes fadeIn {
  0% {
    opacity: 0;
  }

  100% {
    opacity: 1;
  }
}

.fadeIn {
  animation-name: fadeIn;
}
```
Pirates of the Caribbean: The Price of Freedom is a 2011 adventure novel written by Ann C. Crispin. The book details the adventures of Captain Jack Sparrow as a young man after the events of Pirates of the Caribbean: Jack Sparrow and before the events of Pirates of the Caribbean: Legends of the Brethren Court. This is the final novel written by Crispin, who died in September 2013. Plot Twenty-five-year-old Jack Sparrow is a clean-cut merchant seaman pursuing a legitimate career as a first mate for the East India Trading Company. He sometimes thinks back to his boyhood pirating days, but he doesn’t miss Teague’s scrutiny or the constant threat of the noose. Besides, he doesn’t have much choice - he broke the Code when he freed a friend who had been accused of rogue piracy, and he can no longer show his face in Shipwreck Cove. When Jack’s ship is attacked by pirates and his captain dies in the altercation, he suddenly finds himself in command. The wily sailor’s skillful negotiations with the pirate captain—who turns out to be a woman from his past—result in a favorable outcome that puts Jack in line for an official promotion. After making port in Africa, Jack is summoned by Cutler Beckett, who makes him captain of a ship called the Wicked Wench. Beckett gives Jack an assignment. He has heard a legend about a magical island named Zerzura whose labyrinthine bowels are said to contain a glorious treasure. Beckett suspects that one of his house slaves, a girl named Ayisha, is from Zerzura. He asks Jack to take her along on his voyage and seduce her into divulging the island’s whereabouts. In payment for his services, Beckett promises Jack a share of the treasure. But this task isn’t as easy as Jack initially believes. Before she agrees to reveal the location of her home, Ayisha insists that Jack take her to the New World to rescue her brother, who has been sold into slavery in the Bahamas. 
Their voyage is long and arduous, and as they weather a vicious storm and a surprise attack from an old pirate foe, Jack grows to respect and admire Ayisha’s bravery. He knows that Beckett intends to enslave her people after robbing them of their treasure, and Jack’s moral compass revolts at the idea. It might be possible to deliver Ayisha safely to Zerzura, obtain some of the treasure, and convince Beckett that he never found it... but the greedy E.I.T.C. official has eyes everywhere, and if he learns that Jack has foiled his plans, he could take away the thing that Captain Sparrow loves most: his ship—and his freedom. Characters Jack Sparrow – a former pirate working for the EITC, captain of the Wicked Wench. Cutler Beckett – the EITC director for West Africa. Amenirdis/Ayisha – the lost princess from the island of Kerma. Robby Greene – a former pirate, Jack's friend and first mate of the Wicked Wench. Esmeralda – the Pirate Lord of the Caribbean, Jack's love interest. Edward Teague – Jack's father, the Pirate Lord of Madagascar and Keeper of the Pirate Code. Ian Mercer – Beckett's right-hand man. Borya Palachnik – the Pirate Lord of the Caspian Sea, leader of the rogue pirates. Christophe-Julien de Rapièr – Jack's former friend, one of the rogue pirates. Davy Jones – Lord of the underwater realms. Don Rafael – Esmeralda's grandfather, the Pirate Lord of the Caribbean. Hector Barbossa – a pirate captain in the Caribbean. Pintel and Ragetti – Barbossa's crewmembers Eduardo Villanueva – the Pirate Lord of the Adriatic Sea. Mistress Ching – the Pirate Lord of the Pacific Ocean. Ships Wicked Wench, an EITC merchant ship owned by Beckett and captained by Sparrow. Fair Wind, an EITC merchant brig. Venganza, a pirate frigate captained by Don Rafael and Esmeralda. La Vipère, Christophe's pirate brigantine. Koldunya, Borya's pirate sloop. Troubadour, Teague's pirate ship. Sentinel, an EITC brig, Beckett's flagship. 
Background When an editor working for Disney was seeking an author to write a novel dealing with the backstory of Captain Jack Sparrow, a major character from the Pirates of the Caribbean franchise, they contacted Crispin's agent and contracted her to write the book after reading The Han Solo Trilogy, which focused on Han Solo's backstory. According to Crispin herself, it took her three years to write and she did a lot of research on the historical period and the nautical stuff. She was also given the script for At World's End before the film released, but my book was finished before the script for On Stranger Tides was written. The instructions for A. C. Crispin in writing Pirates of the Caribbean: The Price of Freedom were to "stick to historical fact, unless it conflicts with established Pirates of the Caribbean continuity." Crispin made a faithful effort to do this, having done plenty of research, with Under the Black Flag by David Cordingly being one of the four pirate-related books she found herself using the most consistently. When releasing the fifth excerpt of her book, Crispin spoke of how she was privileged to write the scene where Han first beheld—and fell for—the Millennium Falcon in The Han Solo Trilogy. She assured fans it was every bit as thrilling to write the scene with Jack Sparrow and the Wicked Wench. The Price of Freedom was published on May 17, 2011. References External links Pirates of the Caribbean: The Price of Freedom at the Pirates of the Caribbean wiki Pirates of the Caribbean Novels about pirates Novels based on films
The 2023 Gree-Tour of Guangxi was a road cycling stage race that took place between 12 and 17 October 2023 in the Chinese province of Guangxi. It was the 4th edition of the Tour of Guangxi and the thirty-fifth and final event of the 2023 UCI World Tour. The race returned after three years of not being held due to the COVID-19 pandemic in China. Teams Eighteen teams, consisting of fourteen of the eighteen UCI WorldTour teams, three UCI Professional Continental teams and one national team, participated in the race. Each team entered seven riders, except , , which entered six riders and , which entered five riders. Two riders of were pulled from the race before the start by their team. Madis Mihkels and Gerben Thijssen both are facing disciplinary action after Mihkels posted a picture on Instagram of himself imitating slant eyes. UCI WorldTeams UCI Professional Continental teams National teams China Pre-race favorites The race has typically favored sprinters or punchers for the overall victory. This edition will be no different, with Stage 4 likely to be the decisive stage. Tim Wellens is the only past winner of the Tour starting; he won in 2017 after winning a two-up sprint at Mashan Nongla Scenic Spot, the same place stage 4 finishes this year. Other riders considered favorites are punchers Matteo Jorgenson and Ivan Sosa (both ) and Oscar Onley (). With the other stages most likely to end in a mass sprint, the favorites for those stages are: Arnaud De Lie (), who has amassed 10 pro wins this season, and Olav Kooij (), who has 11. Other contenders for the sprint stages are: Jonathan Milan (), Arvid de Kleijn (), Jakub Mareczko (), Max Kanter () and Elia Viviani (). Route Stages Stage 1 12 October 2023 — Beihai to Beihai, The opening stage of the Tour of Guangxi was a flat circuit stage starting and finishing in Beihai. A breakaway of five riders — Frederik Wandahl (), Louis Barré (), Dries De Bondt (), Omer Goldstein () and Julius Johansen () — got away early on.
They were not allowed too large of a gap by the teams of the sprinters. Wandahl won both of the Mountain sprints to take the jersey for leading that classification going into the second stage. With Thomas De Gendt and his team doing the pacing the break was caught with just under 16km to go. , , and all had their trains at the front coming into the sprint. It was Milan who launched first, with Viviani coming around in the last few hundred metres to take victory. This was Viviani's first WorldTour win since 2019 where he won the 2019 EuroEyes Cyclassics. Stage 2 13 October 2023 — Beihai to Qinzhou, Stage 3 14 October 2023 — Nanning to Nanning, Stage 4 15 October 2023 — Nanning to Mashan Nongla Scenic Spot, Stage 5 16 October 2023 — Liuzhou to Guilin, Stage 6 17 October 2023 — Guilin to Guilin, Classification leadership Classification standings General classification Points classification Mountains classification Young rider classification Team classification References 2023 UCI World Tour 2023 in Chinese sport 2023 October 2023 sports events in China
```yaml # UTF-8 # YAML # # name name: # other_names ... # YAML # other_names: {"":"", "":"", "":"Tom"} # other_names: # sex M/F / sex: M # birth 4 N/A birth: 1900 # death 4 N/A death: 1970 # desc YAML # desc desc: | # links YAML list # # links: ```
```objective-c // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef V8_INTERFACE_DESCRIPTORS_H_ #define V8_INTERFACE_DESCRIPTORS_H_ #include <memory> #include "src/globals.h" #include "src/isolate.h" #include "src/machine-type.h" #include "src/register-arch.h" namespace v8 { namespace internal { #define INTERFACE_DESCRIPTOR_LIST(V) \ V(Abort) \ V(Allocate) \ V(AllocateHeapNumber) \ V(ApiCallback) \ V(ApiGetter) \ V(ArgumentsAdaptor) \ V(ArrayConstructor) \ V(ArrayNArgumentsConstructor) \ V(ArrayNoArgumentConstructor) \ V(ArraySingleArgumentConstructor) \ V(AsyncFunctionStackParameter) \ V(BigIntToI64) \ V(I64ToBigInt) \ V(BinaryOp) \ V(CallForwardVarargs) \ V(CallFunctionTemplate) \ V(CallTrampoline) \ V(CallVarargs) \ V(CallWithArrayLike) \ V(CallWithSpread) \ V(CEntry1ArgvOnStack) \ V(CloneObjectWithVector) \ V(Compare) \ V(ConstructForwardVarargs) \ V(ConstructStub) \ V(ConstructVarargs) \ V(ConstructWithArrayLike) \ V(ConstructWithSpread) \ V(ContextOnly) \ V(CppBuiltinAdaptor) \ V(EphemeronKeyBarrier) \ V(FastNewFunctionContext) \ V(FastNewObject) \ V(FrameDropperTrampoline) \ V(GetProperty) \ V(GrowArrayElements) \ V(InterpreterCEntry1) \ V(InterpreterCEntry2) \ V(InterpreterDispatch) \ V(InterpreterPushArgsThenCall) \ V(InterpreterPushArgsThenConstruct) \ V(JSTrampoline) \ V(Load) \ V(LoadGlobal) \ V(LoadGlobalWithVector) \ V(LoadWithVector) \ V(NewArgumentsElements) \ V(NoContext) \ V(RecordWrite) \ V(ResumeGenerator) \ V(RunMicrotasksEntry) \ V(RunMicrotasks) \ V(Store) \ V(StoreGlobal) \ V(StoreGlobalWithVector) \ V(StoreTransition) \ V(StoreWithVector) \ V(StringAt) \ V(StringSubstring) \ V(TypeConversion) \ V(TypeConversionStackParameter) \ V(Typeof) \ V(Void) \ V(WasmAtomicNotify) \ V(WasmI32AtomicWait) \ V(WasmI64AtomicWait) \ V(WasmMemoryGrow) \ V(WasmTableGet) \ V(WasmTableSet) \ V(WasmThrow) \ BUILTIN_LIST_TFS(V) class V8_EXPORT_PRIVATE CallInterfaceDescriptorData { public: enum Flag { 
kNoFlags = 0u, kNoContext = 1u << 0, // This indicates that the code uses a special frame that does not scan the // stack arguments, e.g. EntryFrame. And this allows the code to use // untagged stack arguments. kNoStackScan = 1u << 1, }; typedef base::Flags<Flag> Flags; CallInterfaceDescriptorData() = default; // A copy of the passed in registers and param_representations is made // and owned by the CallInterfaceDescriptorData. void InitializePlatformSpecific(int register_parameter_count, const Register* registers); // if machine_types is null, then an array of size // (return_count + parameter_count) will be created with // MachineType::AnyTagged() for each member. // // if machine_types is not null, then it should be of the size // (return_count + parameter_count). Those members of the parameter array will // be initialized from {machine_types}, and the rest initialized to // MachineType::AnyTagged(). void InitializePlatformIndependent(Flags flags, int return_count, int parameter_count, const MachineType* machine_types, int machine_types_length); void Reset(); bool IsInitialized() const { return IsInitializedPlatformSpecific() && IsInitializedPlatformIndependent(); } Flags flags() const { return flags_; } int return_count() const { return return_count_; } int param_count() const { return param_count_; } int register_param_count() const { return register_param_count_; } Register register_param(int index) const { return register_params_[index]; } Register* register_params() const { return register_params_; } MachineType return_type(int index) const { DCHECK_LT(index, return_count_); return machine_types_[index]; } MachineType param_type(int index) const { DCHECK_LT(index, param_count_); return machine_types_[return_count_ + index]; } void RestrictAllocatableRegisters(const Register* registers, int num) { DCHECK_EQ(allocatable_registers_, 0); for (int i = 0; i < num; ++i) { allocatable_registers_ |= registers[i].bit(); } DCHECK_GT(NumRegs(allocatable_registers_), 
0); } RegList allocatable_registers() const { return allocatable_registers_; } private: bool IsInitializedPlatformSpecific() const { const bool initialized = (register_param_count_ == 0 && register_params_ == nullptr) || (register_param_count_ > 0 && register_params_ != nullptr); // Platform-specific initialization happens before platform-independent. return initialized; } bool IsInitializedPlatformIndependent() const { const bool initialized = return_count_ >= 0 && param_count_ >= 0 && machine_types_ != nullptr; // Platform-specific initialization happens before platform-independent. return initialized; } #ifdef DEBUG bool AllStackParametersAreTagged() const; #endif // DEBUG int register_param_count_ = -1; int return_count_ = -1; int param_count_ = -1; Flags flags_ = kNoFlags; // Specifying the set of registers that could be used by the register // allocator. Currently, it's only used by RecordWrite code stub. RegList allocatable_registers_ = 0; // |registers_params_| defines registers that are used for parameter passing. // |machine_types_| defines machine types for resulting values and incomping // parameters. // Both arrays are allocated dynamically by the InterfaceDescriptor and // freed on destruction. This is because static arrays cause creation of // runtime static initializers which we don't want. Register* register_params_ = nullptr; MachineType* machine_types_ = nullptr; DISALLOW_COPY_AND_ASSIGN(CallInterfaceDescriptorData); }; class V8_EXPORT_PRIVATE CallDescriptors : public AllStatic { public: enum Key { #define DEF_ENUM(name, ...) 
name, INTERFACE_DESCRIPTOR_LIST(DEF_ENUM) #undef DEF_ENUM NUMBER_OF_DESCRIPTORS }; static void InitializeOncePerProcess(); static void TearDown(); static CallInterfaceDescriptorData* call_descriptor_data( CallDescriptors::Key key) { return &call_descriptor_data_[key]; } static Key GetKey(const CallInterfaceDescriptorData* data) { ptrdiff_t index = data - call_descriptor_data_; DCHECK_LE(0, index); DCHECK_LT(index, CallDescriptors::NUMBER_OF_DESCRIPTORS); return static_cast<CallDescriptors::Key>(index); } private: static CallInterfaceDescriptorData call_descriptor_data_[NUMBER_OF_DESCRIPTORS]; }; class V8_EXPORT_PRIVATE CallInterfaceDescriptor { public: typedef CallInterfaceDescriptorData::Flags Flags; CallInterfaceDescriptor() : data_(nullptr) {} virtual ~CallInterfaceDescriptor() = default; explicit CallInterfaceDescriptor(CallDescriptors::Key key) : data_(CallDescriptors::call_descriptor_data(key)) {} Flags flags() const { return data()->flags(); } bool HasContextParameter() const { return (flags() & CallInterfaceDescriptorData::kNoContext) == 0; } int GetReturnCount() const { return data()->return_count(); } MachineType GetReturnType(int index) const { DCHECK_LT(index, data()->return_count()); return data()->return_type(index); } int GetParameterCount() const { return data()->param_count(); } int GetRegisterParameterCount() const { return data()->register_param_count(); } int GetStackParameterCount() const { return data()->param_count() - data()->register_param_count(); } Register GetRegisterParameter(int index) const { return data()->register_param(index); } MachineType GetParameterType(int index) const { DCHECK_LT(index, data()->param_count()); return data()->param_type(index); } RegList allocatable_registers() const { return data()->allocatable_registers(); } static const Register ContextRegister(); const char* DebugName() const; protected: const CallInterfaceDescriptorData* data() const { return data_; } virtual void 
InitializePlatformSpecific(CallInterfaceDescriptorData* data) { UNREACHABLE(); } virtual void InitializePlatformIndependent( CallInterfaceDescriptorData* data) { // Default descriptor configuration: one result, all parameters are passed // in registers and all parameters have MachineType::AnyTagged() type. data->InitializePlatformIndependent(CallInterfaceDescriptorData::kNoFlags, 1, data->register_param_count(), nullptr, 0); } // Initializes |data| using the platform dependent default set of registers. // It is intended to be used for TurboFan stubs when particular set of // registers does not matter. static void DefaultInitializePlatformSpecific( CallInterfaceDescriptorData* data, int register_parameter_count); // Initializes |data| using the platform dependent default set of registers // for JavaScript-compatible calling convention. // It is intended to be used for TurboFan stubs being called with JavaScript // linkage + additional parameters on registers and stack. static void JSDefaultInitializePlatformSpecific( CallInterfaceDescriptorData* data, int non_js_register_parameter_count); // Checks if float parameters are not assigned invalid registers. bool CheckFloatingPointParameters(CallInterfaceDescriptorData* data) { for (int i = 0; i < data->register_param_count(); i++) { if (IsFloatingPoint(data->param_type(i).representation())) { if (!IsValidFloatParameterRegister(data->register_param(i))) { return false; } } } return true; } bool IsValidFloatParameterRegister(Register reg); private: // {CallDescriptors} is allowed to call the private {Initialize} method. friend class CallDescriptors; const CallInterfaceDescriptorData* data_; void Initialize(CallInterfaceDescriptorData* data) { // The passed pointer should be a modifiable pointer to our own data. 
DCHECK_EQ(data, data_); DCHECK(!data->IsInitialized()); InitializePlatformSpecific(data); InitializePlatformIndependent(data); DCHECK(data->IsInitialized()); DCHECK(CheckFloatingPointParameters(data)); } }; #define DECLARE_DESCRIPTOR_WITH_BASE(name, base) \ public: \ explicit name() : base(key()) {} \ static inline CallDescriptors::Key key(); #if defined(V8_TARGET_ARCH_IA32) // To support all possible cases, we must limit the number of register args for // TFS builtins on ia32 to 3. Out of the 6 allocatable registers, esi is taken // as the context register and ebx is the root register. One register must // remain available to store the jump/call target. Thus 3 registers remain for // arguments. The reason this applies to TFS builtins specifically is because // this becomes relevant for builtins used as targets of Torque function // pointers (which must have a register available to store the target). // TODO(jgruber): Ideally we should just decrement kMaxBuiltinRegisterParams but // that comes with its own set of complications. It's possible, but requires // refactoring the calling convention of other existing stubs. constexpr int kMaxBuiltinRegisterParams = 4; constexpr int kMaxTFSBuiltinRegisterParams = 3; #else constexpr int kMaxBuiltinRegisterParams = 5; constexpr int kMaxTFSBuiltinRegisterParams = kMaxBuiltinRegisterParams; #endif STATIC_ASSERT(kMaxTFSBuiltinRegisterParams <= kMaxBuiltinRegisterParams); #define DECLARE_DEFAULT_DESCRIPTOR(name, base) \ DECLARE_DESCRIPTOR_WITH_BASE(name, base) \ protected: \ static const int kRegisterParams = \ kParameterCount > kMaxTFSBuiltinRegisterParams \ ? 
kMaxTFSBuiltinRegisterParams \ : kParameterCount; \ static const int kStackParams = kParameterCount - kRegisterParams; \ void InitializePlatformSpecific(CallInterfaceDescriptorData* data) \ override { \ DefaultInitializePlatformSpecific(data, kRegisterParams); \ } \ void InitializePlatformIndependent(CallInterfaceDescriptorData* data) \ override { \ data->InitializePlatformIndependent(Flags(kDescriptorFlags), kReturnCount, \ kParameterCount, nullptr, 0); \ } \ name(CallDescriptors::Key key) : base(key) {} \ \ public: #define DECLARE_JS_COMPATIBLE_DESCRIPTOR(name, base, \ non_js_reg_parameters_count) \ DECLARE_DESCRIPTOR_WITH_BASE(name, base) \ protected: \ void InitializePlatformSpecific(CallInterfaceDescriptorData* data) \ override { \ JSDefaultInitializePlatformSpecific(data, non_js_reg_parameters_count); \ } \ name(CallDescriptors::Key key) : base(key) {} \ \ public: #define DEFINE_RESULT_AND_PARAMETERS(return_count, ...) \ static constexpr int kDescriptorFlags = \ CallInterfaceDescriptorData::kNoFlags; \ static constexpr int kReturnCount = return_count; \ enum ParameterIndices { \ __dummy = -1, /* to be able to pass zero arguments */ \ ##__VA_ARGS__, \ \ kParameterCount, \ kContext = kParameterCount /* implicit parameter */ \ }; #define DEFINE_RESULT_AND_PARAMETERS_NO_CONTEXT(return_count, ...) \ static constexpr int kDescriptorFlags = \ CallInterfaceDescriptorData::kNoContext; \ static constexpr int kReturnCount = return_count; \ enum ParameterIndices { \ __dummy = -1, /* to be able to pass zero arguments */ \ ##__VA_ARGS__, \ \ kParameterCount \ }; // This is valid only for builtins that use EntryFrame, which does not scan // stack arguments on GC. #define DEFINE_PARAMETERS_ENTRY(...) 
\ static constexpr int kDescriptorFlags = \ CallInterfaceDescriptorData::kNoContext | \ CallInterfaceDescriptorData::kNoStackScan; \ static constexpr int kReturnCount = 1; \ enum ParameterIndices { \ __dummy = -1, /* to be able to pass zero arguments */ \ ##__VA_ARGS__, \ \ kParameterCount \ }; #define DEFINE_PARAMETERS(...) DEFINE_RESULT_AND_PARAMETERS(1, ##__VA_ARGS__) #define DEFINE_PARAMETERS_NO_CONTEXT(...) \ DEFINE_RESULT_AND_PARAMETERS_NO_CONTEXT(1, ##__VA_ARGS__) #define DEFINE_RESULT_AND_PARAMETER_TYPES(...) \ void InitializePlatformIndependent(CallInterfaceDescriptorData* data) \ override { \ MachineType machine_types[] = {__VA_ARGS__}; \ static_assert( \ kReturnCount + kParameterCount == arraysize(machine_types), \ "Parameter names definition is not consistent with parameter types"); \ data->InitializePlatformIndependent(Flags(kDescriptorFlags), kReturnCount, \ kParameterCount, machine_types, \ arraysize(machine_types)); \ } #define DEFINE_PARAMETER_TYPES(...) \ DEFINE_RESULT_AND_PARAMETER_TYPES(MachineType::AnyTagged() /* result */, \ ##__VA_ARGS__) #define DEFINE_JS_PARAMETERS(...) \ static constexpr int kDescriptorFlags = \ CallInterfaceDescriptorData::kNoFlags; \ static constexpr int kReturnCount = 1; \ enum ParameterIndices { \ kTarget, \ kNewTarget, \ kActualArgumentsCount, \ ##__VA_ARGS__, \ \ kParameterCount, \ kContext = kParameterCount /* implicit parameter */ \ }; #define DEFINE_JS_PARAMETER_TYPES(...) 
\ DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), /* kTarget */ \ MachineType::AnyTagged(), /* kNewTarget */ \ MachineType::Int32(), /* kActualArgumentsCount */ \ ##__VA_ARGS__) #define DECLARE_DESCRIPTOR(name, base) \ DECLARE_DESCRIPTOR_WITH_BASE(name, base) \ protected: \ void InitializePlatformSpecific(CallInterfaceDescriptorData* data) override; \ name(CallDescriptors::Key key) : base(key) {} \ \ public: class V8_EXPORT_PRIVATE VoidDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS() DEFINE_PARAMETER_TYPES() DECLARE_DESCRIPTOR(VoidDescriptor, CallInterfaceDescriptor) }; // Dummy descriptor used to mark builtins that don't yet have their proper // descriptor associated. typedef VoidDescriptor DummyDescriptor; // Dummy descriptor that marks builtins with C calling convention. typedef VoidDescriptor CCallDescriptor; class AllocateDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS_NO_CONTEXT(kRequestedSize) DEFINE_RESULT_AND_PARAMETER_TYPES(MachineType::TaggedPointer(), // result 1 MachineType::IntPtr()) // kRequestedSize DECLARE_DESCRIPTOR(AllocateDescriptor, CallInterfaceDescriptor) }; // This descriptor defines the JavaScript calling convention that can be used // by stubs: target, new.target, argc (not including the receiver) and context // are passed in registers while receiver and the rest of the JS arguments are // passed on the stack. 
class JSTrampolineDescriptor : public CallInterfaceDescriptor { public: DEFINE_JS_PARAMETERS() DEFINE_JS_PARAMETER_TYPES() DECLARE_JS_COMPATIBLE_DESCRIPTOR(JSTrampolineDescriptor, CallInterfaceDescriptor, 0) }; class ContextOnlyDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS() DEFINE_PARAMETER_TYPES() DECLARE_DESCRIPTOR(ContextOnlyDescriptor, CallInterfaceDescriptor) }; class NoContextDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS_NO_CONTEXT() DEFINE_PARAMETER_TYPES() DECLARE_DESCRIPTOR(NoContextDescriptor, CallInterfaceDescriptor) }; // LoadDescriptor is used by all stubs that implement Load/KeyedLoad ICs. class LoadDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kReceiver, kName, kSlot) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kReceiver MachineType::AnyTagged(), // kName MachineType::TaggedSigned()) // kSlot DECLARE_DESCRIPTOR(LoadDescriptor, CallInterfaceDescriptor) static const Register ReceiverRegister(); static const Register NameRegister(); static const Register SlotRegister(); }; class LoadGlobalDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kName, kSlot) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kName MachineType::TaggedSigned()) // kSlot DECLARE_DESCRIPTOR(LoadGlobalDescriptor, CallInterfaceDescriptor) static const Register NameRegister() { return LoadDescriptor::NameRegister(); } static const Register SlotRegister() { return LoadDescriptor::SlotRegister(); } }; class StoreDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kReceiver, kName, kValue, kSlot) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kReceiver MachineType::AnyTagged(), // kName MachineType::AnyTagged(), // kValue MachineType::TaggedSigned()) // kSlot DECLARE_DESCRIPTOR(StoreDescriptor, CallInterfaceDescriptor) static const Register ReceiverRegister(); static const Register NameRegister(); static const Register ValueRegister(); static const 
Register SlotRegister(); #if V8_TARGET_ARCH_IA32 static const bool kPassLastArgsOnStack = true; #else static const bool kPassLastArgsOnStack = false; #endif // Pass value and slot through the stack. static const int kStackArgumentsCount = kPassLastArgsOnStack ? 2 : 0; }; class StoreTransitionDescriptor : public StoreDescriptor { public: DEFINE_PARAMETERS(kReceiver, kName, kMap, kValue, kSlot, kVector) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kReceiver MachineType::AnyTagged(), // kName MachineType::AnyTagged(), // kMap MachineType::AnyTagged(), // kValue MachineType::TaggedSigned(), // kSlot MachineType::AnyTagged()) // kVector DECLARE_DESCRIPTOR(StoreTransitionDescriptor, StoreDescriptor) static const Register MapRegister(); static const Register SlotRegister(); static const Register VectorRegister(); // Pass value, slot and vector through the stack. static const int kStackArgumentsCount = kPassLastArgsOnStack ? 3 : 0; }; class StoreWithVectorDescriptor : public StoreDescriptor { public: DEFINE_PARAMETERS(kReceiver, kName, kValue, kSlot, kVector) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kReceiver MachineType::AnyTagged(), // kName MachineType::AnyTagged(), // kValue MachineType::TaggedSigned(), // kSlot MachineType::AnyTagged()) // kVector DECLARE_DESCRIPTOR(StoreWithVectorDescriptor, StoreDescriptor) static const Register VectorRegister(); // Pass value, slot and vector through the stack. static const int kStackArgumentsCount = kPassLastArgsOnStack ? 3 : 0; }; class StoreGlobalDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kName, kValue, kSlot) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kName MachineType::AnyTagged(), // kValue MachineType::TaggedSigned()) // kSlot DECLARE_DESCRIPTOR(StoreGlobalDescriptor, CallInterfaceDescriptor) static const bool kPassLastArgsOnStack = StoreDescriptor::kPassLastArgsOnStack; // Pass value and slot through the stack. 
static const int kStackArgumentsCount = kPassLastArgsOnStack ? 2 : 0; static const Register NameRegister() { return StoreDescriptor::NameRegister(); } static const Register ValueRegister() { return StoreDescriptor::ValueRegister(); } static const Register SlotRegister() { return StoreDescriptor::SlotRegister(); } }; class StoreGlobalWithVectorDescriptor : public StoreGlobalDescriptor { public: DEFINE_PARAMETERS(kName, kValue, kSlot, kVector) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kName MachineType::AnyTagged(), // kValue MachineType::TaggedSigned(), // kSlot MachineType::AnyTagged()) // kVector DECLARE_DESCRIPTOR(StoreGlobalWithVectorDescriptor, StoreGlobalDescriptor) static const Register VectorRegister() { return StoreWithVectorDescriptor::VectorRegister(); } // Pass value, slot and vector through the stack. static const int kStackArgumentsCount = kPassLastArgsOnStack ? 3 : 0; }; class LoadWithVectorDescriptor : public LoadDescriptor { public: DEFINE_PARAMETERS(kReceiver, kName, kSlot, kVector) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kReceiver MachineType::AnyTagged(), // kName MachineType::TaggedSigned(), // kSlot MachineType::AnyTagged()) // kVector DECLARE_DESCRIPTOR(LoadWithVectorDescriptor, LoadDescriptor) static const Register VectorRegister(); #if V8_TARGET_ARCH_IA32 static const bool kPassLastArgsOnStack = true; #else static const bool kPassLastArgsOnStack = false; #endif // Pass vector through the stack. static const int kStackArgumentsCount = kPassLastArgsOnStack ? 1 : 0; }; class LoadGlobalWithVectorDescriptor : public LoadGlobalDescriptor { public: DEFINE_PARAMETERS(kName, kSlot, kVector) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kName MachineType::TaggedSigned(), // kSlot MachineType::AnyTagged()) // kVector DECLARE_DESCRIPTOR(LoadGlobalWithVectorDescriptor, LoadGlobalDescriptor) #if V8_TARGET_ARCH_IA32 // On ia32, LoadWithVectorDescriptor passes vector on the stack and thus we // need to choose a new register here. 
static const Register VectorRegister() { return edx; } #else static const Register VectorRegister() { return LoadWithVectorDescriptor::VectorRegister(); } #endif }; class FastNewFunctionContextDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kScopeInfo, kSlots) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kScopeInfo MachineType::Int32()) // kSlots DECLARE_DESCRIPTOR(FastNewFunctionContextDescriptor, CallInterfaceDescriptor) static const Register ScopeInfoRegister(); static const Register SlotsRegister(); }; class FastNewObjectDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kTarget, kNewTarget) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kTarget MachineType::AnyTagged()) // kNewTarget DECLARE_DESCRIPTOR(FastNewObjectDescriptor, CallInterfaceDescriptor) static const Register TargetRegister(); static const Register NewTargetRegister(); }; class RecordWriteDescriptor final : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS_NO_CONTEXT(kObject, kSlot, kRememberedSet, kFPMode) DEFINE_PARAMETER_TYPES(MachineType::TaggedPointer(), // kObject MachineType::Pointer(), // kSlot MachineType::TaggedSigned(), // kRememberedSet MachineType::TaggedSigned()) // kFPMode DECLARE_DESCRIPTOR(RecordWriteDescriptor, CallInterfaceDescriptor) }; class EphemeronKeyBarrierDescriptor final : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS_NO_CONTEXT(kObject, kSlotAddress, kFPMode) DEFINE_PARAMETER_TYPES(MachineType::TaggedPointer(), // kObject MachineType::Pointer(), // kSlotAddress MachineType::TaggedSigned()) // kFPMode DECLARE_DESCRIPTOR(EphemeronKeyBarrierDescriptor, CallInterfaceDescriptor) }; class TypeConversionDescriptor final : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kArgument) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged()) DECLARE_DESCRIPTOR(TypeConversionDescriptor, CallInterfaceDescriptor) static const Register ArgumentRegister(); }; class TypeConversionStackParameterDescriptor 
final : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kArgument) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged()) DECLARE_DESCRIPTOR(TypeConversionStackParameterDescriptor, CallInterfaceDescriptor) }; class AsyncFunctionStackParameterDescriptor final : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kPromise, kResult) DEFINE_PARAMETER_TYPES(MachineType::TaggedPointer(), MachineType::AnyTagged()) DECLARE_DESCRIPTOR(AsyncFunctionStackParameterDescriptor, CallInterfaceDescriptor) }; class GetPropertyDescriptor final : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kObject, kKey) DECLARE_DEFAULT_DESCRIPTOR(GetPropertyDescriptor, CallInterfaceDescriptor) }; class TypeofDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kObject) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged()) DECLARE_DESCRIPTOR(TypeofDescriptor, CallInterfaceDescriptor) }; class CallTrampolineDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kFunction, kActualArgumentsCount) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kFunction MachineType::Int32()) // kActualArgumentsCount DECLARE_DESCRIPTOR(CallTrampolineDescriptor, CallInterfaceDescriptor) }; class CallVarargsDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kTarget, kActualArgumentsCount, kArgumentsLength, kArgumentsList) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kTarget MachineType::Int32(), // kActualArgumentsCount MachineType::Int32(), // kArgumentsLength MachineType::AnyTagged()) // kArgumentsList DECLARE_DESCRIPTOR(CallVarargsDescriptor, CallInterfaceDescriptor) }; class CallForwardVarargsDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kTarget, kActualArgumentsCount, kStartIndex) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kTarget MachineType::Int32(), // kActualArgumentsCount MachineType::Int32()) // kStartIndex DECLARE_DESCRIPTOR(CallForwardVarargsDescriptor, CallInterfaceDescriptor) }; class 
CallFunctionTemplateDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kFunctionTemplateInfo, kArgumentsCount) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kFunctionTemplateInfo MachineType::IntPtr()) // kArgumentsCount DECLARE_DESCRIPTOR(CallFunctionTemplateDescriptor, CallInterfaceDescriptor) }; class CallWithSpreadDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kTarget, kArgumentsCount, kSpread) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kTarget MachineType::Int32(), // kArgumentsCount MachineType::AnyTagged()) // kSpread DECLARE_DESCRIPTOR(CallWithSpreadDescriptor, CallInterfaceDescriptor) }; class CallWithArrayLikeDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kTarget, kArgumentsList) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kTarget MachineType::AnyTagged()) // kArgumentsList DECLARE_DESCRIPTOR(CallWithArrayLikeDescriptor, CallInterfaceDescriptor) }; class ConstructVarargsDescriptor : public CallInterfaceDescriptor { public: DEFINE_JS_PARAMETERS(kArgumentsLength, kArgumentsList) DEFINE_JS_PARAMETER_TYPES(MachineType::Int32(), // kArgumentsLength MachineType::AnyTagged()) // kArgumentsList DECLARE_DESCRIPTOR(ConstructVarargsDescriptor, CallInterfaceDescriptor) }; class ConstructForwardVarargsDescriptor : public CallInterfaceDescriptor { public: DEFINE_JS_PARAMETERS(kStartIndex) DEFINE_JS_PARAMETER_TYPES(MachineType::Int32()) DECLARE_DESCRIPTOR(ConstructForwardVarargsDescriptor, CallInterfaceDescriptor) }; class ConstructWithSpreadDescriptor : public CallInterfaceDescriptor { public: DEFINE_JS_PARAMETERS(kSpread) DEFINE_JS_PARAMETER_TYPES(MachineType::AnyTagged()) DECLARE_DESCRIPTOR(ConstructWithSpreadDescriptor, CallInterfaceDescriptor) }; class ConstructWithArrayLikeDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kTarget, kNewTarget, kArgumentsList) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kTarget MachineType::AnyTagged(), // 
kNewTarget MachineType::AnyTagged()) // kArgumentsList DECLARE_DESCRIPTOR(ConstructWithArrayLikeDescriptor, CallInterfaceDescriptor) }; // TODO(ishell): consider merging this with ArrayConstructorDescriptor class ConstructStubDescriptor : public CallInterfaceDescriptor { public: // TODO(jgruber): Remove the unused allocation site parameter. DEFINE_JS_PARAMETERS(kAllocationSite) DEFINE_JS_PARAMETER_TYPES(MachineType::AnyTagged()) // TODO(ishell): Use DECLARE_JS_COMPATIBLE_DESCRIPTOR if registers match DECLARE_DESCRIPTOR(ConstructStubDescriptor, CallInterfaceDescriptor) }; class AbortDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS_NO_CONTEXT(kMessageOrMessageId) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged()) DECLARE_DESCRIPTOR(AbortDescriptor, CallInterfaceDescriptor) }; class AllocateHeapNumberDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS_NO_CONTEXT() DEFINE_PARAMETER_TYPES() DECLARE_DESCRIPTOR(AllocateHeapNumberDescriptor, CallInterfaceDescriptor) }; class ArrayConstructorDescriptor : public CallInterfaceDescriptor { public: DEFINE_JS_PARAMETERS(kAllocationSite) DEFINE_JS_PARAMETER_TYPES(MachineType::AnyTagged()) DECLARE_JS_COMPATIBLE_DESCRIPTOR(ArrayConstructorDescriptor, CallInterfaceDescriptor, 1) }; class ArrayNArgumentsConstructorDescriptor : public CallInterfaceDescriptor { public: // This descriptor declares only register arguments while respective number // of JS arguments stay on the expression stack. // The ArrayNArgumentsConstructor builtin does not access stack arguments // directly it just forwards them to the runtime function. 
DEFINE_PARAMETERS(kFunction, kAllocationSite, kActualArgumentsCount) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kFunction, MachineType::AnyTagged(), // kAllocationSite MachineType::Int32()) // kActualArgumentsCount DECLARE_DESCRIPTOR(ArrayNArgumentsConstructorDescriptor, CallInterfaceDescriptor) }; class ArrayNoArgumentConstructorDescriptor : public ArrayNArgumentsConstructorDescriptor { public: // This descriptor declares same register arguments as the parent // ArrayNArgumentsConstructorDescriptor and it declares indices for // JS arguments passed on the expression stack. DEFINE_PARAMETERS(kFunction, kAllocationSite, kActualArgumentsCount, kFunctionParameter) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kFunction MachineType::AnyTagged(), // kAllocationSite MachineType::Int32(), // kActualArgumentsCount MachineType::AnyTagged()) // kFunctionParameter DECLARE_DESCRIPTOR(ArrayNoArgumentConstructorDescriptor, ArrayNArgumentsConstructorDescriptor) }; class ArraySingleArgumentConstructorDescriptor : public ArrayNArgumentsConstructorDescriptor { public: // This descriptor declares same register arguments as the parent // ArrayNArgumentsConstructorDescriptor and it declares indices for // JS arguments passed on the expression stack. 
DEFINE_PARAMETERS(kFunction, kAllocationSite, kActualArgumentsCount, kFunctionParameter, kArraySizeSmiParameter) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kFunction MachineType::AnyTagged(), // kAllocationSite MachineType::Int32(), // kActualArgumentsCount MachineType::AnyTagged(), // kFunctionParameter MachineType::AnyTagged()) // kArraySizeSmiParameter DECLARE_DESCRIPTOR(ArraySingleArgumentConstructorDescriptor, ArrayNArgumentsConstructorDescriptor) }; class CompareDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kLeft, kRight) DECLARE_DESCRIPTOR(CompareDescriptor, CallInterfaceDescriptor) }; class BinaryOpDescriptor : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kLeft, kRight) DECLARE_DESCRIPTOR(BinaryOpDescriptor, CallInterfaceDescriptor) }; // This desciptor is shared among String.p.charAt/charCodeAt/codePointAt // as they all have the same interface. class StringAtDescriptor final : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kReceiver, kPosition) // TODO(turbofan): Return untagged value here. DEFINE_RESULT_AND_PARAMETER_TYPES(MachineType::TaggedSigned(), // result 1 MachineType::AnyTagged(), // kReceiver MachineType::IntPtr()) // kPosition DECLARE_DESCRIPTOR(StringAtDescriptor, CallInterfaceDescriptor) }; class StringSubstringDescriptor final : public CallInterfaceDescriptor { public: DEFINE_PARAMETERS(kString, kFrom, kTo) DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(), // kString MachineType::IntPtr(), // kFrom MachineType::IntPtr()) // kTo // TODO(turbofan): Allow builtins to return untagged values. 
// NOTE(review): this chunk was collapsed onto single lines by extraction; it
// is reformatted here with conventional indentation. All non-comment tokens
// are unchanged. These are V8 call-interface descriptor declarations: each
// class describes a builtin/stub calling convention via the DEFINE_* /
// DECLARE_* macro family (declared earlier in this header and #undef'd below).

  // Tail of a descriptor class whose opening lines precede this chunk
  // (StringSubstringDescriptor).
  DECLARE_DESCRIPTOR(StringSubstringDescriptor, CallInterfaceDescriptor)
};

class ArgumentsAdaptorDescriptor : public CallInterfaceDescriptor {
 public:
  DEFINE_JS_PARAMETERS(kExpectedArgumentsCount)
  DEFINE_JS_PARAMETER_TYPES(MachineType::Int32())
  DECLARE_DESCRIPTOR(ArgumentsAdaptorDescriptor, CallInterfaceDescriptor)
};

class CppBuiltinAdaptorDescriptor : public CallInterfaceDescriptor {
 public:
  DEFINE_JS_PARAMETERS(kCFunction)
  DEFINE_JS_PARAMETER_TYPES(MachineType::Pointer())
  // NOTE(review): the trailing '1' argument's meaning is defined by the
  // DECLARE_JS_COMPATIBLE_DESCRIPTOR macro, which is not visible here.
  DECLARE_JS_COMPATIBLE_DESCRIPTOR(CppBuiltinAdaptorDescriptor,
                                   CallInterfaceDescriptor, 1)
};

class CEntry1ArgvOnStackDescriptor : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS(kArity,          // register argument
                    kCFunction,      // register argument
                    kPadding,        // stack argument 1 (just padding)
                    kArgcSmi,        // stack argument 2
                    kTargetCopy,     // stack argument 3
                    kNewTargetCopy)  // stack argument 4
  DEFINE_PARAMETER_TYPES(MachineType::Int32(),      // kArity
                         MachineType::Pointer(),    // kCFunction
                         MachineType::AnyTagged(),  // kPadding
                         MachineType::AnyTagged(),  // kArgcSmi
                         MachineType::AnyTagged(),  // kTargetCopy
                         MachineType::AnyTagged())  // kNewTargetCopy
  DECLARE_DESCRIPTOR(CEntry1ArgvOnStackDescriptor, CallInterfaceDescriptor)
};

class ApiCallbackDescriptor : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS(kApiFunctionAddress, kActualArgumentsCount, kCallData,
                    kHolder)
  // receiver is implicit stack argument 1
  // argv are implicit stack arguments [2, 2 + kArgc[
  DEFINE_PARAMETER_TYPES(MachineType::Pointer(),    // kApiFunctionAddress
                         MachineType::IntPtr(),     // kActualArgumentsCount
                         MachineType::AnyTagged(),  // kCallData
                         MachineType::AnyTagged())  // kHolder
  DECLARE_DESCRIPTOR(ApiCallbackDescriptor, CallInterfaceDescriptor)
};

class ApiGetterDescriptor : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS(kReceiver, kHolder, kCallback)
  DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(),  // kReceiver
                         MachineType::AnyTagged(),  // kHolder
                         MachineType::AnyTagged())  // kCallback
  DECLARE_DESCRIPTOR(ApiGetterDescriptor, CallInterfaceDescriptor)

  // Fixed-register accessors -- presumably defined per architecture; the
  // definitions are not visible in this chunk.
  static const Register ReceiverRegister();
  static const Register HolderRegister();
  static const Register CallbackRegister();
};

// TODO(turbofan): We should probably rename this to GrowFastElementsDescriptor.
class GrowArrayElementsDescriptor : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS(kObject, kKey)
  DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(),  // kObject
                         MachineType::AnyTagged())  // kKey
  DECLARE_DESCRIPTOR(GrowArrayElementsDescriptor, CallInterfaceDescriptor)

  static const Register ObjectRegister();
  static const Register KeyRegister();
};

class NewArgumentsElementsDescriptor final : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS(kFrame, kLength, kMappedCount)
  DEFINE_PARAMETER_TYPES(MachineType::Pointer(),       // kFrame
                         MachineType::TaggedSigned(),  // kLength
                         MachineType::TaggedSigned())  // kMappedCount
  DECLARE_DESCRIPTOR(NewArgumentsElementsDescriptor, CallInterfaceDescriptor)
};

class V8_EXPORT_PRIVATE InterpreterDispatchDescriptor
    : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS(kAccumulator, kBytecodeOffset, kBytecodeArray,
                    kDispatchTable)
  DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(),  // kAccumulator
                         MachineType::IntPtr(),     // kBytecodeOffset
                         MachineType::AnyTagged(),  // kBytecodeArray
                         MachineType::IntPtr())     // kDispatchTable
  DECLARE_DESCRIPTOR(InterpreterDispatchDescriptor, CallInterfaceDescriptor)
};

class InterpreterPushArgsThenCallDescriptor : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS(kNumberOfArguments, kFirstArgument, kFunction)
  DEFINE_PARAMETER_TYPES(MachineType::Int32(),      // kNumberOfArguments
                         MachineType::Pointer(),    // kFirstArgument
                         MachineType::AnyTagged())  // kFunction
  DECLARE_DESCRIPTOR(InterpreterPushArgsThenCallDescriptor,
                     CallInterfaceDescriptor)
};

class InterpreterPushArgsThenConstructDescriptor
    : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS(kNumberOfArguments, kFirstArgument, kConstructor,
                    kNewTarget, kFeedbackElement)
  DEFINE_PARAMETER_TYPES(MachineType::Int32(),      // kNumberOfArguments
                         MachineType::Pointer(),    // kFirstArgument
                         MachineType::AnyTagged(),  // kConstructor
                         MachineType::AnyTagged(),  // kNewTarget
                         MachineType::AnyTagged())  // kFeedbackElement
  DECLARE_DESCRIPTOR(InterpreterPushArgsThenConstructDescriptor,
                     CallInterfaceDescriptor)

#if V8_TARGET_ARCH_IA32
  static const bool kPassLastArgsOnStack = true;
#else
  static const bool kPassLastArgsOnStack = false;
#endif

  // Pass constructor, new target and feedback element through the stack.
  static const int kStackArgumentsCount = kPassLastArgsOnStack ? 3 : 0;
};

class InterpreterCEntry1Descriptor : public CallInterfaceDescriptor {
 public:
  // Leading '1' is the result count declared by the macro.
  DEFINE_RESULT_AND_PARAMETERS(1, kNumberOfArguments, kFirstArgument,
                               kFunctionEntry)
  DEFINE_RESULT_AND_PARAMETER_TYPES(MachineType::AnyTagged(),  // result 1
                                    MachineType::Int32(),  // kNumberOfArguments
                                    MachineType::Pointer(),  // kFirstArgument
                                    MachineType::Pointer())  // kFunctionEntry
  DECLARE_DESCRIPTOR(InterpreterCEntry1Descriptor, CallInterfaceDescriptor)
};

class InterpreterCEntry2Descriptor : public CallInterfaceDescriptor {
 public:
  // Same shape as InterpreterCEntry1Descriptor but with two results.
  DEFINE_RESULT_AND_PARAMETERS(2, kNumberOfArguments, kFirstArgument,
                               kFunctionEntry)
  DEFINE_RESULT_AND_PARAMETER_TYPES(MachineType::AnyTagged(),  // result 1
                                    MachineType::AnyTagged(),  // result 2
                                    MachineType::Int32(),  // kNumberOfArguments
                                    MachineType::Pointer(),  // kFirstArgument
                                    MachineType::Pointer())  // kFunctionEntry
  DECLARE_DESCRIPTOR(InterpreterCEntry2Descriptor, CallInterfaceDescriptor)
};

class ResumeGeneratorDescriptor final : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS(kValue, kGenerator)
  DEFINE_PARAMETER_TYPES(MachineType::AnyTagged(),  // kValue
                         MachineType::AnyTagged())  // kGenerator
  DECLARE_DESCRIPTOR(ResumeGeneratorDescriptor, CallInterfaceDescriptor)
};

class FrameDropperTrampolineDescriptor final : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS(kRestartFp)
  DEFINE_PARAMETER_TYPES(MachineType::Pointer())
  DECLARE_DESCRIPTOR(FrameDropperTrampolineDescriptor, CallInterfaceDescriptor)
};

class RunMicrotasksEntryDescriptor final : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS_ENTRY(kRootRegisterValue, kMicrotaskQueue)
  DEFINE_PARAMETER_TYPES(MachineType::Pointer(),  // kRootRegisterValue
                         MachineType::Pointer())  // kMicrotaskQueue
  DECLARE_DESCRIPTOR(RunMicrotasksEntryDescriptor, CallInterfaceDescriptor)
};

class RunMicrotasksDescriptor final : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS(kMicrotaskQueue)
  DEFINE_PARAMETER_TYPES(MachineType::Pointer())
  DECLARE_DESCRIPTOR(RunMicrotasksDescriptor, CallInterfaceDescriptor)

  static Register MicrotaskQueueRegister();
};

class WasmMemoryGrowDescriptor final : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS_NO_CONTEXT(kNumPages)
  DEFINE_RESULT_AND_PARAMETER_TYPES(MachineType::Int32(),  // result 1
                                    MachineType::Int32())  // kNumPages
  DECLARE_DESCRIPTOR(WasmMemoryGrowDescriptor, CallInterfaceDescriptor)
};

class WasmTableGetDescriptor final : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS_NO_CONTEXT(kTableIndex, kEntryIndex)
  DEFINE_RESULT_AND_PARAMETER_TYPES(MachineType::AnyTagged(),     // result 1
                                    MachineType::TaggedSigned(),  // kTableIndex
                                    MachineType::Int32())         // kEntryIndex
  DECLARE_DESCRIPTOR(WasmTableGetDescriptor, CallInterfaceDescriptor)
};

class WasmTableSetDescriptor final : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS_NO_CONTEXT(kTableIndex, kEntryIndex, kValue)
  DEFINE_PARAMETER_TYPES(MachineType::TaggedSigned(),  // kTableIndex
                         MachineType::Int32(),         // kEntryIndex
                         MachineType::AnyTagged())     // kValue
  DECLARE_DESCRIPTOR(WasmTableSetDescriptor, CallInterfaceDescriptor)
};

class WasmThrowDescriptor final : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS_NO_CONTEXT(kException)
  DEFINE_RESULT_AND_PARAMETER_TYPES(MachineType::AnyTagged(),  // result 1
                                    MachineType::AnyTagged())  // kException
  DECLARE_DESCRIPTOR(WasmThrowDescriptor, CallInterfaceDescriptor)
};

class I64ToBigIntDescriptor final : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS_NO_CONTEXT(kArgument)
  DEFINE_PARAMETER_TYPES(MachineType::Int64())  // kArgument
  DECLARE_DESCRIPTOR(I64ToBigIntDescriptor, CallInterfaceDescriptor)
};

class BigIntToI64Descriptor final : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS(kArgument)
  DEFINE_RESULT_AND_PARAMETER_TYPES(MachineType::Int64(),      // result 1
                                    MachineType::AnyTagged())  // kArgument
  DECLARE_DESCRIPTOR(BigIntToI64Descriptor, CallInterfaceDescriptor)
};

class WasmAtomicNotifyDescriptor final : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS_NO_CONTEXT(kAddress, kCount)
  DEFINE_RESULT_AND_PARAMETER_TYPES(MachineType::Uint32(),  // result 1
                                    MachineType::Uint32(),  // kAddress
                                    MachineType::Uint32())  // kCount
  DECLARE_DESCRIPTOR(WasmAtomicNotifyDescriptor, CallInterfaceDescriptor)
};

class WasmI32AtomicWaitDescriptor final : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS_NO_CONTEXT(kAddress, kExpectedValue, kTimeout)
  DEFINE_RESULT_AND_PARAMETER_TYPES(MachineType::Uint32(),   // result 1
                                    MachineType::Uint32(),   // kAddress
                                    MachineType::Int32(),    // kExpectedValue
                                    MachineType::Float64())  // kTimeout
  DECLARE_DESCRIPTOR(WasmI32AtomicWaitDescriptor, CallInterfaceDescriptor)
};

class WasmI64AtomicWaitDescriptor final : public CallInterfaceDescriptor {
 public:
  // 64-bit expected value is split into high/low 32-bit halves.
  DEFINE_PARAMETERS_NO_CONTEXT(kAddress, kExpectedValueHigh, kExpectedValueLow,
                               kTimeout)
  DEFINE_RESULT_AND_PARAMETER_TYPES(
      MachineType::Uint32(),   // result 1
      MachineType::Uint32(),   // kAddress
      MachineType::Uint32(),   // kExpectedValueHigh
      MachineType::Uint32(),   // kExpectedValueLow
      MachineType::Float64())  // kTimeout
  DECLARE_DESCRIPTOR(WasmI64AtomicWaitDescriptor, CallInterfaceDescriptor)
};

class CloneObjectWithVectorDescriptor final : public CallInterfaceDescriptor {
 public:
  DEFINE_PARAMETERS(kSource, kFlags, kSlot, kVector)
  DEFINE_RESULT_AND_PARAMETER_TYPES(MachineType::TaggedPointer(),  // result 1
                                    MachineType::AnyTagged(),      // kSource
                                    MachineType::TaggedSigned(),   // kFlags
                                    MachineType::TaggedSigned(),   // kSlot
                                    MachineType::AnyTagged())      // kVector
  DECLARE_DESCRIPTOR(CloneObjectWithVectorDescriptor, CallInterfaceDescriptor)
};

// Stamps out a default descriptor class for every TFS builtin listed in
// BUILTIN_LIST_TFS.
#define DEFINE_TFS_BUILTIN_DESCRIPTOR(Name, ...)                          \
  class Name##Descriptor : public CallInterfaceDescriptor {               \
   public:                                                                \
    DEFINE_PARAMETERS(__VA_ARGS__)                                        \
    DECLARE_DEFAULT_DESCRIPTOR(Name##Descriptor, CallInterfaceDescriptor) \
  };
BUILTIN_LIST_TFS(DEFINE_TFS_BUILTIN_DESCRIPTOR)
#undef DEFINE_TFS_BUILTIN_DESCRIPTOR

// The macro family is header-local; undefine everything so it cannot leak
// into including translation units.
#undef DECLARE_DEFAULT_DESCRIPTOR
#undef DECLARE_DESCRIPTOR_WITH_BASE
#undef DECLARE_DESCRIPTOR
#undef DECLARE_JS_COMPATIBLE_DESCRIPTOR
#undef DEFINE_RESULT_AND_PARAMETERS
#undef DEFINE_RESULT_AND_PARAMETERS_NO_CONTEXT
#undef DEFINE_PARAMETERS
#undef DEFINE_PARAMETERS_NO_CONTEXT
#undef DEFINE_RESULT_AND_PARAMETER_TYPES
#undef DEFINE_PARAMETER_TYPES
#undef DEFINE_JS_PARAMETERS
#undef DEFINE_JS_PARAMETER_TYPES

// We define the association between CallDescriptors::Key and the specialized
// descriptor here to reduce boilerplate and mistakes.
#define DEF_KEY(name, ...) \
  CallDescriptors::Key name##Descriptor::key() { return CallDescriptors::name; }
INTERFACE_DESCRIPTOR_LIST(DEF_KEY)
#undef DEF_KEY

}  // namespace internal
}  // namespace v8

#endif  // V8_INTERFACE_DESCRIPTORS_H_
```
```smalltalk
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Threading.Tasks;
using Windows.UI.Xaml.Media;

namespace Microsoft.Toolkit.Uwp.UI.Controls.Markdown.Render
{
    /// <summary>
    /// An interface used to resolve images in the markdown.
    /// Implementations turn a markdown image URL into a renderable
    /// <see cref="ImageSource"/> for the renderer to display.
    /// </summary>
    public interface IImageResolver
    {
        /// <summary>
        /// Resolves an Image from a Url.
        /// </summary>
        /// <param name="url">Url to Resolve.</param>
        /// <param name="tooltip">Tooltip for Image.</param>
        /// <returns>A task producing the resolved <see cref="ImageSource"/>;
        /// behaviour when the URL cannot be resolved is implementation-defined
        /// (not specified by this interface).</returns>
        Task<ImageSource> ResolveImageAsync(string url, string tooltip);
    }
}
```
The mangrove pitta (Pitta megarhyncha) is a species of passerine bird in the family Pittidae native to the eastern Indian Subcontinent and western Southeast Asia. It is part of a superspecies where it is placed with the Indian pitta, the fairy pitta and the blue-winged pitta but has no recognized subspecies. A colourful bird, it has a black head with brown crown, white throat, greenish upper parts, buff underparts and reddish vent area. Its range extends from India to Malaysia and Indonesia. It is found in mangrove and nipa palm forests where it feeds on crustaceans, mollusks and insects. Its call, sometimes rendered as wieuw-wieuw, is sung from a high perch on a mangrove tree. Taxonomy The mangrove pitta was first described by German ornithologist Hermann Schlegel in 1863. Its species name is derived from the Ancient Greek words mega- "large", and rhynchos "beak". It forms a superspecies with the Indian pitta (P. brachyura), fairy pitta (P. nympha) and blue-winged pitta (P. moluccensis). Alternate common names include: Larger blue-winged/Malay pitta, Brève des palétuviers (in French), Große Blauflügelpitta (in German), and Pita de Manglar (in Spanish). There are no recognized subspecies. Description Measuring in length, the mangrove pitta has a black head with a buff-coloured crown, white chin and buff underparts. The shoulders and mantle are greenish and the vent is reddish. Juveniles have similar patterned plumage but are duller. It resembles the blue-winged pitta but can be distinguished by its much heavier bill. Its call, transcribed as wieuw-wieuw, has been noted to be "more slurred" than that of the blue-winged pitta. Distribution and habitat The mangrove pitta is native to Bangladesh, India, Indonesia, Malaysia, Myanmar, Singapore, and Thailand (primarily the west coast of the southern Thai peninsula). Its natural habitat is specialised, being restricted to subtropical or tropical mangrove forests and nipa palm stands. 
It is threatened by habitat loss. Its diet consists of crustaceans, mollusks and terrestrial insects. Behaviour While all pittas are noted for being difficult to study and spot in the wild, the mangrove pitta is one of the easier ones to spot as it sits high up in mangrove trees and calls. A tape recording of its call will often bring it forth. It tends to be vocal while brooding but quiet at other times. Notes References Further reading External links Mangrove pitta bibliography Pitta distribution maps, all species mangrove pitta Birds of Bangladesh Birds of Myanmar Birds of the Malay Peninsula Birds of Sumatra mangrove pitta Articles containing video clips Taxonomy articles created by Polbot Taxa named by Hermann Schlegel
Ramnganing Muivah, also known as Ram Muivah, is a retired Indian bureaucrat turned politician. He served as an IAS officer (1985 batch) for 35 years and was the Secretary of the North Eastern Council before retiring in 2020. He was elected to the Manipur Legislative Assembly from Ukhrul in the 2022 Manipur Legislative Assembly election as a member of the Naga People's Front. References Living people Manipur MLAs 2022–2027 Naga People's Front politicians 1960 births Naga people Alumni of the University of Birmingham
```java
// Package-level annotation file. Note the package-info form: the annotation
// precedes the package declaration, and the import for the annotation type
// legally follows it. @ExportPackage presumably marks this package as
// exported (its name and com.yahoo.osgi.annotation origin suggest OSGi
// bundle export -- semantics are defined by the annotation type, not here).
@ExportPackage
package com.yahoo.vespa.orchestrator.status;

import com.yahoo.osgi.annotation.ExportPackage;
```
The Tootinaowaziibeeng First Nation is an Anishinabe band in Manitoba. They are located on reserve land at IR Valley River 63A. They number about 600, with a similar number living in various communities away from the reserve. Their land lies east of Roblin and west of Grandview. The band members are associated with the West Region Tribal Council. References Valley River 63A Tootinaowaziibeeng First Nation First Nations governments in Manitoba
```kotlin
package ktx.artemis

import com.artemis.Component
import com.artemis.EntityTransmuter
import com.artemis.EntityTransmuterFactory

/**
 * Registers component type [T] to be added by the transmuter built from this factory.
 *
 * Shorthand for `add(T::class.java)` using a reified type parameter.
 *
 * @receiver the [EntityTransmuterFactory] that will build the [EntityTransmuter].
 * @param T the [Component] type added to entities on transmutation.
 * @return the [EntityTransmuterFactory], allowing further chained configuration.
 */
inline fun <reified T : Component> EntityTransmuterFactory.add(): EntityTransmuterFactory =
  this.add(T::class.java)

/**
 * Registers component type [T] to be removed by the transmuter built from this factory.
 *
 * Shorthand for `remove(T::class.java)` using a reified type parameter.
 *
 * @receiver the [EntityTransmuterFactory] that will build the [EntityTransmuter].
 * @param T the [Component] type removed from entities on transmutation.
 * @return the [EntityTransmuterFactory], allowing further chained configuration.
 */
inline fun <reified T : Component> EntityTransmuterFactory.remove(): EntityTransmuterFactory =
  this.remove(T::class.java)
```
```xml
import { webpackCliTask, argv, logger } from 'just-scripts';
import * as path from 'path';
import * as fs from 'fs';
import { execSync } from 'child_process';
import { getJustArgv } from './argv';

/**
 * Just task that runs the webpack CLI.
 * Forwards `--mode=production` when the `production` CLI flag is set and
 * raises the Node old-space heap limit for large builds.
 */
export function webpack() {
  const args = getJustArgv();
  return webpackCliTask({
    // Only the mode flag is forwarded; everything else comes from the
    // project's webpack config file.
    webpackCliArgs: args.production ? ['--mode=production'] : [],
    // 4 GB old-space cap for memory-heavy builds.
    nodeArgs: ['--max-old-space-size=4096'],
    env: process.env,
  });
}

/**
 * Returns an async Just task that launches webpack-dev-server on the first
 * free port in [4322, 4400].
 *
 * The task silently does nothing when the resolved webpack config file does
 * not exist. NOTE(review): it mutates `process.env.cached` as a side channel
 * (presumably read by the webpack config -- confirm against that file), and
 * `execSync` blocks the task until the dev server process exits.
 *
 * @param options overrides merged over the parsed CLI arguments.
 */
export function webpackDevServer(
  options: Partial<{
    /**
     * Open the default browser
     * @default 'true'
     */
    open: 'true' | 'false';
    /**
     * Path to the dev-server webpack config, resolved against the cwd.
     * @default 'webpack.serve.config.js'
     */
    webpackConfig: string;
    /**
     * Exposed to the webpack config via process.env.cached.
     * @default false
     */
    cached: boolean;
  }> = {},
) {
  return async () => {
    // Explicit options win over CLI flags.
    const args = { ...argv(), ...options };
    const fp = (await import('find-free-port')).default;
    const webpackConfigFilePath = args.webpackConfig || 'webpack.serve.config.js';
    const configPath = path.resolve(process.cwd(), webpackConfigFilePath);
    const port = await fp(4322, 4400);
    // Browser opens unless explicitly disabled with open='false'.
    const openBrowser = args.open === 'false' ? '' : '--open';
    if (fs.existsSync(configPath)) {
      const webpackDevServerPath = require.resolve('webpack-dev-server/bin/webpack-dev-server.js');
      const cmd = `node ${webpackDevServerPath} --config ${configPath} --port ${port} ${openBrowser}`.trim();
      logger.info(`Caching enabled: ${args.cached ? 'YES' : 'NO'}`);
      logger.info('Running: ', cmd);
      // Side channel for the webpack config; set just before spawning.
      process.env.cached = String(args.cached);
      execSync(cmd, { stdio: 'inherit' });
    }
  };
}
```
```makefile # Makefile generated by XPJ for ANDROID16 -include Makefile.custom ProjectName = SnippetDeformableMesh SnippetDeformableMesh_cppfiles += ./../../SnippetCommon/ClassicMain.cpp SnippetDeformableMesh_cppfiles += ./../../SnippetDeformableMesh/SnippetDeformableMesh.cpp SnippetDeformableMesh_cppfiles += ./../../SnippetDeformableMesh/SnippetDeformableMeshRender.cpp SnippetDeformableMesh_cpp_debug_dep = $(addprefix $(DEPSDIR)/SnippetDeformableMesh/debug/, $(subst ./, , $(subst ../, , $(patsubst %.cpp, %.cpp.P, $(SnippetDeformableMesh_cppfiles))))) SnippetDeformableMesh_cc_debug_dep = $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(patsubst %.cc, %.cc.debug.P, $(SnippetDeformableMesh_ccfiles))))) SnippetDeformableMesh_c_debug_dep = $(addprefix $(DEPSDIR)/SnippetDeformableMesh/debug/, $(subst ./, , $(subst ../, , $(patsubst %.c, %.c.P, $(SnippetDeformableMesh_cfiles))))) SnippetDeformableMesh_debug_dep = $(SnippetDeformableMesh_cpp_debug_dep) $(SnippetDeformableMesh_cc_debug_dep) $(SnippetDeformableMesh_c_debug_dep) -include $(SnippetDeformableMesh_debug_dep) SnippetDeformableMesh_cpp_checked_dep = $(addprefix $(DEPSDIR)/SnippetDeformableMesh/checked/, $(subst ./, , $(subst ../, , $(patsubst %.cpp, %.cpp.P, $(SnippetDeformableMesh_cppfiles))))) SnippetDeformableMesh_cc_checked_dep = $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(patsubst %.cc, %.cc.checked.P, $(SnippetDeformableMesh_ccfiles))))) SnippetDeformableMesh_c_checked_dep = $(addprefix $(DEPSDIR)/SnippetDeformableMesh/checked/, $(subst ./, , $(subst ../, , $(patsubst %.c, %.c.P, $(SnippetDeformableMesh_cfiles))))) SnippetDeformableMesh_checked_dep = $(SnippetDeformableMesh_cpp_checked_dep) $(SnippetDeformableMesh_cc_checked_dep) $(SnippetDeformableMesh_c_checked_dep) -include $(SnippetDeformableMesh_checked_dep) SnippetDeformableMesh_cpp_profile_dep = $(addprefix $(DEPSDIR)/SnippetDeformableMesh/profile/, $(subst ./, , $(subst ../, , $(patsubst %.cpp, %.cpp.P, 
$(SnippetDeformableMesh_cppfiles))))) SnippetDeformableMesh_cc_profile_dep = $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(patsubst %.cc, %.cc.profile.P, $(SnippetDeformableMesh_ccfiles))))) SnippetDeformableMesh_c_profile_dep = $(addprefix $(DEPSDIR)/SnippetDeformableMesh/profile/, $(subst ./, , $(subst ../, , $(patsubst %.c, %.c.P, $(SnippetDeformableMesh_cfiles))))) SnippetDeformableMesh_profile_dep = $(SnippetDeformableMesh_cpp_profile_dep) $(SnippetDeformableMesh_cc_profile_dep) $(SnippetDeformableMesh_c_profile_dep) -include $(SnippetDeformableMesh_profile_dep) SnippetDeformableMesh_cpp_release_dep = $(addprefix $(DEPSDIR)/SnippetDeformableMesh/release/, $(subst ./, , $(subst ../, , $(patsubst %.cpp, %.cpp.P, $(SnippetDeformableMesh_cppfiles))))) SnippetDeformableMesh_cc_release_dep = $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(patsubst %.cc, %.cc.release.P, $(SnippetDeformableMesh_ccfiles))))) SnippetDeformableMesh_c_release_dep = $(addprefix $(DEPSDIR)/SnippetDeformableMesh/release/, $(subst ./, , $(subst ../, , $(patsubst %.c, %.c.P, $(SnippetDeformableMesh_cfiles))))) SnippetDeformableMesh_release_dep = $(SnippetDeformableMesh_cpp_release_dep) $(SnippetDeformableMesh_cc_release_dep) $(SnippetDeformableMesh_c_release_dep) -include $(SnippetDeformableMesh_release_dep) SnippetDeformableMesh_debug_hpaths := SnippetDeformableMesh_debug_hpaths += ./../../../Include SnippetDeformableMesh_debug_hpaths += ./../../../../PxShared/include SnippetDeformableMesh_debug_hpaths += ./../../../../PxShared/src/foundation/include SnippetDeformableMesh_debug_hpaths += ./../../../../PxShared/src/fastxml/include SnippetDeformableMesh_debug_hpaths += ./../../../../Externals/android-ndk-r9d/sources/cxx-stl/gnu-libstdc++/4.8/include SnippetDeformableMesh_debug_hpaths += ./../../../../Externals/android-ndk-r9d/sources/cxx-stl/gnu-libstdc++/4.8/libs/armeabi-v7a/include SnippetDeformableMesh_debug_lpaths := SnippetDeformableMesh_debug_lpaths += 
./../../../../Externals/nvToolsExt/1/lib/armv7 SnippetDeformableMesh_debug_lpaths += ./../../../Lib/android16 SnippetDeformableMesh_debug_lpaths += ./../../lib/android16 SnippetDeformableMesh_debug_lpaths += ./../../../../PxShared/lib/android16 SnippetDeformableMesh_debug_lpaths += ./../../lib/android16 SnippetDeformableMesh_debug_defines := $(SnippetDeformableMesh_custom_defines) SnippetDeformableMesh_debug_defines += ANDROID SnippetDeformableMesh_debug_defines += GLES2 SnippetDeformableMesh_debug_defines += __STDC_LIMIT_MACROS SnippetDeformableMesh_debug_defines += __ARM_ARCH_5__ SnippetDeformableMesh_debug_defines += __ARM_ARCH_5T__ SnippetDeformableMesh_debug_defines += __ARM_ARCH_5E__ SnippetDeformableMesh_debug_defines += __ARM_ARCH_5TE__ SnippetDeformableMesh_debug_defines += _DEBUG SnippetDeformableMesh_debug_defines += PX_DEBUG=1 SnippetDeformableMesh_debug_defines += PX_CHECKED=1 SnippetDeformableMesh_debug_defines += PX_SUPPORT_PVD=1 SnippetDeformableMesh_debug_defines += PX_NVTX=1 SnippetDeformableMesh_debug_defines += PHYSX_PROFILE_SDK SnippetDeformableMesh_debug_libraries := SnippetDeformableMesh_debug_libraries += SnippetUtilsDEBUG SnippetDeformableMesh_debug_libraries += log SnippetDeformableMesh_debug_libraries += gnustl_static SnippetDeformableMesh_debug_libraries += supc++ SnippetDeformableMesh_debug_libraries += m SnippetDeformableMesh_debug_libraries += c SnippetDeformableMesh_debug_libraries += nvToolsExt SnippetDeformableMesh_debug_libraries += PhysX3CommonDEBUG SnippetDeformableMesh_debug_libraries += PhysX3DEBUG SnippetDeformableMesh_debug_libraries += PhysX3VehicleDEBUG SnippetDeformableMesh_debug_libraries += PhysX3CookingDEBUG SnippetDeformableMesh_debug_libraries += PhysX3ExtensionsDEBUG SnippetDeformableMesh_debug_libraries += PhysX3CharacterKinematicDEBUG SnippetDeformableMesh_debug_libraries += SnippetUtilsDEBUG SnippetDeformableMesh_debug_libraries += PxFoundationDEBUG SnippetDeformableMesh_debug_libraries += PsFastXmlDEBUG 
SnippetDeformableMesh_debug_libraries += PxPvdSDKDEBUG SnippetDeformableMesh_debug_common_cflags := $(SnippetDeformableMesh_custom_cflags) SnippetDeformableMesh_debug_common_cflags += -MMD SnippetDeformableMesh_debug_common_cflags += $(addprefix -D, $(SnippetDeformableMesh_debug_defines)) SnippetDeformableMesh_debug_common_cflags += $(addprefix -I, $(SnippetDeformableMesh_debug_hpaths)) SnippetDeformableMesh_debug_common_cflags += -Werror SnippetDeformableMesh_debug_common_cflags += -fpic -fno-exceptions SnippetDeformableMesh_debug_common_cflags += -isysroot ../../../../Externals/android-ndk-r9d/platforms/android-16/arch-arm SnippetDeformableMesh_debug_common_cflags += -march=armv7-a -mfpu=neon -marm -mfloat-abi=softfp -mthumb-interwork SnippetDeformableMesh_debug_common_cflags += -Wall -Wextra -Wpedantic -Wstrict-aliasing=2 SnippetDeformableMesh_debug_common_cflags += -Wno-maybe-uninitialized SnippetDeformableMesh_debug_common_cflags += -Wno-pedantic SnippetDeformableMesh_debug_common_cflags += -fPIE SnippetDeformableMesh_debug_common_cflags += -g3 -gdwarf-2 SnippetDeformableMesh_debug_cflags := $(SnippetDeformableMesh_debug_common_cflags) SnippetDeformableMesh_debug_cppflags := $(SnippetDeformableMesh_debug_common_cflags) SnippetDeformableMesh_debug_cppflags += -fno-rtti SnippetDeformableMesh_debug_lflags := $(SnippetDeformableMesh_custom_lflags) SnippetDeformableMesh_debug_lflags += $(addprefix -L, $(SnippetDeformableMesh_debug_lpaths)) SnippetDeformableMesh_debug_lflags += -Wl,--start-group $(addprefix -l, $(SnippetDeformableMesh_debug_libraries)) -Wl,--end-group SnippetDeformableMesh_debug_lflags += --sysroot=../../../../Externals/android-ndk-r9d/platforms/android-16/arch-arm SnippetDeformableMesh_debug_lflags += -Wl,--no-undefined SnippetDeformableMesh_debug_lflags += -Wl,-z,noexecstack -L../../../../Externals/android-ndk-r9d/sources/cxx-stl/gnu-libstdc++/4.8/libs/armeabi-v7a 
-Wl,-rpath-link=../../../../Externals/android-ndk-r9d/platforms/android-16/arch-arm/usr/lib SnippetDeformableMesh_debug_lflags += -pie SnippetDeformableMesh_debug_objsdir = $(OBJS_DIR)/SnippetDeformableMesh_debug SnippetDeformableMesh_debug_cpp_o = $(addprefix $(SnippetDeformableMesh_debug_objsdir)/, $(subst ./, , $(subst ../, , $(patsubst %.cpp, %.cpp.o, $(SnippetDeformableMesh_cppfiles))))) SnippetDeformableMesh_debug_cc_o = $(addprefix $(SnippetDeformableMesh_debug_objsdir)/, $(subst ./, , $(subst ../, , $(patsubst %.cc, %.cc.o, $(SnippetDeformableMesh_ccfiles))))) SnippetDeformableMesh_debug_c_o = $(addprefix $(SnippetDeformableMesh_debug_objsdir)/, $(subst ./, , $(subst ../, , $(patsubst %.c, %.c.o, $(SnippetDeformableMesh_cfiles))))) SnippetDeformableMesh_debug_obj = $(SnippetDeformableMesh_debug_cpp_o) $(SnippetDeformableMesh_debug_cc_o) $(SnippetDeformableMesh_debug_c_o) SnippetDeformableMesh_debug_bin := ./../../../Bin/android16/SnippetDeformableMeshDEBUG clean_SnippetDeformableMesh_debug: @$(ECHO) clean SnippetDeformableMesh debug @$(RMDIR) $(SnippetDeformableMesh_debug_objsdir) @$(RMDIR) $(SnippetDeformableMesh_debug_bin) @$(RMDIR) $(DEPSDIR)/SnippetDeformableMesh/debug build_SnippetDeformableMesh_debug: postbuild_SnippetDeformableMesh_debug postbuild_SnippetDeformableMesh_debug: mainbuild_SnippetDeformableMesh_debug mainbuild_SnippetDeformableMesh_debug: prebuild_SnippetDeformableMesh_debug $(SnippetDeformableMesh_debug_bin) prebuild_SnippetDeformableMesh_debug: $(SnippetDeformableMesh_debug_bin): $(SnippetDeformableMesh_debug_obj) build_SnippetUtils_debug mkdir -p `dirname ./../../../Bin/android16/SnippetDeformableMeshDEBUG` $(CCLD) $(SnippetDeformableMesh_debug_obj) $(SnippetDeformableMesh_debug_lflags) -o $(SnippetDeformableMesh_debug_bin) $(ECHO) building $@ complete! 
SnippetDeformableMesh_debug_DEPDIR = $(dir $(@))/$(*F) $(SnippetDeformableMesh_debug_cpp_o): $(SnippetDeformableMesh_debug_objsdir)/%.o: $(ECHO) SnippetDeformableMesh: compiling debug $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles))... mkdir -p $(dir $(@)) $(CXX) $(SnippetDeformableMesh_debug_cppflags) -c $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles)) -o $@ @mkdir -p $(dir $(addprefix $(DEPSDIR)/SnippetDeformableMesh/debug/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles)))))) cp $(SnippetDeformableMesh_debug_DEPDIR).d $(addprefix $(DEPSDIR)/SnippetDeformableMesh/debug/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles))))).P; \ sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' \ -e '/^$$/ d' -e 's/$$/ :/' < $(SnippetDeformableMesh_debug_DEPDIR).d >> $(addprefix $(DEPSDIR)/SnippetDeformableMesh/debug/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles))))).P; \ rm -f $(SnippetDeformableMesh_debug_DEPDIR).d $(SnippetDeformableMesh_debug_cc_o): $(SnippetDeformableMesh_debug_objsdir)/%.o: $(ECHO) SnippetDeformableMesh: compiling debug $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles))... 
mkdir -p $(dir $(@)) $(CXX) $(SnippetDeformableMesh_debug_cppflags) -c $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles)) -o $@ mkdir -p $(dir $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles)))))) cp $(SnippetDeformableMesh_debug_DEPDIR).d $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles))))).debug.P; \ sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' \ -e '/^$$/ d' -e 's/$$/ :/' < $(SnippetDeformableMesh_debug_DEPDIR).d >> $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles))))).debug.P; \ rm -f $(SnippetDeformableMesh_debug_DEPDIR).d $(SnippetDeformableMesh_debug_c_o): $(SnippetDeformableMesh_debug_objsdir)/%.o: $(ECHO) SnippetDeformableMesh: compiling debug $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_cfiles))... 
mkdir -p $(dir $(@)) $(CC) $(SnippetDeformableMesh_debug_cflags) -c $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_cfiles)) -o $@ @mkdir -p $(dir $(addprefix $(DEPSDIR)/SnippetDeformableMesh/debug/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_cfiles)))))) cp $(SnippetDeformableMesh_debug_DEPDIR).d $(addprefix $(DEPSDIR)/SnippetDeformableMesh/debug/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_cfiles))))).P; \ sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' \ -e '/^$$/ d' -e 's/$$/ :/' < $(SnippetDeformableMesh_debug_DEPDIR).d >> $(addprefix $(DEPSDIR)/SnippetDeformableMesh/debug/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_debug_objsdir),, $@))), $(SnippetDeformableMesh_cfiles))))).P; \ rm -f $(SnippetDeformableMesh_debug_DEPDIR).d SnippetDeformableMesh_checked_hpaths := SnippetDeformableMesh_checked_hpaths += ./../../../Include SnippetDeformableMesh_checked_hpaths += ./../../../../PxShared/include SnippetDeformableMesh_checked_hpaths += ./../../../../PxShared/src/foundation/include SnippetDeformableMesh_checked_hpaths += ./../../../../PxShared/src/fastxml/include SnippetDeformableMesh_checked_hpaths += ./../../../../Externals/android-ndk-r9d/sources/cxx-stl/gnu-libstdc++/4.8/include SnippetDeformableMesh_checked_hpaths += ./../../../../Externals/android-ndk-r9d/sources/cxx-stl/gnu-libstdc++/4.8/libs/armeabi-v7a/include SnippetDeformableMesh_checked_lpaths := SnippetDeformableMesh_checked_lpaths += ./../../../../Externals/nvToolsExt/1/lib/armv7 SnippetDeformableMesh_checked_lpaths += ./../../../Lib/android16 SnippetDeformableMesh_checked_lpaths += ./../../lib/android16 SnippetDeformableMesh_checked_lpaths += ./../../../../PxShared/lib/android16 
SnippetDeformableMesh_checked_lpaths += ./../../lib/android16 SnippetDeformableMesh_checked_defines := $(SnippetDeformableMesh_custom_defines) SnippetDeformableMesh_checked_defines += ANDROID SnippetDeformableMesh_checked_defines += GLES2 SnippetDeformableMesh_checked_defines += __STDC_LIMIT_MACROS SnippetDeformableMesh_checked_defines += __ARM_ARCH_5__ SnippetDeformableMesh_checked_defines += __ARM_ARCH_5T__ SnippetDeformableMesh_checked_defines += __ARM_ARCH_5E__ SnippetDeformableMesh_checked_defines += __ARM_ARCH_5TE__ SnippetDeformableMesh_checked_defines += NDEBUG SnippetDeformableMesh_checked_defines += PX_CHECKED=1 SnippetDeformableMesh_checked_defines += PX_SUPPORT_PVD=1 SnippetDeformableMesh_checked_defines += PX_NVTX=1 SnippetDeformableMesh_checked_defines += PHYSX_PROFILE_SDK SnippetDeformableMesh_checked_libraries := SnippetDeformableMesh_checked_libraries += SnippetUtilsCHECKED SnippetDeformableMesh_checked_libraries += log SnippetDeformableMesh_checked_libraries += gnustl_static SnippetDeformableMesh_checked_libraries += supc++ SnippetDeformableMesh_checked_libraries += m SnippetDeformableMesh_checked_libraries += c SnippetDeformableMesh_checked_libraries += nvToolsExt SnippetDeformableMesh_checked_libraries += PhysX3CommonCHECKED SnippetDeformableMesh_checked_libraries += PhysX3CHECKED SnippetDeformableMesh_checked_libraries += PhysX3VehicleCHECKED SnippetDeformableMesh_checked_libraries += PhysX3CookingCHECKED SnippetDeformableMesh_checked_libraries += PhysX3ExtensionsCHECKED SnippetDeformableMesh_checked_libraries += PhysX3CharacterKinematicCHECKED SnippetDeformableMesh_checked_libraries += SnippetUtilsCHECKED SnippetDeformableMesh_checked_libraries += PxFoundationCHECKED SnippetDeformableMesh_checked_libraries += PsFastXmlCHECKED SnippetDeformableMesh_checked_libraries += PxPvdSDKCHECKED SnippetDeformableMesh_checked_common_cflags := $(SnippetDeformableMesh_custom_cflags) SnippetDeformableMesh_checked_common_cflags += -MMD 
SnippetDeformableMesh_checked_common_cflags += $(addprefix -D, $(SnippetDeformableMesh_checked_defines)) SnippetDeformableMesh_checked_common_cflags += $(addprefix -I, $(SnippetDeformableMesh_checked_hpaths)) SnippetDeformableMesh_checked_common_cflags += -Werror SnippetDeformableMesh_checked_common_cflags += -fpic -fno-exceptions SnippetDeformableMesh_checked_common_cflags += -isysroot ../../../../Externals/android-ndk-r9d/platforms/android-16/arch-arm SnippetDeformableMesh_checked_common_cflags += -march=armv7-a -mfpu=neon -marm -mfloat-abi=softfp -mthumb-interwork SnippetDeformableMesh_checked_common_cflags += -Wall -Wextra -Wpedantic -Wstrict-aliasing=2 SnippetDeformableMesh_checked_common_cflags += -Wno-maybe-uninitialized SnippetDeformableMesh_checked_common_cflags += -Wno-pedantic SnippetDeformableMesh_checked_common_cflags += -fPIE SnippetDeformableMesh_checked_common_cflags += -g3 -gdwarf-2 -O3 -fno-strict-aliasing SnippetDeformableMesh_checked_common_cflags += -ffunction-sections -funwind-tables -fstack-protector SnippetDeformableMesh_checked_common_cflags += -fomit-frame-pointer -funswitch-loops -finline-limit=300 SnippetDeformableMesh_checked_cflags := $(SnippetDeformableMesh_checked_common_cflags) SnippetDeformableMesh_checked_cppflags := $(SnippetDeformableMesh_checked_common_cflags) SnippetDeformableMesh_checked_cppflags += -fno-rtti SnippetDeformableMesh_checked_lflags := $(SnippetDeformableMesh_custom_lflags) SnippetDeformableMesh_checked_lflags += $(addprefix -L, $(SnippetDeformableMesh_checked_lpaths)) SnippetDeformableMesh_checked_lflags += -Wl,--start-group $(addprefix -l, $(SnippetDeformableMesh_checked_libraries)) -Wl,--end-group SnippetDeformableMesh_checked_lflags += --sysroot=../../../../Externals/android-ndk-r9d/platforms/android-16/arch-arm SnippetDeformableMesh_checked_lflags += -Wl,--no-undefined SnippetDeformableMesh_checked_lflags += -Wl,-z,noexecstack 
-L../../../../Externals/android-ndk-r9d/sources/cxx-stl/gnu-libstdc++/4.8/libs/armeabi-v7a -Wl,-rpath-link=../../../../Externals/android-ndk-r9d/platforms/android-16/arch-arm/usr/lib SnippetDeformableMesh_checked_lflags += -pie SnippetDeformableMesh_checked_objsdir = $(OBJS_DIR)/SnippetDeformableMesh_checked SnippetDeformableMesh_checked_cpp_o = $(addprefix $(SnippetDeformableMesh_checked_objsdir)/, $(subst ./, , $(subst ../, , $(patsubst %.cpp, %.cpp.o, $(SnippetDeformableMesh_cppfiles))))) SnippetDeformableMesh_checked_cc_o = $(addprefix $(SnippetDeformableMesh_checked_objsdir)/, $(subst ./, , $(subst ../, , $(patsubst %.cc, %.cc.o, $(SnippetDeformableMesh_ccfiles))))) SnippetDeformableMesh_checked_c_o = $(addprefix $(SnippetDeformableMesh_checked_objsdir)/, $(subst ./, , $(subst ../, , $(patsubst %.c, %.c.o, $(SnippetDeformableMesh_cfiles))))) SnippetDeformableMesh_checked_obj = $(SnippetDeformableMesh_checked_cpp_o) $(SnippetDeformableMesh_checked_cc_o) $(SnippetDeformableMesh_checked_c_o) SnippetDeformableMesh_checked_bin := ./../../../Bin/android16/SnippetDeformableMeshCHECKED clean_SnippetDeformableMesh_checked: @$(ECHO) clean SnippetDeformableMesh checked @$(RMDIR) $(SnippetDeformableMesh_checked_objsdir) @$(RMDIR) $(SnippetDeformableMesh_checked_bin) @$(RMDIR) $(DEPSDIR)/SnippetDeformableMesh/checked build_SnippetDeformableMesh_checked: postbuild_SnippetDeformableMesh_checked postbuild_SnippetDeformableMesh_checked: mainbuild_SnippetDeformableMesh_checked mainbuild_SnippetDeformableMesh_checked: prebuild_SnippetDeformableMesh_checked $(SnippetDeformableMesh_checked_bin) prebuild_SnippetDeformableMesh_checked: $(SnippetDeformableMesh_checked_bin): $(SnippetDeformableMesh_checked_obj) build_SnippetUtils_checked mkdir -p `dirname ./../../../Bin/android16/SnippetDeformableMeshCHECKED` $(CCLD) $(SnippetDeformableMesh_checked_obj) $(SnippetDeformableMesh_checked_lflags) -o $(SnippetDeformableMesh_checked_bin) $(ECHO) building $@ complete! 
SnippetDeformableMesh_checked_DEPDIR = $(dir $(@))/$(*F) $(SnippetDeformableMesh_checked_cpp_o): $(SnippetDeformableMesh_checked_objsdir)/%.o: $(ECHO) SnippetDeformableMesh: compiling checked $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles))... mkdir -p $(dir $(@)) $(CXX) $(SnippetDeformableMesh_checked_cppflags) -c $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles)) -o $@ @mkdir -p $(dir $(addprefix $(DEPSDIR)/SnippetDeformableMesh/checked/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles)))))) cp $(SnippetDeformableMesh_checked_DEPDIR).d $(addprefix $(DEPSDIR)/SnippetDeformableMesh/checked/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles))))).P; \ sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' \ -e '/^$$/ d' -e 's/$$/ :/' < $(SnippetDeformableMesh_checked_DEPDIR).d >> $(addprefix $(DEPSDIR)/SnippetDeformableMesh/checked/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles))))).P; \ rm -f $(SnippetDeformableMesh_checked_DEPDIR).d $(SnippetDeformableMesh_checked_cc_o): $(SnippetDeformableMesh_checked_objsdir)/%.o: $(ECHO) SnippetDeformableMesh: compiling checked $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles))... 
mkdir -p $(dir $(@)) $(CXX) $(SnippetDeformableMesh_checked_cppflags) -c $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles)) -o $@ mkdir -p $(dir $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles)))))) cp $(SnippetDeformableMesh_checked_DEPDIR).d $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles))))).checked.P; \ sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' \ -e '/^$$/ d' -e 's/$$/ :/' < $(SnippetDeformableMesh_checked_DEPDIR).d >> $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles))))).checked.P; \ rm -f $(SnippetDeformableMesh_checked_DEPDIR).d $(SnippetDeformableMesh_checked_c_o): $(SnippetDeformableMesh_checked_objsdir)/%.o: $(ECHO) SnippetDeformableMesh: compiling checked $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_cfiles))... 
mkdir -p $(dir $(@)) $(CC) $(SnippetDeformableMesh_checked_cflags) -c $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_cfiles)) -o $@ @mkdir -p $(dir $(addprefix $(DEPSDIR)/SnippetDeformableMesh/checked/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_cfiles)))))) cp $(SnippetDeformableMesh_checked_DEPDIR).d $(addprefix $(DEPSDIR)/SnippetDeformableMesh/checked/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_cfiles))))).P; \ sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' \ -e '/^$$/ d' -e 's/$$/ :/' < $(SnippetDeformableMesh_checked_DEPDIR).d >> $(addprefix $(DEPSDIR)/SnippetDeformableMesh/checked/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_checked_objsdir),, $@))), $(SnippetDeformableMesh_cfiles))))).P; \ rm -f $(SnippetDeformableMesh_checked_DEPDIR).d SnippetDeformableMesh_profile_hpaths := SnippetDeformableMesh_profile_hpaths += ./../../../Include SnippetDeformableMesh_profile_hpaths += ./../../../../PxShared/include SnippetDeformableMesh_profile_hpaths += ./../../../../PxShared/src/foundation/include SnippetDeformableMesh_profile_hpaths += ./../../../../PxShared/src/fastxml/include SnippetDeformableMesh_profile_hpaths += ./../../../../Externals/android-ndk-r9d/sources/cxx-stl/gnu-libstdc++/4.8/include SnippetDeformableMesh_profile_hpaths += ./../../../../Externals/android-ndk-r9d/sources/cxx-stl/gnu-libstdc++/4.8/libs/armeabi-v7a/include SnippetDeformableMesh_profile_lpaths := SnippetDeformableMesh_profile_lpaths += ./../../../../Externals/nvToolsExt/1/lib/armv7 SnippetDeformableMesh_profile_lpaths += ./../../../Lib/android16 SnippetDeformableMesh_profile_lpaths += ./../../lib/android16 SnippetDeformableMesh_profile_lpaths += 
./../../../../PxShared/lib/android16 SnippetDeformableMesh_profile_lpaths += ./../../lib/android16 SnippetDeformableMesh_profile_defines := $(SnippetDeformableMesh_custom_defines) SnippetDeformableMesh_profile_defines += ANDROID SnippetDeformableMesh_profile_defines += GLES2 SnippetDeformableMesh_profile_defines += __STDC_LIMIT_MACROS SnippetDeformableMesh_profile_defines += __ARM_ARCH_5__ SnippetDeformableMesh_profile_defines += __ARM_ARCH_5T__ SnippetDeformableMesh_profile_defines += __ARM_ARCH_5E__ SnippetDeformableMesh_profile_defines += __ARM_ARCH_5TE__ SnippetDeformableMesh_profile_defines += NDEBUG SnippetDeformableMesh_profile_defines += PX_PROFILE=1 SnippetDeformableMesh_profile_defines += PX_SUPPORT_PVD=1 SnippetDeformableMesh_profile_defines += PX_NVTX=1 SnippetDeformableMesh_profile_defines += PHYSX_PROFILE_SDK SnippetDeformableMesh_profile_libraries := SnippetDeformableMesh_profile_libraries += SnippetUtilsPROFILE SnippetDeformableMesh_profile_libraries += log SnippetDeformableMesh_profile_libraries += gnustl_static SnippetDeformableMesh_profile_libraries += supc++ SnippetDeformableMesh_profile_libraries += m SnippetDeformableMesh_profile_libraries += c SnippetDeformableMesh_profile_libraries += nvToolsExt SnippetDeformableMesh_profile_libraries += PhysX3CommonPROFILE SnippetDeformableMesh_profile_libraries += PhysX3PROFILE SnippetDeformableMesh_profile_libraries += PhysX3VehiclePROFILE SnippetDeformableMesh_profile_libraries += PhysX3CookingPROFILE SnippetDeformableMesh_profile_libraries += PhysX3ExtensionsPROFILE SnippetDeformableMesh_profile_libraries += PhysX3CharacterKinematicPROFILE SnippetDeformableMesh_profile_libraries += SnippetUtilsPROFILE SnippetDeformableMesh_profile_libraries += PxFoundationPROFILE SnippetDeformableMesh_profile_libraries += PsFastXmlPROFILE SnippetDeformableMesh_profile_libraries += PxPvdSDKPROFILE SnippetDeformableMesh_profile_common_cflags := $(SnippetDeformableMesh_custom_cflags) 
SnippetDeformableMesh_profile_common_cflags += -MMD SnippetDeformableMesh_profile_common_cflags += $(addprefix -D, $(SnippetDeformableMesh_profile_defines)) SnippetDeformableMesh_profile_common_cflags += $(addprefix -I, $(SnippetDeformableMesh_profile_hpaths)) SnippetDeformableMesh_profile_common_cflags += -Werror SnippetDeformableMesh_profile_common_cflags += -fpic -fno-exceptions SnippetDeformableMesh_profile_common_cflags += -isysroot ../../../../Externals/android-ndk-r9d/platforms/android-16/arch-arm SnippetDeformableMesh_profile_common_cflags += -march=armv7-a -mfpu=neon -marm -mfloat-abi=softfp -mthumb-interwork SnippetDeformableMesh_profile_common_cflags += -Wall -Wextra -Wpedantic -Wstrict-aliasing=2 SnippetDeformableMesh_profile_common_cflags += -Wno-maybe-uninitialized SnippetDeformableMesh_profile_common_cflags += -Wno-pedantic SnippetDeformableMesh_profile_common_cflags += -fPIE SnippetDeformableMesh_profile_common_cflags += -O3 -fno-strict-aliasing SnippetDeformableMesh_profile_common_cflags += -ffunction-sections -funwind-tables -fstack-protector SnippetDeformableMesh_profile_common_cflags += -fno-omit-frame-pointer -funswitch-loops -finline-limit=300 SnippetDeformableMesh_profile_cflags := $(SnippetDeformableMesh_profile_common_cflags) SnippetDeformableMesh_profile_cppflags := $(SnippetDeformableMesh_profile_common_cflags) SnippetDeformableMesh_profile_cppflags += -fno-rtti SnippetDeformableMesh_profile_lflags := $(SnippetDeformableMesh_custom_lflags) SnippetDeformableMesh_profile_lflags += $(addprefix -L, $(SnippetDeformableMesh_profile_lpaths)) SnippetDeformableMesh_profile_lflags += -Wl,--start-group $(addprefix -l, $(SnippetDeformableMesh_profile_libraries)) -Wl,--end-group SnippetDeformableMesh_profile_lflags += --sysroot=../../../../Externals/android-ndk-r9d/platforms/android-16/arch-arm SnippetDeformableMesh_profile_lflags += -Wl,--no-undefined SnippetDeformableMesh_profile_lflags += -Wl,-z,noexecstack 
-L../../../../Externals/android-ndk-r9d/sources/cxx-stl/gnu-libstdc++/4.8/libs/armeabi-v7a -Wl,-rpath-link=../../../../Externals/android-ndk-r9d/platforms/android-16/arch-arm/usr/lib SnippetDeformableMesh_profile_lflags += -pie SnippetDeformableMesh_profile_objsdir = $(OBJS_DIR)/SnippetDeformableMesh_profile SnippetDeformableMesh_profile_cpp_o = $(addprefix $(SnippetDeformableMesh_profile_objsdir)/, $(subst ./, , $(subst ../, , $(patsubst %.cpp, %.cpp.o, $(SnippetDeformableMesh_cppfiles))))) SnippetDeformableMesh_profile_cc_o = $(addprefix $(SnippetDeformableMesh_profile_objsdir)/, $(subst ./, , $(subst ../, , $(patsubst %.cc, %.cc.o, $(SnippetDeformableMesh_ccfiles))))) SnippetDeformableMesh_profile_c_o = $(addprefix $(SnippetDeformableMesh_profile_objsdir)/, $(subst ./, , $(subst ../, , $(patsubst %.c, %.c.o, $(SnippetDeformableMesh_cfiles))))) SnippetDeformableMesh_profile_obj = $(SnippetDeformableMesh_profile_cpp_o) $(SnippetDeformableMesh_profile_cc_o) $(SnippetDeformableMesh_profile_c_o) SnippetDeformableMesh_profile_bin := ./../../../Bin/android16/SnippetDeformableMeshPROFILE clean_SnippetDeformableMesh_profile: @$(ECHO) clean SnippetDeformableMesh profile @$(RMDIR) $(SnippetDeformableMesh_profile_objsdir) @$(RMDIR) $(SnippetDeformableMesh_profile_bin) @$(RMDIR) $(DEPSDIR)/SnippetDeformableMesh/profile build_SnippetDeformableMesh_profile: postbuild_SnippetDeformableMesh_profile postbuild_SnippetDeformableMesh_profile: mainbuild_SnippetDeformableMesh_profile mainbuild_SnippetDeformableMesh_profile: prebuild_SnippetDeformableMesh_profile $(SnippetDeformableMesh_profile_bin) prebuild_SnippetDeformableMesh_profile: $(SnippetDeformableMesh_profile_bin): $(SnippetDeformableMesh_profile_obj) build_SnippetUtils_profile mkdir -p `dirname ./../../../Bin/android16/SnippetDeformableMeshPROFILE` $(CCLD) $(SnippetDeformableMesh_profile_obj) $(SnippetDeformableMesh_profile_lflags) -o $(SnippetDeformableMesh_profile_bin) $(ECHO) building $@ complete! 
SnippetDeformableMesh_profile_DEPDIR = $(dir $(@))/$(*F) $(SnippetDeformableMesh_profile_cpp_o): $(SnippetDeformableMesh_profile_objsdir)/%.o: $(ECHO) SnippetDeformableMesh: compiling profile $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles))... mkdir -p $(dir $(@)) $(CXX) $(SnippetDeformableMesh_profile_cppflags) -c $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles)) -o $@ @mkdir -p $(dir $(addprefix $(DEPSDIR)/SnippetDeformableMesh/profile/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles)))))) cp $(SnippetDeformableMesh_profile_DEPDIR).d $(addprefix $(DEPSDIR)/SnippetDeformableMesh/profile/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles))))).P; \ sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' \ -e '/^$$/ d' -e 's/$$/ :/' < $(SnippetDeformableMesh_profile_DEPDIR).d >> $(addprefix $(DEPSDIR)/SnippetDeformableMesh/profile/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles))))).P; \ rm -f $(SnippetDeformableMesh_profile_DEPDIR).d $(SnippetDeformableMesh_profile_cc_o): $(SnippetDeformableMesh_profile_objsdir)/%.o: $(ECHO) SnippetDeformableMesh: compiling profile $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles))... 
mkdir -p $(dir $(@)) $(CXX) $(SnippetDeformableMesh_profile_cppflags) -c $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles)) -o $@ mkdir -p $(dir $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles)))))) cp $(SnippetDeformableMesh_profile_DEPDIR).d $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles))))).profile.P; \ sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' \ -e '/^$$/ d' -e 's/$$/ :/' < $(SnippetDeformableMesh_profile_DEPDIR).d >> $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles))))).profile.P; \ rm -f $(SnippetDeformableMesh_profile_DEPDIR).d $(SnippetDeformableMesh_profile_c_o): $(SnippetDeformableMesh_profile_objsdir)/%.o: $(ECHO) SnippetDeformableMesh: compiling profile $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_cfiles))... 
mkdir -p $(dir $(@)) $(CC) $(SnippetDeformableMesh_profile_cflags) -c $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_cfiles)) -o $@ @mkdir -p $(dir $(addprefix $(DEPSDIR)/SnippetDeformableMesh/profile/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_cfiles)))))) cp $(SnippetDeformableMesh_profile_DEPDIR).d $(addprefix $(DEPSDIR)/SnippetDeformableMesh/profile/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_cfiles))))).P; \ sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' \ -e '/^$$/ d' -e 's/$$/ :/' < $(SnippetDeformableMesh_profile_DEPDIR).d >> $(addprefix $(DEPSDIR)/SnippetDeformableMesh/profile/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_profile_objsdir),, $@))), $(SnippetDeformableMesh_cfiles))))).P; \ rm -f $(SnippetDeformableMesh_profile_DEPDIR).d SnippetDeformableMesh_release_hpaths := SnippetDeformableMesh_release_hpaths += ./../../../Include SnippetDeformableMesh_release_hpaths += ./../../../../PxShared/include SnippetDeformableMesh_release_hpaths += ./../../../../PxShared/src/foundation/include SnippetDeformableMesh_release_hpaths += ./../../../../PxShared/src/fastxml/include SnippetDeformableMesh_release_hpaths += ./../../../../Externals/android-ndk-r9d/sources/cxx-stl/gnu-libstdc++/4.8/include SnippetDeformableMesh_release_hpaths += ./../../../../Externals/android-ndk-r9d/sources/cxx-stl/gnu-libstdc++/4.8/libs/armeabi-v7a/include SnippetDeformableMesh_release_lpaths := SnippetDeformableMesh_release_lpaths += ./../../../../Externals/nvToolsExt/1/lib/armv7 SnippetDeformableMesh_release_lpaths += ./../../../Lib/android16 SnippetDeformableMesh_release_lpaths += ./../../lib/android16 SnippetDeformableMesh_release_lpaths += 
./../../../../PxShared/lib/android16 SnippetDeformableMesh_release_lpaths += ./../../lib/android16 SnippetDeformableMesh_release_defines := $(SnippetDeformableMesh_custom_defines) SnippetDeformableMesh_release_defines += ANDROID SnippetDeformableMesh_release_defines += GLES2 SnippetDeformableMesh_release_defines += __STDC_LIMIT_MACROS SnippetDeformableMesh_release_defines += __ARM_ARCH_5__ SnippetDeformableMesh_release_defines += __ARM_ARCH_5T__ SnippetDeformableMesh_release_defines += __ARM_ARCH_5E__ SnippetDeformableMesh_release_defines += __ARM_ARCH_5TE__ SnippetDeformableMesh_release_defines += NDEBUG SnippetDeformableMesh_release_defines += PX_SUPPORT_PVD=0 SnippetDeformableMesh_release_defines += PHYSX_PROFILE_SDK SnippetDeformableMesh_release_libraries := SnippetDeformableMesh_release_libraries += SnippetUtils SnippetDeformableMesh_release_libraries += log SnippetDeformableMesh_release_libraries += gnustl_static SnippetDeformableMesh_release_libraries += supc++ SnippetDeformableMesh_release_libraries += m SnippetDeformableMesh_release_libraries += c SnippetDeformableMesh_release_libraries += nvToolsExt SnippetDeformableMesh_release_libraries += PhysX3Common SnippetDeformableMesh_release_libraries += PhysX3 SnippetDeformableMesh_release_libraries += PhysX3Vehicle SnippetDeformableMesh_release_libraries += PhysX3Cooking SnippetDeformableMesh_release_libraries += PhysX3Extensions SnippetDeformableMesh_release_libraries += PhysX3CharacterKinematic SnippetDeformableMesh_release_libraries += SnippetUtils SnippetDeformableMesh_release_libraries += PxFoundation SnippetDeformableMesh_release_libraries += PsFastXml SnippetDeformableMesh_release_libraries += PxPvdSDK SnippetDeformableMesh_release_common_cflags := $(SnippetDeformableMesh_custom_cflags) SnippetDeformableMesh_release_common_cflags += -MMD SnippetDeformableMesh_release_common_cflags += $(addprefix -D, $(SnippetDeformableMesh_release_defines)) SnippetDeformableMesh_release_common_cflags += $(addprefix -I, 
$(SnippetDeformableMesh_release_hpaths)) SnippetDeformableMesh_release_common_cflags += -Werror SnippetDeformableMesh_release_common_cflags += -fpic -fno-exceptions SnippetDeformableMesh_release_common_cflags += -isysroot ../../../../Externals/android-ndk-r9d/platforms/android-16/arch-arm SnippetDeformableMesh_release_common_cflags += -march=armv7-a -mfpu=neon -marm -mfloat-abi=softfp -mthumb-interwork SnippetDeformableMesh_release_common_cflags += -Wall -Wextra -Wpedantic -Wstrict-aliasing=2 SnippetDeformableMesh_release_common_cflags += -Wno-maybe-uninitialized SnippetDeformableMesh_release_common_cflags += -Wno-pedantic SnippetDeformableMesh_release_common_cflags += -fPIE SnippetDeformableMesh_release_common_cflags += -O3 -fno-strict-aliasing SnippetDeformableMesh_release_common_cflags += -ffunction-sections -funwind-tables -fstack-protector SnippetDeformableMesh_release_common_cflags += -fomit-frame-pointer -funswitch-loops -finline-limit=300 SnippetDeformableMesh_release_cflags := $(SnippetDeformableMesh_release_common_cflags) SnippetDeformableMesh_release_cppflags := $(SnippetDeformableMesh_release_common_cflags) SnippetDeformableMesh_release_cppflags += -fno-rtti SnippetDeformableMesh_release_lflags := $(SnippetDeformableMesh_custom_lflags) SnippetDeformableMesh_release_lflags += $(addprefix -L, $(SnippetDeformableMesh_release_lpaths)) SnippetDeformableMesh_release_lflags += -Wl,--start-group $(addprefix -l, $(SnippetDeformableMesh_release_libraries)) -Wl,--end-group SnippetDeformableMesh_release_lflags += --sysroot=../../../../Externals/android-ndk-r9d/platforms/android-16/arch-arm SnippetDeformableMesh_release_lflags += -Wl,--no-undefined SnippetDeformableMesh_release_lflags += -Wl,-z,noexecstack -L../../../../Externals/android-ndk-r9d/sources/cxx-stl/gnu-libstdc++/4.8/libs/armeabi-v7a -Wl,-rpath-link=../../../../Externals/android-ndk-r9d/platforms/android-16/arch-arm/usr/lib SnippetDeformableMesh_release_lflags += -pie 
SnippetDeformableMesh_release_objsdir = $(OBJS_DIR)/SnippetDeformableMesh_release SnippetDeformableMesh_release_cpp_o = $(addprefix $(SnippetDeformableMesh_release_objsdir)/, $(subst ./, , $(subst ../, , $(patsubst %.cpp, %.cpp.o, $(SnippetDeformableMesh_cppfiles))))) SnippetDeformableMesh_release_cc_o = $(addprefix $(SnippetDeformableMesh_release_objsdir)/, $(subst ./, , $(subst ../, , $(patsubst %.cc, %.cc.o, $(SnippetDeformableMesh_ccfiles))))) SnippetDeformableMesh_release_c_o = $(addprefix $(SnippetDeformableMesh_release_objsdir)/, $(subst ./, , $(subst ../, , $(patsubst %.c, %.c.o, $(SnippetDeformableMesh_cfiles))))) SnippetDeformableMesh_release_obj = $(SnippetDeformableMesh_release_cpp_o) $(SnippetDeformableMesh_release_cc_o) $(SnippetDeformableMesh_release_c_o) SnippetDeformableMesh_release_bin := ./../../../Bin/android16/SnippetDeformableMesh clean_SnippetDeformableMesh_release: @$(ECHO) clean SnippetDeformableMesh release @$(RMDIR) $(SnippetDeformableMesh_release_objsdir) @$(RMDIR) $(SnippetDeformableMesh_release_bin) @$(RMDIR) $(DEPSDIR)/SnippetDeformableMesh/release build_SnippetDeformableMesh_release: postbuild_SnippetDeformableMesh_release postbuild_SnippetDeformableMesh_release: mainbuild_SnippetDeformableMesh_release mainbuild_SnippetDeformableMesh_release: prebuild_SnippetDeformableMesh_release $(SnippetDeformableMesh_release_bin) prebuild_SnippetDeformableMesh_release: $(SnippetDeformableMesh_release_bin): $(SnippetDeformableMesh_release_obj) build_SnippetUtils_release mkdir -p `dirname ./../../../Bin/android16/SnippetDeformableMesh` $(CCLD) $(SnippetDeformableMesh_release_obj) $(SnippetDeformableMesh_release_lflags) -o $(SnippetDeformableMesh_release_bin) $(ECHO) building $@ complete! 
SnippetDeformableMesh_release_DEPDIR = $(dir $(@))/$(*F) $(SnippetDeformableMesh_release_cpp_o): $(SnippetDeformableMesh_release_objsdir)/%.o: $(ECHO) SnippetDeformableMesh: compiling release $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles))... mkdir -p $(dir $(@)) $(CXX) $(SnippetDeformableMesh_release_cppflags) -c $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles)) -o $@ @mkdir -p $(dir $(addprefix $(DEPSDIR)/SnippetDeformableMesh/release/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles)))))) cp $(SnippetDeformableMesh_release_DEPDIR).d $(addprefix $(DEPSDIR)/SnippetDeformableMesh/release/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles))))).P; \ sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' \ -e '/^$$/ d' -e 's/$$/ :/' < $(SnippetDeformableMesh_release_DEPDIR).d >> $(addprefix $(DEPSDIR)/SnippetDeformableMesh/release/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cpp.o,.cpp, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_cppfiles))))).P; \ rm -f $(SnippetDeformableMesh_release_DEPDIR).d $(SnippetDeformableMesh_release_cc_o): $(SnippetDeformableMesh_release_objsdir)/%.o: $(ECHO) SnippetDeformableMesh: compiling release $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles))... 
mkdir -p $(dir $(@)) $(CXX) $(SnippetDeformableMesh_release_cppflags) -c $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles)) -o $@ mkdir -p $(dir $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles)))))) cp $(SnippetDeformableMesh_release_DEPDIR).d $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles))))).release.P; \ sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' \ -e '/^$$/ d' -e 's/$$/ :/' < $(SnippetDeformableMesh_release_DEPDIR).d >> $(addprefix $(DEPSDIR)/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .cc.o,.cc, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_ccfiles))))).release.P; \ rm -f $(SnippetDeformableMesh_release_DEPDIR).d $(SnippetDeformableMesh_release_c_o): $(SnippetDeformableMesh_release_objsdir)/%.o: $(ECHO) SnippetDeformableMesh: compiling release $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_cfiles))... 
mkdir -p $(dir $(@)) $(CC) $(SnippetDeformableMesh_release_cflags) -c $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_cfiles)) -o $@ @mkdir -p $(dir $(addprefix $(DEPSDIR)/SnippetDeformableMesh/release/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_cfiles)))))) cp $(SnippetDeformableMesh_release_DEPDIR).d $(addprefix $(DEPSDIR)/SnippetDeformableMesh/release/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_cfiles))))).P; \ sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' \ -e '/^$$/ d' -e 's/$$/ :/' < $(SnippetDeformableMesh_release_DEPDIR).d >> $(addprefix $(DEPSDIR)/SnippetDeformableMesh/release/, $(subst ./, , $(subst ../, , $(filter %$(strip $(subst .c.o,.c, $(subst $(SnippetDeformableMesh_release_objsdir),, $@))), $(SnippetDeformableMesh_cfiles))))).P; \ rm -f $(SnippetDeformableMesh_release_DEPDIR).d clean_SnippetDeformableMesh: clean_SnippetDeformableMesh_debug clean_SnippetDeformableMesh_checked clean_SnippetDeformableMesh_profile clean_SnippetDeformableMesh_release rm -rf $(DEPSDIR) export VERBOSE ifndef VERBOSE .SILENT: endif ```
Carlsen may also refer to: People Carlsen, people with the surname Places Carlsen Air Force Base, a United States military facility in Trinidad Carlsen Island, part of the Svalbard archipelago Organisations Carlsen Verlag, a Danish publishing house See also Carlson (disambiguation)
Cimiatene (; ) was an ancient division of Paphlagonia, which took its name from a hill fort, Cimiata, at the foot of the range of Olgassys. Mithridates Ktistes slightly after 302 BC made this his first stronghold, and so became master of the Pontus. The territory remained a possession of the kings of Pontus until the death of Mithridates Eupator in 63 BC and the fall of the kingdom. References Smith, William (editor); Dictionary of Greek and Roman Geography, "Cimiatene", London, (1854) Notes Ancient Greek geography Geography of Paphlagonia
```objective-c // Generated by the protocol buffer compiler. DO NOT EDIT! // source: caffe2/proto/prof_dag.proto #ifndef PROTOBUF_caffe2_2fproto_2fprof_5fdag_2eproto__INCLUDED #define PROTOBUF_caffe2_2fproto_2fprof_5fdag_2eproto__INCLUDED #include <string> #include <google/protobuf/stubs/common.h> #if GOOGLE_PROTOBUF_VERSION < 3005000 #error This file was generated by a newer version of protoc which is #error incompatible with your Protocol Buffer headers. Please update #error your headers. #endif #if 3005000 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION #error This file was generated by an older version of protoc which is #error incompatible with your Protocol Buffer headers. Please #error regenerate this file with a newer version of protoc. #endif #include <google/protobuf/io/coded_stream.h> #include <google/protobuf/arena.h> #include <google/protobuf/arenastring.h> #include <google/protobuf/generated_message_table_driven.h> #include <google/protobuf/generated_message_util.h> #include <google/protobuf/metadata.h> #include <google/protobuf/message.h> #include <google/protobuf/repeated_field.h> // IWYU pragma: export #include <google/protobuf/extension_set.h> // IWYU pragma: export #include <google/protobuf/unknown_field_set.h> // @@protoc_insertion_point(includes) namespace protobuf_caffe2_2fproto_2fprof_5fdag_2eproto { // Internal implementation detail -- do not use these members. 
struct CAFFE2_API TableStruct { static const ::google::protobuf::internal::ParseTableField entries[]; static const ::google::protobuf::internal::AuxillaryParseTableField aux[]; static const ::google::protobuf::internal::ParseTable schema[4]; static const ::google::protobuf::internal::FieldMetadata field_metadata[]; static const ::google::protobuf::internal::SerializationTable serialization_table[]; static const ::google::protobuf::uint32 offsets[]; }; void CAFFE2_API AddDescriptors(); void CAFFE2_API InitDefaultsTwoNumberStatsProtoImpl(); void CAFFE2_API InitDefaultsTwoNumberStatsProto(); void CAFFE2_API InitDefaultsBlobProfileImpl(); void CAFFE2_API InitDefaultsBlobProfile(); void CAFFE2_API InitDefaultsProfDAGProtoImpl(); void CAFFE2_API InitDefaultsProfDAGProto(); void CAFFE2_API InitDefaultsProfDAGProtosImpl(); void CAFFE2_API InitDefaultsProfDAGProtos(); inline void CAFFE2_API InitDefaults() { InitDefaultsTwoNumberStatsProto(); InitDefaultsBlobProfile(); InitDefaultsProfDAGProto(); InitDefaultsProfDAGProtos(); } } // namespace protobuf_caffe2_2fproto_2fprof_5fdag_2eproto namespace caffe2 { const ::std::string& GetEmptyStringAlreadyInited(); class BlobProfile; class BlobProfileDefaultTypeInternal; CAFFE2_API extern BlobProfileDefaultTypeInternal _BlobProfile_default_instance_; class ProfDAGProto; class ProfDAGProtoDefaultTypeInternal; CAFFE2_API extern ProfDAGProtoDefaultTypeInternal _ProfDAGProto_default_instance_; class ProfDAGProtos; class ProfDAGProtosDefaultTypeInternal; CAFFE2_API extern ProfDAGProtosDefaultTypeInternal _ProfDAGProtos_default_instance_; class TwoNumberStatsProto; class TwoNumberStatsProtoDefaultTypeInternal; CAFFE2_API extern TwoNumberStatsProtoDefaultTypeInternal _TwoNumberStatsProto_default_instance_; } // namespace caffe2 namespace caffe2 { // =================================================================== class CAFFE2_API TwoNumberStatsProto : public ::google::protobuf::Message /* 
@@protoc_insertion_point(class_definition:caffe2.TwoNumberStatsProto) */ { public: TwoNumberStatsProto(); virtual ~TwoNumberStatsProto(); TwoNumberStatsProto(const TwoNumberStatsProto& from); inline TwoNumberStatsProto& operator=(const TwoNumberStatsProto& from) { CopyFrom(from); return *this; } #if LANG_CXX11 TwoNumberStatsProto(TwoNumberStatsProto&& from) noexcept : TwoNumberStatsProto() { *this = ::std::move(from); } inline TwoNumberStatsProto& operator=(TwoNumberStatsProto&& from) noexcept { if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { if (this != &from) InternalSwap(&from); } else { CopyFrom(from); } return *this; } #endif inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { return _internal_metadata_.unknown_fields(); } inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { return _internal_metadata_.mutable_unknown_fields(); } static const ::google::protobuf::Descriptor* descriptor(); static const TwoNumberStatsProto& default_instance(); static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY static inline const TwoNumberStatsProto* internal_default_instance() { return reinterpret_cast<const TwoNumberStatsProto*>( &_TwoNumberStatsProto_default_instance_); } static int const kIndexInFileMessages = 0; void Swap(TwoNumberStatsProto* other); friend void swap(TwoNumberStatsProto& a, TwoNumberStatsProto& b) { a.Swap(&b); } // implements Message ---------------------------------------------- inline TwoNumberStatsProto* New() const PROTOBUF_FINAL { return New(NULL); } TwoNumberStatsProto* New(::google::protobuf::Arena* arena) const PROTOBUF_FINAL; void CopyFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL; void MergeFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL; void CopyFrom(const TwoNumberStatsProto& from); void MergeFrom(const TwoNumberStatsProto& from); void Clear() PROTOBUF_FINAL; bool IsInitialized() const PROTOBUF_FINAL; size_t ByteSizeLong() const PROTOBUF_FINAL; bool 
MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) PROTOBUF_FINAL; void SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const PROTOBUF_FINAL; ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( bool deterministic, ::google::protobuf::uint8* target) const PROTOBUF_FINAL; int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } private: void SharedCtor(); void SharedDtor(); void SetCachedSize(int size) const PROTOBUF_FINAL; void InternalSwap(TwoNumberStatsProto* other); private: inline ::google::protobuf::Arena* GetArenaNoVirtual() const { return NULL; } inline void* MaybeArenaPtr() const { return NULL; } public: ::google::protobuf::Metadata GetMetadata() const PROTOBUF_FINAL; // nested types ---------------------------------------------------- // accessors ------------------------------------------------------- // optional float mean = 1; bool has_mean() const; void clear_mean(); static const int kMeanFieldNumber = 1; float mean() const; void set_mean(float value); // optional float stddev = 2; bool has_stddev() const; void clear_stddev(); static const int kStddevFieldNumber = 2; float stddev() const; void set_stddev(float value); // optional int64 count = 3; bool has_count() const; void clear_count(); static const int kCountFieldNumber = 3; ::google::protobuf::int64 count() const; void set_count(::google::protobuf::int64 value); // @@protoc_insertion_point(class_scope:caffe2.TwoNumberStatsProto) private: void set_has_mean(); void clear_has_mean(); void set_has_stddev(); void clear_has_stddev(); void set_has_count(); void clear_has_count(); ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::internal::HasBits<1> _has_bits_; mutable int _cached_size_; float mean_; float stddev_; ::google::protobuf::int64 count_; friend struct ::protobuf_caffe2_2fproto_2fprof_5fdag_2eproto::TableStruct; friend void 
::protobuf_caffe2_2fproto_2fprof_5fdag_2eproto::InitDefaultsTwoNumberStatsProtoImpl(); }; // your_sha256_hash--- class CAFFE2_API BlobProfile : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:caffe2.BlobProfile) */ { public: BlobProfile(); virtual ~BlobProfile(); BlobProfile(const BlobProfile& from); inline BlobProfile& operator=(const BlobProfile& from) { CopyFrom(from); return *this; } #if LANG_CXX11 BlobProfile(BlobProfile&& from) noexcept : BlobProfile() { *this = ::std::move(from); } inline BlobProfile& operator=(BlobProfile&& from) noexcept { if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { if (this != &from) InternalSwap(&from); } else { CopyFrom(from); } return *this; } #endif inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { return _internal_metadata_.unknown_fields(); } inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { return _internal_metadata_.mutable_unknown_fields(); } static const ::google::protobuf::Descriptor* descriptor(); static const BlobProfile& default_instance(); static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY static inline const BlobProfile* internal_default_instance() { return reinterpret_cast<const BlobProfile*>( &_BlobProfile_default_instance_); } static int const kIndexInFileMessages = 1; void Swap(BlobProfile* other); friend void swap(BlobProfile& a, BlobProfile& b) { a.Swap(&b); } // implements Message ---------------------------------------------- inline BlobProfile* New() const PROTOBUF_FINAL { return New(NULL); } BlobProfile* New(::google::protobuf::Arena* arena) const PROTOBUF_FINAL; void CopyFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL; void MergeFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL; void CopyFrom(const BlobProfile& from); void MergeFrom(const BlobProfile& from); void Clear() PROTOBUF_FINAL; bool IsInitialized() const PROTOBUF_FINAL; size_t ByteSizeLong() const PROTOBUF_FINAL; bool 
MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) PROTOBUF_FINAL; void SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const PROTOBUF_FINAL; ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( bool deterministic, ::google::protobuf::uint8* target) const PROTOBUF_FINAL; int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } private: void SharedCtor(); void SharedDtor(); void SetCachedSize(int size) const PROTOBUF_FINAL; void InternalSwap(BlobProfile* other); private: inline ::google::protobuf::Arena* GetArenaNoVirtual() const { return NULL; } inline void* MaybeArenaPtr() const { return NULL; } public: ::google::protobuf::Metadata GetMetadata() const PROTOBUF_FINAL; // nested types ---------------------------------------------------- // accessors ------------------------------------------------------- // optional string name = 1; bool has_name() const; void clear_name(); static const int kNameFieldNumber = 1; const ::std::string& name() const; void set_name(const ::std::string& value); #if LANG_CXX11 void set_name(::std::string&& value); #endif void set_name(const char* value); void set_name(const char* value, size_t size); ::std::string* mutable_name(); ::std::string* release_name(); void set_allocated_name(::std::string* name); // optional .caffe2.TwoNumberStatsProto bytes_used = 3; bool has_bytes_used() const; void clear_bytes_used(); static const int kBytesUsedFieldNumber = 3; const ::caffe2::TwoNumberStatsProto& bytes_used() const; ::caffe2::TwoNumberStatsProto* release_bytes_used(); ::caffe2::TwoNumberStatsProto* mutable_bytes_used(); void set_allocated_bytes_used(::caffe2::TwoNumberStatsProto* bytes_used); // @@protoc_insertion_point(class_scope:caffe2.BlobProfile) private: void set_has_name(); void clear_has_name(); void set_has_bytes_used(); void clear_has_bytes_used(); ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; 
::google::protobuf::internal::HasBits<1> _has_bits_; mutable int _cached_size_; ::google::protobuf::internal::ArenaStringPtr name_; ::caffe2::TwoNumberStatsProto* bytes_used_; friend struct ::protobuf_caffe2_2fproto_2fprof_5fdag_2eproto::TableStruct; friend void ::protobuf_caffe2_2fproto_2fprof_5fdag_2eproto::InitDefaultsBlobProfileImpl(); }; // your_sha256_hash--- class CAFFE2_API ProfDAGProto : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:caffe2.ProfDAGProto) */ { public: ProfDAGProto(); virtual ~ProfDAGProto(); ProfDAGProto(const ProfDAGProto& from); inline ProfDAGProto& operator=(const ProfDAGProto& from) { CopyFrom(from); return *this; } #if LANG_CXX11 ProfDAGProto(ProfDAGProto&& from) noexcept : ProfDAGProto() { *this = ::std::move(from); } inline ProfDAGProto& operator=(ProfDAGProto&& from) noexcept { if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { if (this != &from) InternalSwap(&from); } else { CopyFrom(from); } return *this; } #endif inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { return _internal_metadata_.unknown_fields(); } inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { return _internal_metadata_.mutable_unknown_fields(); } static const ::google::protobuf::Descriptor* descriptor(); static const ProfDAGProto& default_instance(); static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY static inline const ProfDAGProto* internal_default_instance() { return reinterpret_cast<const ProfDAGProto*>( &_ProfDAGProto_default_instance_); } static int const kIndexInFileMessages = 2; void Swap(ProfDAGProto* other); friend void swap(ProfDAGProto& a, ProfDAGProto& b) { a.Swap(&b); } // implements Message ---------------------------------------------- inline ProfDAGProto* New() const PROTOBUF_FINAL { return New(NULL); } ProfDAGProto* New(::google::protobuf::Arena* arena) const PROTOBUF_FINAL; void CopyFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL; void 
MergeFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL; void CopyFrom(const ProfDAGProto& from); void MergeFrom(const ProfDAGProto& from); void Clear() PROTOBUF_FINAL; bool IsInitialized() const PROTOBUF_FINAL; size_t ByteSizeLong() const PROTOBUF_FINAL; bool MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) PROTOBUF_FINAL; void SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const PROTOBUF_FINAL; ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( bool deterministic, ::google::protobuf::uint8* target) const PROTOBUF_FINAL; int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } private: void SharedCtor(); void SharedDtor(); void SetCachedSize(int size) const PROTOBUF_FINAL; void InternalSwap(ProfDAGProto* other); private: inline ::google::protobuf::Arena* GetArenaNoVirtual() const { return NULL; } inline void* MaybeArenaPtr() const { return NULL; } public: ::google::protobuf::Metadata GetMetadata() const PROTOBUF_FINAL; // nested types ---------------------------------------------------- // accessors ------------------------------------------------------- // repeated .caffe2.BlobProfile output_profile = 5; int output_profile_size() const; void clear_output_profile(); static const int kOutputProfileFieldNumber = 5; const ::caffe2::BlobProfile& output_profile(int index) const; ::caffe2::BlobProfile* mutable_output_profile(int index); ::caffe2::BlobProfile* add_output_profile(); ::google::protobuf::RepeatedPtrField< ::caffe2::BlobProfile >* mutable_output_profile(); const ::google::protobuf::RepeatedPtrField< ::caffe2::BlobProfile >& output_profile() const; // required string name = 1; bool has_name() const; void clear_name(); static const int kNameFieldNumber = 1; const ::std::string& name() const; void set_name(const ::std::string& value); #if LANG_CXX11 void set_name(::std::string&& value); #endif void set_name(const char* value); void set_name(const char* value, size_t 
size); ::std::string* mutable_name(); ::std::string* release_name(); void set_allocated_name(::std::string* name); // optional .caffe2.TwoNumberStatsProto execution_time = 4; bool has_execution_time() const; void clear_execution_time(); static const int kExecutionTimeFieldNumber = 4; const ::caffe2::TwoNumberStatsProto& execution_time() const; ::caffe2::TwoNumberStatsProto* release_execution_time(); ::caffe2::TwoNumberStatsProto* mutable_execution_time(); void set_allocated_execution_time(::caffe2::TwoNumberStatsProto* execution_time); // required float mean = 2; bool has_mean() const; void clear_mean(); static const int kMeanFieldNumber = 2; float mean() const; void set_mean(float value); // required float stddev = 3; bool has_stddev() const; void clear_stddev(); static const int kStddevFieldNumber = 3; float stddev() const; void set_stddev(float value); // @@protoc_insertion_point(class_scope:caffe2.ProfDAGProto) private: void set_has_name(); void clear_has_name(); void set_has_mean(); void clear_has_mean(); void set_has_stddev(); void clear_has_stddev(); void set_has_execution_time(); void clear_has_execution_time(); // helper for ByteSizeLong() size_t RequiredFieldsByteSizeFallback() const; ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::internal::HasBits<1> _has_bits_; mutable int _cached_size_; ::google::protobuf::RepeatedPtrField< ::caffe2::BlobProfile > output_profile_; ::google::protobuf::internal::ArenaStringPtr name_; ::caffe2::TwoNumberStatsProto* execution_time_; float mean_; float stddev_; friend struct ::protobuf_caffe2_2fproto_2fprof_5fdag_2eproto::TableStruct; friend void ::protobuf_caffe2_2fproto_2fprof_5fdag_2eproto::InitDefaultsProfDAGProtoImpl(); }; // your_sha256_hash--- class CAFFE2_API ProfDAGProtos : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:caffe2.ProfDAGProtos) */ { public: ProfDAGProtos(); virtual ~ProfDAGProtos(); ProfDAGProtos(const ProfDAGProtos& 
from); inline ProfDAGProtos& operator=(const ProfDAGProtos& from) { CopyFrom(from); return *this; } #if LANG_CXX11 ProfDAGProtos(ProfDAGProtos&& from) noexcept : ProfDAGProtos() { *this = ::std::move(from); } inline ProfDAGProtos& operator=(ProfDAGProtos&& from) noexcept { if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { if (this != &from) InternalSwap(&from); } else { CopyFrom(from); } return *this; } #endif inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { return _internal_metadata_.unknown_fields(); } inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { return _internal_metadata_.mutable_unknown_fields(); } static const ::google::protobuf::Descriptor* descriptor(); static const ProfDAGProtos& default_instance(); static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY static inline const ProfDAGProtos* internal_default_instance() { return reinterpret_cast<const ProfDAGProtos*>( &_ProfDAGProtos_default_instance_); } static int const kIndexInFileMessages = 3; void Swap(ProfDAGProtos* other); friend void swap(ProfDAGProtos& a, ProfDAGProtos& b) { a.Swap(&b); } // implements Message ---------------------------------------------- inline ProfDAGProtos* New() const PROTOBUF_FINAL { return New(NULL); } ProfDAGProtos* New(::google::protobuf::Arena* arena) const PROTOBUF_FINAL; void CopyFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL; void MergeFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL; void CopyFrom(const ProfDAGProtos& from); void MergeFrom(const ProfDAGProtos& from); void Clear() PROTOBUF_FINAL; bool IsInitialized() const PROTOBUF_FINAL; size_t ByteSizeLong() const PROTOBUF_FINAL; bool MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) PROTOBUF_FINAL; void SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const PROTOBUF_FINAL; ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( bool deterministic, 
::google::protobuf::uint8* target) const PROTOBUF_FINAL; int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } private: void SharedCtor(); void SharedDtor(); void SetCachedSize(int size) const PROTOBUF_FINAL; void InternalSwap(ProfDAGProtos* other); private: inline ::google::protobuf::Arena* GetArenaNoVirtual() const { return NULL; } inline void* MaybeArenaPtr() const { return NULL; } public: ::google::protobuf::Metadata GetMetadata() const PROTOBUF_FINAL; // nested types ---------------------------------------------------- // accessors ------------------------------------------------------- // repeated .caffe2.ProfDAGProto stats = 1; int stats_size() const; void clear_stats(); static const int kStatsFieldNumber = 1; const ::caffe2::ProfDAGProto& stats(int index) const; ::caffe2::ProfDAGProto* mutable_stats(int index); ::caffe2::ProfDAGProto* add_stats(); ::google::protobuf::RepeatedPtrField< ::caffe2::ProfDAGProto >* mutable_stats(); const ::google::protobuf::RepeatedPtrField< ::caffe2::ProfDAGProto >& stats() const; // optional string net_name = 2; bool has_net_name() const; void clear_net_name(); static const int kNetNameFieldNumber = 2; const ::std::string& net_name() const; void set_net_name(const ::std::string& value); #if LANG_CXX11 void set_net_name(::std::string&& value); #endif void set_net_name(const char* value); void set_net_name(const char* value, size_t size); ::std::string* mutable_net_name(); ::std::string* release_net_name(); void set_allocated_net_name(::std::string* net_name); // @@protoc_insertion_point(class_scope:caffe2.ProfDAGProtos) private: void set_has_net_name(); void clear_has_net_name(); ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::internal::HasBits<1> _has_bits_; mutable int _cached_size_; ::google::protobuf::RepeatedPtrField< ::caffe2::ProfDAGProto > stats_; ::google::protobuf::internal::ArenaStringPtr net_name_; friend struct 
::protobuf_caffe2_2fproto_2fprof_5fdag_2eproto::TableStruct; friend void ::protobuf_caffe2_2fproto_2fprof_5fdag_2eproto::InitDefaultsProfDAGProtosImpl(); }; // =================================================================== // =================================================================== #ifdef __GNUC__ #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wstrict-aliasing" #endif // __GNUC__ // TwoNumberStatsProto // optional float mean = 1; inline bool TwoNumberStatsProto::has_mean() const { return (_has_bits_[0] & 0x00000001u) != 0; } inline void TwoNumberStatsProto::set_has_mean() { _has_bits_[0] |= 0x00000001u; } inline void TwoNumberStatsProto::clear_has_mean() { _has_bits_[0] &= ~0x00000001u; } inline void TwoNumberStatsProto::clear_mean() { mean_ = 0; clear_has_mean(); } inline float TwoNumberStatsProto::mean() const { // @@protoc_insertion_point(field_get:caffe2.TwoNumberStatsProto.mean) return mean_; } inline void TwoNumberStatsProto::set_mean(float value) { set_has_mean(); mean_ = value; // @@protoc_insertion_point(field_set:caffe2.TwoNumberStatsProto.mean) } // optional float stddev = 2; inline bool TwoNumberStatsProto::has_stddev() const { return (_has_bits_[0] & 0x00000002u) != 0; } inline void TwoNumberStatsProto::set_has_stddev() { _has_bits_[0] |= 0x00000002u; } inline void TwoNumberStatsProto::clear_has_stddev() { _has_bits_[0] &= ~0x00000002u; } inline void TwoNumberStatsProto::clear_stddev() { stddev_ = 0; clear_has_stddev(); } inline float TwoNumberStatsProto::stddev() const { // @@protoc_insertion_point(field_get:caffe2.TwoNumberStatsProto.stddev) return stddev_; } inline void TwoNumberStatsProto::set_stddev(float value) { set_has_stddev(); stddev_ = value; // @@protoc_insertion_point(field_set:caffe2.TwoNumberStatsProto.stddev) } // optional int64 count = 3; inline bool TwoNumberStatsProto::has_count() const { return (_has_bits_[0] & 0x00000004u) != 0; } inline void TwoNumberStatsProto::set_has_count() { _has_bits_[0] |= 
0x00000004u; } inline void TwoNumberStatsProto::clear_has_count() { _has_bits_[0] &= ~0x00000004u; } inline void TwoNumberStatsProto::clear_count() { count_ = GOOGLE_LONGLONG(0); clear_has_count(); } inline ::google::protobuf::int64 TwoNumberStatsProto::count() const { // @@protoc_insertion_point(field_get:caffe2.TwoNumberStatsProto.count) return count_; } inline void TwoNumberStatsProto::set_count(::google::protobuf::int64 value) { set_has_count(); count_ = value; // @@protoc_insertion_point(field_set:caffe2.TwoNumberStatsProto.count) } // your_sha256_hash--- // BlobProfile // optional string name = 1; inline bool BlobProfile::has_name() const { return (_has_bits_[0] & 0x00000001u) != 0; } inline void BlobProfile::set_has_name() { _has_bits_[0] |= 0x00000001u; } inline void BlobProfile::clear_has_name() { _has_bits_[0] &= ~0x00000001u; } inline void BlobProfile::clear_name() { name_.ClearToEmptyNoArena(&GetEmptyStringAlreadyInited()); clear_has_name(); } inline const ::std::string& BlobProfile::name() const { // @@protoc_insertion_point(field_get:caffe2.BlobProfile.name) return name_.GetNoArena(); } inline void BlobProfile::set_name(const ::std::string& value) { set_has_name(); name_.SetNoArena(&GetEmptyStringAlreadyInited(), value); // @@protoc_insertion_point(field_set:caffe2.BlobProfile.name) } #if LANG_CXX11 inline void BlobProfile::set_name(::std::string&& value) { set_has_name(); name_.SetNoArena( &GetEmptyStringAlreadyInited(), ::std::move(value)); // @@protoc_insertion_point(field_set_rvalue:caffe2.BlobProfile.name) } #endif inline void BlobProfile::set_name(const char* value) { GOOGLE_DCHECK(value != NULL); set_has_name(); name_.SetNoArena(&GetEmptyStringAlreadyInited(), ::std::string(value)); // @@protoc_insertion_point(field_set_char:caffe2.BlobProfile.name) } inline void BlobProfile::set_name(const char* value, size_t size) { set_has_name(); name_.SetNoArena(&GetEmptyStringAlreadyInited(), ::std::string(reinterpret_cast<const char*>(value), size)); // 
@@protoc_insertion_point(field_set_pointer:caffe2.BlobProfile.name) } inline ::std::string* BlobProfile::mutable_name() { set_has_name(); // @@protoc_insertion_point(field_mutable:caffe2.BlobProfile.name) return name_.MutableNoArena(&GetEmptyStringAlreadyInited()); } inline ::std::string* BlobProfile::release_name() { // @@protoc_insertion_point(field_release:caffe2.BlobProfile.name) clear_has_name(); return name_.ReleaseNoArena(&GetEmptyStringAlreadyInited()); } inline void BlobProfile::set_allocated_name(::std::string* name) { if (name != NULL) { set_has_name(); } else { clear_has_name(); } name_.SetAllocatedNoArena(&GetEmptyStringAlreadyInited(), name); // @@protoc_insertion_point(field_set_allocated:caffe2.BlobProfile.name) } // optional .caffe2.TwoNumberStatsProto bytes_used = 3; inline bool BlobProfile::has_bytes_used() const { return (_has_bits_[0] & 0x00000002u) != 0; } inline void BlobProfile::set_has_bytes_used() { _has_bits_[0] |= 0x00000002u; } inline void BlobProfile::clear_has_bytes_used() { _has_bits_[0] &= ~0x00000002u; } inline void BlobProfile::clear_bytes_used() { if (bytes_used_ != NULL) bytes_used_->Clear(); clear_has_bytes_used(); } inline const ::caffe2::TwoNumberStatsProto& BlobProfile::bytes_used() const { const ::caffe2::TwoNumberStatsProto* p = bytes_used_; // @@protoc_insertion_point(field_get:caffe2.BlobProfile.bytes_used) return p != NULL ? 
*p : *reinterpret_cast<const ::caffe2::TwoNumberStatsProto*>( &::caffe2::_TwoNumberStatsProto_default_instance_); } inline ::caffe2::TwoNumberStatsProto* BlobProfile::release_bytes_used() { // @@protoc_insertion_point(field_release:caffe2.BlobProfile.bytes_used) clear_has_bytes_used(); ::caffe2::TwoNumberStatsProto* temp = bytes_used_; bytes_used_ = NULL; return temp; } inline ::caffe2::TwoNumberStatsProto* BlobProfile::mutable_bytes_used() { set_has_bytes_used(); if (bytes_used_ == NULL) { bytes_used_ = new ::caffe2::TwoNumberStatsProto; } // @@protoc_insertion_point(field_mutable:caffe2.BlobProfile.bytes_used) return bytes_used_; } inline void BlobProfile::set_allocated_bytes_used(::caffe2::TwoNumberStatsProto* bytes_used) { ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); if (message_arena == NULL) { delete bytes_used_; } if (bytes_used) { ::google::protobuf::Arena* submessage_arena = NULL; if (message_arena != submessage_arena) { bytes_used = ::google::protobuf::internal::GetOwnedMessage( message_arena, bytes_used, submessage_arena); } set_has_bytes_used(); } else { clear_has_bytes_used(); } bytes_used_ = bytes_used; // @@protoc_insertion_point(field_set_allocated:caffe2.BlobProfile.bytes_used) } // your_sha256_hash--- // ProfDAGProto // required string name = 1; inline bool ProfDAGProto::has_name() const { return (_has_bits_[0] & 0x00000001u) != 0; } inline void ProfDAGProto::set_has_name() { _has_bits_[0] |= 0x00000001u; } inline void ProfDAGProto::clear_has_name() { _has_bits_[0] &= ~0x00000001u; } inline void ProfDAGProto::clear_name() { name_.ClearToEmptyNoArena(&GetEmptyStringAlreadyInited()); clear_has_name(); } inline const ::std::string& ProfDAGProto::name() const { // @@protoc_insertion_point(field_get:caffe2.ProfDAGProto.name) return name_.GetNoArena(); } inline void ProfDAGProto::set_name(const ::std::string& value) { set_has_name(); name_.SetNoArena(&GetEmptyStringAlreadyInited(), value); // 
@@protoc_insertion_point(field_set:caffe2.ProfDAGProto.name) } #if LANG_CXX11 inline void ProfDAGProto::set_name(::std::string&& value) { set_has_name(); name_.SetNoArena( &GetEmptyStringAlreadyInited(), ::std::move(value)); // @@protoc_insertion_point(field_set_rvalue:caffe2.ProfDAGProto.name) } #endif inline void ProfDAGProto::set_name(const char* value) { GOOGLE_DCHECK(value != NULL); set_has_name(); name_.SetNoArena(&GetEmptyStringAlreadyInited(), ::std::string(value)); // @@protoc_insertion_point(field_set_char:caffe2.ProfDAGProto.name) } inline void ProfDAGProto::set_name(const char* value, size_t size) { set_has_name(); name_.SetNoArena(&GetEmptyStringAlreadyInited(), ::std::string(reinterpret_cast<const char*>(value), size)); // @@protoc_insertion_point(field_set_pointer:caffe2.ProfDAGProto.name) } inline ::std::string* ProfDAGProto::mutable_name() { set_has_name(); // @@protoc_insertion_point(field_mutable:caffe2.ProfDAGProto.name) return name_.MutableNoArena(&GetEmptyStringAlreadyInited()); } inline ::std::string* ProfDAGProto::release_name() { // @@protoc_insertion_point(field_release:caffe2.ProfDAGProto.name) clear_has_name(); return name_.ReleaseNoArena(&GetEmptyStringAlreadyInited()); } inline void ProfDAGProto::set_allocated_name(::std::string* name) { if (name != NULL) { set_has_name(); } else { clear_has_name(); } name_.SetAllocatedNoArena(&GetEmptyStringAlreadyInited(), name); // @@protoc_insertion_point(field_set_allocated:caffe2.ProfDAGProto.name) } // required float mean = 2; inline bool ProfDAGProto::has_mean() const { return (_has_bits_[0] & 0x00000004u) != 0; } inline void ProfDAGProto::set_has_mean() { _has_bits_[0] |= 0x00000004u; } inline void ProfDAGProto::clear_has_mean() { _has_bits_[0] &= ~0x00000004u; } inline void ProfDAGProto::clear_mean() { mean_ = 0; clear_has_mean(); } inline float ProfDAGProto::mean() const { // @@protoc_insertion_point(field_get:caffe2.ProfDAGProto.mean) return mean_; } inline void ProfDAGProto::set_mean(float 
value) { set_has_mean(); mean_ = value; // @@protoc_insertion_point(field_set:caffe2.ProfDAGProto.mean) } // required float stddev = 3; inline bool ProfDAGProto::has_stddev() const { return (_has_bits_[0] & 0x00000008u) != 0; } inline void ProfDAGProto::set_has_stddev() { _has_bits_[0] |= 0x00000008u; } inline void ProfDAGProto::clear_has_stddev() { _has_bits_[0] &= ~0x00000008u; } inline void ProfDAGProto::clear_stddev() { stddev_ = 0; clear_has_stddev(); } inline float ProfDAGProto::stddev() const { // @@protoc_insertion_point(field_get:caffe2.ProfDAGProto.stddev) return stddev_; } inline void ProfDAGProto::set_stddev(float value) { set_has_stddev(); stddev_ = value; // @@protoc_insertion_point(field_set:caffe2.ProfDAGProto.stddev) } // optional .caffe2.TwoNumberStatsProto execution_time = 4; inline bool ProfDAGProto::has_execution_time() const { return (_has_bits_[0] & 0x00000002u) != 0; } inline void ProfDAGProto::set_has_execution_time() { _has_bits_[0] |= 0x00000002u; } inline void ProfDAGProto::clear_has_execution_time() { _has_bits_[0] &= ~0x00000002u; } inline void ProfDAGProto::clear_execution_time() { if (execution_time_ != NULL) execution_time_->Clear(); clear_has_execution_time(); } inline const ::caffe2::TwoNumberStatsProto& ProfDAGProto::execution_time() const { const ::caffe2::TwoNumberStatsProto* p = execution_time_; // @@protoc_insertion_point(field_get:caffe2.ProfDAGProto.execution_time) return p != NULL ? 
*p : *reinterpret_cast<const ::caffe2::TwoNumberStatsProto*>( &::caffe2::_TwoNumberStatsProto_default_instance_); } inline ::caffe2::TwoNumberStatsProto* ProfDAGProto::release_execution_time() { // @@protoc_insertion_point(field_release:caffe2.ProfDAGProto.execution_time) clear_has_execution_time(); ::caffe2::TwoNumberStatsProto* temp = execution_time_; execution_time_ = NULL; return temp; } inline ::caffe2::TwoNumberStatsProto* ProfDAGProto::mutable_execution_time() { set_has_execution_time(); if (execution_time_ == NULL) { execution_time_ = new ::caffe2::TwoNumberStatsProto; } // @@protoc_insertion_point(field_mutable:caffe2.ProfDAGProto.execution_time) return execution_time_; } inline void ProfDAGProto::set_allocated_execution_time(::caffe2::TwoNumberStatsProto* execution_time) { ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); if (message_arena == NULL) { delete execution_time_; } if (execution_time) { ::google::protobuf::Arena* submessage_arena = NULL; if (message_arena != submessage_arena) { execution_time = ::google::protobuf::internal::GetOwnedMessage( message_arena, execution_time, submessage_arena); } set_has_execution_time(); } else { clear_has_execution_time(); } execution_time_ = execution_time; // @@protoc_insertion_point(field_set_allocated:caffe2.ProfDAGProto.execution_time) } // repeated .caffe2.BlobProfile output_profile = 5; inline int ProfDAGProto::output_profile_size() const { return output_profile_.size(); } inline void ProfDAGProto::clear_output_profile() { output_profile_.Clear(); } inline const ::caffe2::BlobProfile& ProfDAGProto::output_profile(int index) const { // @@protoc_insertion_point(field_get:caffe2.ProfDAGProto.output_profile) return output_profile_.Get(index); } inline ::caffe2::BlobProfile* ProfDAGProto::mutable_output_profile(int index) { // @@protoc_insertion_point(field_mutable:caffe2.ProfDAGProto.output_profile) return output_profile_.Mutable(index); } inline ::caffe2::BlobProfile* 
// NOTE(review): protoc-generated inline accessors for the caffe2 profiling
// protos. Line structure restored from a whitespace-collapsed extraction
// (the trailing-// insertion-point comments were swallowing the code that
// followed them on the same physical line); all non-comment tokens are
// unchanged. This is generated code — regenerate from the .proto source
// rather than editing by hand.
ProfDAGProto::add_output_profile() {
  // @@protoc_insertion_point(field_add:caffe2.ProfDAGProto.output_profile)
  return output_profile_.Add();
}
inline ::google::protobuf::RepeatedPtrField< ::caffe2::BlobProfile >*
ProfDAGProto::mutable_output_profile() {
  // @@protoc_insertion_point(field_mutable_list:caffe2.ProfDAGProto.output_profile)
  return &output_profile_;
}
inline const ::google::protobuf::RepeatedPtrField< ::caffe2::BlobProfile >&
ProfDAGProto::output_profile() const {
  // @@protoc_insertion_point(field_list:caffe2.ProfDAGProto.output_profile)
  return output_profile_;
}

// your_sha256_hash---

// ProfDAGProtos

// repeated .caffe2.ProfDAGProto stats = 1;
inline int ProfDAGProtos::stats_size() const {
  return stats_.size();
}
inline void ProfDAGProtos::clear_stats() {
  stats_.Clear();
}
inline const ::caffe2::ProfDAGProto& ProfDAGProtos::stats(int index) const {
  // @@protoc_insertion_point(field_get:caffe2.ProfDAGProtos.stats)
  return stats_.Get(index);
}
inline ::caffe2::ProfDAGProto* ProfDAGProtos::mutable_stats(int index) {
  // @@protoc_insertion_point(field_mutable:caffe2.ProfDAGProtos.stats)
  return stats_.Mutable(index);
}
inline ::caffe2::ProfDAGProto* ProfDAGProtos::add_stats() {
  // @@protoc_insertion_point(field_add:caffe2.ProfDAGProtos.stats)
  return stats_.Add();
}
inline ::google::protobuf::RepeatedPtrField< ::caffe2::ProfDAGProto >*
ProfDAGProtos::mutable_stats() {
  // @@protoc_insertion_point(field_mutable_list:caffe2.ProfDAGProtos.stats)
  return &stats_;
}
inline const ::google::protobuf::RepeatedPtrField< ::caffe2::ProfDAGProto >&
ProfDAGProtos::stats() const {
  // @@protoc_insertion_point(field_list:caffe2.ProfDAGProtos.stats)
  return stats_;
}

// optional string net_name = 2;
// Presence of net_name is tracked via bit 0 of _has_bits_[0].
inline bool ProfDAGProtos::has_net_name() const {
  return (_has_bits_[0] & 0x00000001u) != 0;
}
inline void ProfDAGProtos::set_has_net_name() {
  _has_bits_[0] |= 0x00000001u;
}
inline void ProfDAGProtos::clear_has_net_name() {
  _has_bits_[0] &= ~0x00000001u;
}
inline void ProfDAGProtos::clear_net_name() {
  net_name_.ClearToEmptyNoArena(&GetEmptyStringAlreadyInited());
  clear_has_net_name();
}
inline const ::std::string& ProfDAGProtos::net_name() const {
  // @@protoc_insertion_point(field_get:caffe2.ProfDAGProtos.net_name)
  return net_name_.GetNoArena();
}
inline void ProfDAGProtos::set_net_name(const ::std::string& value) {
  set_has_net_name();
  net_name_.SetNoArena(&GetEmptyStringAlreadyInited(), value);
  // @@protoc_insertion_point(field_set:caffe2.ProfDAGProtos.net_name)
}
#if LANG_CXX11
inline void ProfDAGProtos::set_net_name(::std::string&& value) {
  set_has_net_name();
  net_name_.SetNoArena(
    &GetEmptyStringAlreadyInited(), ::std::move(value));
  // @@protoc_insertion_point(field_set_rvalue:caffe2.ProfDAGProtos.net_name)
}
#endif
inline void ProfDAGProtos::set_net_name(const char* value) {
  GOOGLE_DCHECK(value != NULL);
  set_has_net_name();
  net_name_.SetNoArena(&GetEmptyStringAlreadyInited(), ::std::string(value));
  // @@protoc_insertion_point(field_set_char:caffe2.ProfDAGProtos.net_name)
}
inline void ProfDAGProtos::set_net_name(const char* value, size_t size) {
  set_has_net_name();
  net_name_.SetNoArena(&GetEmptyStringAlreadyInited(),
      ::std::string(reinterpret_cast<const char*>(value), size));
  // @@protoc_insertion_point(field_set_pointer:caffe2.ProfDAGProtos.net_name)
}
inline ::std::string* ProfDAGProtos::mutable_net_name() {
  set_has_net_name();
  // @@protoc_insertion_point(field_mutable:caffe2.ProfDAGProtos.net_name)
  return net_name_.MutableNoArena(&GetEmptyStringAlreadyInited());
}
inline ::std::string* ProfDAGProtos::release_net_name() {
  // @@protoc_insertion_point(field_release:caffe2.ProfDAGProtos.net_name)
  clear_has_net_name();
  return net_name_.ReleaseNoArena(&GetEmptyStringAlreadyInited());
}
inline void ProfDAGProtos::set_allocated_net_name(::std::string* net_name) {
  // Takes ownership of net_name; passing NULL clears the field and its has-bit.
  if (net_name != NULL) {
    set_has_net_name();
  } else {
    clear_has_net_name();
  }
  net_name_.SetAllocatedNoArena(&GetEmptyStringAlreadyInited(), net_name);
  // @@protoc_insertion_point(field_set_allocated:caffe2.ProfDAGProtos.net_name)
}
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif // __GNUC__

// your_sha256_hash---
// your_sha256_hash---
// your_sha256_hash---

// @@protoc_insertion_point(namespace_scope)

} // namespace caffe2

// @@protoc_insertion_point(global_scope)

#endif // PROTOBUF_caffe2_2fproto_2fprof_5fdag_2eproto__INCLUDED
```
```yaml
# Google Cloud Build configuration for the MNIST versioned-pipeline CI sample.
# Flow: build + push the train and tensorboard images, patch the component
# YAMLs with the freshly built image tags, package the KFP pipeline, upload
# it to GCS, then create a pipeline version and run it on the cluster.
steps:
  # Build the training image, tagged with both the commit SHA and "latest".
  - name: "gcr.io/cloud-builders/docker"
    args: [
        "build",
        "-t",
        "${_GCR_PATH}/mnist_train:$COMMIT_SHA",
        "-t",
        "${_GCR_PATH}/mnist_train:latest",
        "${_CODE_PATH}/train",
        "-f",
        "${_CODE_PATH}/train/Dockerfile",
      ]
    id: "MnistBuildFirstImage"
  # Push only the SHA-tagged train image (the :latest tags are pushed via the
  # top-level `images:` list after the build succeeds).
  - name: "gcr.io/cloud-builders/docker"
    args: [
        "push",
        "${_GCR_PATH}/mnist_train:$COMMIT_SHA",
      ]
    id: "MnistPushFirstImage"
    waitFor: ["MnistBuildFirstImage"]
  # Build the tensorboard image, tagged with both the commit SHA and "latest".
  - name: "gcr.io/cloud-builders/docker"
    args: [
        "build",
        "-t",
        "${_GCR_PATH}/mnist_tensorboard:$COMMIT_SHA",
        "-t",
        "${_GCR_PATH}/mnist_tensorboard:latest",
        "${_CODE_PATH}/tensorboard",
        "-f",
        "${_CODE_PATH}/tensorboard/Dockerfile",
      ]
    id: "MnistBuildSecondImage"
  # Push only the SHA-tagged tensorboard image.
  - name: "gcr.io/cloud-builders/docker"
    args: [
        "push",
        "${_GCR_PATH}/mnist_tensorboard:$COMMIT_SHA",
      ]
    id: "MnistPushSecondImage"
    waitFor: ["MnistBuildSecondImage"]
  # Rewrite the component YAMLs to point at the just-built images, compile the
  # pipeline with the KFP SDK, and stage the zip for upload.
  - name: "python:3.7-slim"
    entrypoint: "/bin/sh"
    args: [
        "-c",
        "cd ${_CODE_PATH}; pip3 install cffi==1.12.3 --upgrade; pip3 install kfp==0.1.37; sed -i 's|image: train_image_location|image: ${_GCR_PATH}/mnist_train:$COMMIT_SHA|g' ./train/component.yaml; sed -i 's|image: tensorboard_image_location|image: ${_GCR_PATH}/mnist_tensorboard:$COMMIT_SHA|g' ./tensorboard/component.yaml; sed -i 's|ui_metadata_path|${_UI_METADATA_PATH}|g' ./tensorboard/component.yaml; python pipeline.py --gcr_address ${_GCR_PATH}; cp pipeline.py.zip /workspace/pipeline.zip",
      ]
    id: "MnistPackagePipeline"
  # Upload the packaged pipeline to the GCS bucket, keyed by commit SHA.
  - name: "gcr.io/cloud-builders/gsutil"
    args: [
        "cp",
        "/workspace/pipeline.zip",
        "${_GS_BUCKET}/$COMMIT_SHA/pipeline.zip"
      ]
    id: "MnistUploadPipeline"
    waitFor: ["MnistPackagePipeline"]
  # Authenticate to the cluster (/builder/kubectl.bash) and create a new
  # pipeline version from the uploaded zip, then kick off a run.
  - name: "gcr.io/cloud-builders/kubectl"
    entrypoint: "/bin/sh"
    args: [
        "-c",
        "cd ${_CODE_PATH}; apt-get update; apt-get install -y python3-pip; apt-get install -y libssl-dev libffi-dev; /builder/kubectl.bash; pip3 install kfp==0.1.37; pip3 install kubernetes; python3 create_pipeline_version_and_run.py --bucket_name ${_GS_BUCKET} --commit_sha $COMMIT_SHA --pipeline_id ${_PIPELINE_ID} --output_path ${_UI_METADATA_PATH}"
      ]
    env:
      # Required by the kubectl builder to locate the target GKE cluster.
      - "CLOUDSDK_COMPUTE_ZONE=[Your cluster zone, for example: us-central1-a]"
      - "CLOUDSDK_CONTAINER_CLUSTER=[Your cluster name, for example: my-cluster]"
    id: "MnistCreatePipelineVersionAndRun"
# The :latest tags are pushed automatically when the build succeeds.
images:
  - "${_GCR_PATH}/mnist_train:latest"
  - "${_GCR_PATH}/mnist_tensorboard:latest"
# User-supplied substitutions; bracketed values are placeholders to fill in.
substitutions:
  _CODE_PATH: /workspace/samples/contrib/versioned-pipeline-ci-samples/mnist-ci-sample
  _NAMESPACE: kubeflow
  _GCR_PATH: [Your cloud registry path. For example, gcr.io/my-project-id]
  _GS_BUCKET: [Name of your cloud storage bucket. For example, gs://my-project-bucket]
  _PIPELINE_ID: [Your kubeflow pipeline id to create a version on. Get it from Kubeflow Pipeline UI. For example, f6f8558a-6eec-4ef4-b343-a650473ee613]
  _UI_METADATA_PATH: [Path to the file which specifies where your metadata is located. For example, /mlpipeline-ui-metadata.json ]
```
Stefano Vetrano (25 March 1923 – 1 August 2018) was an Italian politician who held a degree in industrial chemistry and served as a Deputy representing the Italian Communist Party from 1968 to 1975. He died at the age of 95 in Baiano, Province of Avellino, on 1 August 2018. References 1923 births 2018 deaths Italian Communist Party politicians Politicians of Campania Deputies of Legislature VI of Italy Deputies of Legislature V of Italy
Friedrich Heinrich Wilhelm Martini (31 August 1729, Ohrdruf – 27 June 1778, Berlin) was a German physician, translator and conchologist. Martini practised medicine in Berlin. In 1769, he began the colour-illustrated shell book: Neues systematisches Conchylien-Cabinet, published by Gabriel Nikolaus Raspe at Nürnberg. He died following the publication of the third volume, after which the series was continued by Johann Hieronymus Chemnitz (1730–1800), who added eight volumes between 1779 and 1795. Because these volumes did not use the binomial system, the species names introduced are not considered valid. However, the specimens illustrated often became type specimens, because subsequently others referred to them when publishing valid descriptions. In 1773, Martini founded the Berlinische Gesellschaft Naturforschender Freunde, consisting of a group of natural historians who visited each other's collections and built up a collection for the society. The society still exists today. References External links BHL Digital Neues systematisches Conchylien-Cabinet BHL Typescript index to Neues systematisches Conchylien-Cabinet Zoologica Göttingen State and University Library Digital Neues systematisches Conchylien-Cabinet Conchologists 18th-century German physicians 1729 births 1778 deaths