code stringlengths 3 1.01M | repo_name stringlengths 5 116 | path stringlengths 3 311 | language stringclasses 30
values | license stringclasses 15
values | size int64 3 1.01M |
|---|---|---|---|---|---|
/*
* Copyright 2000-2010 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui;
import com.intellij.util.PairFunction;
import com.intellij.util.containers.Convertor;
import javax.swing.*;
import javax.swing.table.TableModel;
import java.util.ListIterator;
/**
 * Speed search for {@link JTable}: treats the table as a flat, row-major list of
 * cell indices and matches the typed prefix against each cell's display text.
 */
public class TableSpeedSearch extends SpeedSearchBase<JTable> {
  // Default conversion: null renders as the empty string, everything else via toString().
  private static final PairFunction<Object, Cell, String> TO_STRING = new PairFunction<Object, Cell, String>() {
    @Override
    public String fun(Object o, Cell cell) {
      return o == null ? "" : o.toString();
    }
  };

  // Converts a cell value plus its (row, column) coordinates into the text to match.
  private final PairFunction<Object, Cell, String> myToStringConvertor;

  public TableSpeedSearch(JTable table) {
    this(table, TO_STRING);
  }

  public TableSpeedSearch(JTable table, final Convertor<Object, String> toStringConvertor) {
    // Adapt the single-argument convertor to the (value, cell) form used internally.
    this(table, new PairFunction<Object, Cell, String>() {
      @Override
      public String fun(Object o, Cell c) {
        return toStringConvertor.convert(o);
      }
    });
  }

  public TableSpeedSearch(JTable table, final PairFunction<Object, Cell, String> toStringConvertor) {
    super(table);
    myToStringConvertor = toStringConvertor;
  }

  @Override
  protected boolean isSpeedSearchEnabled() {
    // Keystrokes during in-place cell editing belong to the editor, not the search popup.
    return !getComponent().isEditing() && super.isSpeedSearchEnabled();
  }

  @Override
  protected ListIterator<Object> getElementIterator(int startingIndex) {
    return new MyListIterator(startingIndex);
  }

  @Override
  protected int getElementCount() {
    final TableModel tableModel = myComponent.getModel();
    return tableModel.getRowCount() * tableModel.getColumnCount();
  }

  @Override
  protected void selectElement(Object element, String selectedText) {
    final int index = ((Integer)element).intValue();
    final int columnCount = myComponent.getModel().getColumnCount();
    // Elements are flat row-major indices; decode back into (row, column).
    final int row = index / columnCount;
    final int col = index % columnCount;
    myComponent.getSelectionModel().setSelectionInterval(row, row);
    myComponent.getColumnModel().getSelectionModel().setSelectionInterval(col, col);
    TableUtil.scrollSelectionToVisible(myComponent);
  }

  @Override
  protected int getSelectedIndex() {
    final int row = myComponent.getSelectedRow();
    final int col = myComponent.getSelectedColumn();
    // selected row is not enough as we want to select specific cell in a large multi-column table
    return row > -1 && col > -1 ? row * myComponent.getModel().getColumnCount() + col : -1;
  }

  @Override
  protected Object[] getAllElements() {
    throw new UnsupportedOperationException("Not implemented");
  }

  @Override
  protected String getElementText(Object element) {
    final int index = ((Integer)element).intValue();
    final TableModel model = myComponent.getModel();
    final int columnCount = model.getColumnCount();
    // Convert view coordinates to model coordinates before fetching the value.
    int row = myComponent.convertRowIndexToModel(index / columnCount);
    int col = myComponent.convertColumnIndexToModel(index % columnCount);
    Object value = model.getValueAt(row, col);
    return myToStringConvertor.fun(value, new Cell(row, col));
  }

  /** Iterates over all flat cell indices [0, rowCount * columnCount) as boxed Integers. */
  private class MyListIterator implements ListIterator<Object> {
    private int myCursor;

    public MyListIterator(int startingIndex) {
      final int total = getElementCount();
      // A negative starting index means "start past the end" so backward iteration
      // begins from the last element.
      myCursor = startingIndex < 0 ? total : startingIndex;
    }

    @Override
    public boolean hasNext() {
      return myCursor < getElementCount();
    }

    @Override
    public Object next() {
      return myCursor++;
    }

    @Override
    public boolean hasPrevious() {
      return myCursor > 0;
    }

    @Override
    public Object previous() {
      return (myCursor--) - 1;
    }

    @Override
    public int nextIndex() {
      return myCursor;
    }

    @Override
    public int previousIndex() {
      return myCursor - 1;
    }

    // Mutation is unsupported. Per the ListIterator contract these throw
    // UnsupportedOperationException (previously AssertionError, which violated
    // the documented contract).
    @Override
    public void remove() {
      throw new UnsupportedOperationException("Not Implemented");
    }

    @Override
    public void set(Object o) {
      throw new UnsupportedOperationException("Not Implemented");
    }

    @Override
    public void add(Object o) {
      throw new UnsupportedOperationException("Not Implemented");
    }
  }
}
| liveqmock/platform-tools-idea | platform/platform-impl/src/com/intellij/ui/TableSpeedSearch.java | Java | apache-2.0 | 4,403 |
/*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.endpoint.web;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.boot.actuate.endpoint.ExposableEndpoint;
/**
 * A resolver for {@link Link links} to web endpoints.
 *
 * @author Andy Wilkinson
 * @since 2.0.0
 */
public class EndpointLinksResolver {

	private static final Log logger = LogFactory.getLog(EndpointLinksResolver.class);

	private final Collection<? extends ExposableEndpoint<?>> endpoints;

	/**
	 * Creates a new {@code EndpointLinksResolver} that will resolve links to the given
	 * {@code endpoints}.
	 * @param endpoints the endpoints
	 */
	public EndpointLinksResolver(Collection<? extends ExposableEndpoint<?>> endpoints) {
		this.endpoints = endpoints;
	}

	/**
	 * Creates a new {@code EndpointLinksResolver} that will resolve links to the given
	 * {@code endpoints} that are exposed beneath the given {@code basePath}.
	 * @param endpoints the endpoints
	 * @param basePath the basePath
	 */
	public EndpointLinksResolver(Collection<? extends ExposableEndpoint<?>> endpoints,
			String basePath) {
		this.endpoints = endpoints;
		if (logger.isInfoEnabled()) {
			logger.info("Exposing " + endpoints.size()
					+ " endpoint(s) beneath base path '" + basePath + "'");
		}
	}

	/**
	 * Resolves links to the known endpoints based on a request with the given
	 * {@code requestUrl}.
	 * @param requestUrl the url of the request for the endpoint links
	 * @return the links
	 */
	public Map<String, Link> resolveLinks(String requestUrl) {
		String baseUrl = stripTrailingSlash(requestUrl);
		Map<String, Link> links = new LinkedHashMap<>();
		links.put("self", new Link(baseUrl));
		for (ExposableEndpoint<?> endpoint : this.endpoints) {
			if (endpoint instanceof ExposableWebEndpoint) {
				addOperationLinks(links, (ExposableWebEndpoint) endpoint, baseUrl);
			}
			else if (endpoint instanceof PathMappedEndpoint) {
				String rootPath = ((PathMappedEndpoint) endpoint).getRootPath();
				links.put(endpoint.getId(), linkFor(baseUrl, rootPath));
			}
		}
		return links;
	}

	// Drops a single trailing '/' so endpoint paths can be appended uniformly.
	private String stripTrailingSlash(String requestUrl) {
		return requestUrl.endsWith("/")
				? requestUrl.substring(0, requestUrl.length() - 1) : requestUrl;
	}

	// Adds one link per web operation of the endpoint, keyed by operation id.
	private void addOperationLinks(Map<String, Link> links,
			ExposableWebEndpoint endpoint, String baseUrl) {
		for (WebOperation operation : endpoint.getOperations()) {
			links.put(operation.getId(),
					linkFor(baseUrl, operation.getRequestPredicate().getPath()));
		}
	}

	// Builds an absolute link by joining the base URL and a (possibly relative) path.
	private Link linkFor(String baseUrl, String path) {
		return new Link(baseUrl + (path.startsWith("/") ? path : "/" + path));
	}

}
| bclozel/spring-boot | spring-boot-project/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/web/EndpointLinksResolver.java | Java | apache-2.0 | 3,528 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.containermanager;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import junit.framework.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.yarn.api.ContainerManagementProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.security.NMTokenIdentifier;
import org.apache.hadoop.yarn.server.api.ResourceTracker;
import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor;
import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
import org.apache.hadoop.yarn.server.nodemanager.LocalRMInterface;
import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager.NMContext;
import org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdater;
import org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics;
import org.apache.hadoop.yarn.server.nodemanager.security.NMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.nodemanager.security.NMTokenSecretManagerInNM;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.junit.After;
import org.junit.Before;
/**
 * Abstract base class for NodeManager container-manager tests. Wires up a
 * {@link ContainerManagerImpl} backed by heavily stubbed collaborators (a fake
 * RM client, a no-op deletion service, disabled request authorization) and
 * provides polling helpers for waiting on container/application state
 * transitions. Subclasses get fresh scratch directories and a started
 * NodeStatusUpdater via {@link #setup()}.
 */
public abstract class BaseContainerManagerTest {

  protected static RecordFactory recordFactory = RecordFactoryProvider
      .getRecordFactory(null);

  protected static FileContext localFS;
  // Per-test-class scratch directories under target/; names include the
  // concrete subclass name so different test classes do not collide.
  protected static File localDir;
  protected static File localLogDir;
  protected static File remoteLogDir;
  protected static File tmpDir;

  protected final NodeManagerMetrics metrics = NodeManagerMetrics.create();

  public BaseContainerManagerTest() throws UnsupportedFileSystemException {
    // NOTE(review): static fields are (re)assigned from an instance
    // constructor; this relies on the directory names being derived from the
    // concrete class, so the last-constructed instance wins per class.
    localFS = FileContext.getLocalFSFileContext();
    localDir =
        new File("target", this.getClass().getSimpleName() + "-localDir")
            .getAbsoluteFile();
    localLogDir =
        new File("target", this.getClass().getSimpleName() + "-localLogDir")
            .getAbsoluteFile();
    remoteLogDir =
        new File("target", this.getClass().getSimpleName() + "-remoteLogDir")
            .getAbsoluteFile();
    tmpDir = new File("target", this.getClass().getSimpleName() + "-tmpDir");
  }

  protected static Log LOG = LogFactory
      .getLog(BaseContainerManagerTest.class);

  // Fixed HTTP port reported by the stub NM context below; no web server runs.
  protected static final int HTTP_PORT = 5412;

  protected Configuration conf = new YarnConfiguration();

  // NM context stub that only overrides the reported HTTP port.
  protected Context context = new NMContext(new NMContainerTokenSecretManager(
      conf), new NMTokenSecretManagerInNM()) {
    public int getHttpPort() {
      return HTTP_PORT;
    };
  };

  protected ContainerExecutor exec;
  protected DeletionService delSrvc;
  // User the test containers nominally run as.
  protected String user = "nobody";
  protected NodeHealthCheckerService nodeHealthChecker;
  protected LocalDirsHandlerService dirsHandler;

  // Fake RM identifier handed out by the stubbed status updater below.
  protected final long DUMMY_RM_IDENTIFIER = 1234;

  // Status-updater stub: talks to an in-process fake RM and never starts the
  // background heartbeat thread, so tests control all state transitions.
  protected NodeStatusUpdater nodeStatusUpdater = new NodeStatusUpdaterImpl(
      context, new AsyncDispatcher(), null, metrics) {
    @Override
    protected ResourceTracker getRMClient() {
      return new LocalRMInterface();
    };

    @Override
    protected void stopRMProxy() {
      return;
    }

    @Override
    protected void startStatusUpdater() {
      return; // Don't start any updating thread.
    }

    @Override
    public long getRMIdentifier() {
      // There is no real RM registration, simulate and set RMIdentifier
      return DUMMY_RM_IDENTIFIER;
    }
  };

  protected ContainerManagerImpl containerManager = null;

  // Subclasses may override to supply a different executor implementation.
  protected ContainerExecutor createContainerExecutor() {
    DefaultContainerExecutor exec = new DefaultContainerExecutor();
    exec.setConf(conf);
    return exec;
  }

  /**
   * Recreates clean scratch directories, configures the NM, and builds and
   * initializes the container manager plus its stubbed collaborators.
   */
  @Before
  public void setup() throws IOException {
    // Start from empty directories for every test.
    localFS.delete(new Path(localDir.getAbsolutePath()), true);
    localFS.delete(new Path(tmpDir.getAbsolutePath()), true);
    localFS.delete(new Path(localLogDir.getAbsolutePath()), true);
    localFS.delete(new Path(remoteLogDir.getAbsolutePath()), true);
    localDir.mkdir();
    tmpDir.mkdir();
    localLogDir.mkdir();
    remoteLogDir.mkdir();
    LOG.info("Created localDir in " + localDir.getAbsolutePath());
    LOG.info("Created tmpDir in " + tmpDir.getAbsolutePath());

    String bindAddress = "0.0.0.0:12345";
    conf.set(YarnConfiguration.NM_ADDRESS, bindAddress);
    conf.set(YarnConfiguration.NM_LOCAL_DIRS, localDir.getAbsolutePath());
    conf.set(YarnConfiguration.NM_LOG_DIRS, localLogDir.getAbsolutePath());
    conf.set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR, remoteLogDir.getAbsolutePath());
    conf.setLong(YarnConfiguration.NM_LOG_RETAIN_SECONDS, 1);

    // Default delSrvc
    delSrvc = createDeletionService();
    delSrvc.init(conf);

    exec = createContainerExecutor();
    nodeHealthChecker = new NodeHealthCheckerService();
    nodeHealthChecker.init(conf);
    dirsHandler = nodeHealthChecker.getDiskHandler();
    containerManager = createContainerManager(delSrvc);
    ((NMContext)context).setContainerManager(containerManager);
    nodeStatusUpdater.init(conf);
    containerManager.init(conf);
    nodeStatusUpdater.start();
  }

  // Container manager with all token/ACL authorization checks disabled and
  // fixed aux-service metadata, so tests need no real security setup.
  protected ContainerManagerImpl
      createContainerManager(DeletionService delSrvc) {
    return new ContainerManagerImpl(context, exec, delSrvc, nodeStatusUpdater,
        metrics, new ApplicationACLsManager(conf), dirsHandler) {
      @Override
      public void
          setBlockNewContainerRequests(boolean blockNewContainerRequests) {
        // do nothing
      }

      @Override
      protected void authorizeGetAndStopContainerRequest(ContainerId containerId,
          Container container, boolean stopRequest, NMTokenIdentifier identifier) throws YarnException {
        // do nothing
      }

      @Override
      protected void authorizeUser(UserGroupInformation remoteUgi,
          NMTokenIdentifier nmTokenIdentifier) {
        // do nothing
      }

      @Override
      protected void authorizeStartRequest(
          NMTokenIdentifier nmTokenIdentifier,
          ContainerTokenIdentifier containerTokenIdentifier) throws YarnException {
        // do nothing
      }

      @Override
      protected void updateNMTokenIdentifier(
          NMTokenIdentifier nmTokenIdentifier) throws InvalidToken {
        // Do nothing
      }

      @Override
      public Map<String, ByteBuffer> getAuxServiceMetaData() {
        Map<String, ByteBuffer> serviceData = new HashMap<String, ByteBuffer>();
        serviceData.put("AuxService1",
            ByteBuffer.wrap("AuxServiceMetaData1".getBytes()));
        serviceData.put("AuxService2",
            ByteBuffer.wrap("AuxServiceMetaData2".getBytes()));
        return serviceData;
      }
    };
  }

  // Deletion service that only logs requests instead of deleting, so test
  // artifacts survive for inspection.
  protected DeletionService createDeletionService() {
    return new DeletionService(exec) {
      @Override
      public void delete(String user, Path subDir, Path[] baseDirs) {
        // Don't do any deletions.
        LOG.info("Psuedo delete: user - " + user + ", subDir - " + subDir
            + ", baseDirs - " + baseDirs);
      };
    };
  }

  @After
  public void tearDown() throws IOException, InterruptedException {
    if (containerManager != null) {
      containerManager.stop();
    }
    createContainerExecutor().deleteAsUser(user,
        new Path(localDir.getAbsolutePath()), new Path[] {});
  }

  // Polls until the container reaches finalState, with the default 20s cap.
  public static void waitForContainerState(ContainerManagementProtocol containerManager,
      ContainerId containerID, ContainerState finalState)
      throws InterruptedException, YarnException, IOException {
    waitForContainerState(containerManager, containerID, finalState, 20);
  }

  /**
   * Polls the container status once per second until it reaches
   * {@code finalState} or {@code timeOutMax} seconds elapse; asserts the final
   * state was reached.
   */
  public static void waitForContainerState(ContainerManagementProtocol containerManager,
      ContainerId containerID, ContainerState finalState, int timeOutMax)
      throws InterruptedException, YarnException, IOException {
    List<ContainerId> list = new ArrayList<ContainerId>();
    list.add(containerID);
    GetContainerStatusesRequest request =
        GetContainerStatusesRequest.newInstance(list);
    ContainerStatus containerStatus =
        containerManager.getContainerStatuses(request).getContainerStatuses()
          .get(0);
    int timeoutSecs = 0;
    while (!containerStatus.getState().equals(finalState)
        && timeoutSecs++ < timeOutMax) {
      Thread.sleep(1000);
      LOG.info("Waiting for container to get into state " + finalState
          + ". Current state is " + containerStatus.getState());
      containerStatus = containerManager.getContainerStatuses(request).getContainerStatuses().get(0);
    }
    LOG.info("Container state is " + containerStatus.getState());
    Assert.assertEquals("ContainerState is not correct (timedout)",
        finalState, containerStatus.getState());
  }

  // Polls (1s interval, ~15s cap) until the application reaches finalState;
  // asserts the state was reached.
  static void waitForApplicationState(ContainerManagerImpl containerManager,
      ApplicationId appID, ApplicationState finalState)
      throws InterruptedException {
    // Wait for app-finish
    Application app =
        containerManager.getContext().getApplications().get(appID);
    int timeout = 0;
    while (!(app.getApplicationState().equals(finalState))
        && timeout++ < 15) {
      LOG.info("Waiting for app to reach " + finalState
          + ".. Current state is "
          + app.getApplicationState());
      Thread.sleep(1000);
    }
    Assert.assertTrue("App is not in " + finalState + " yet!! Timedout!!",
        app.getApplicationState().equals(finalState));
  }
}
| tomatoKiller/Hadoop_Source_Learn | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java | Java | apache-2.0 | 11,919 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.proxy;
import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;
import io.airlift.configuration.validation.FileExists;
import javax.validation.constraints.NotNull;
import java.io.File;
import java.net.URI;
/**
 * Configuration for the Trino proxy: the remote server to forward to and the
 * shared-secret file used for authenticating URIs.
 */
public class ProxyConfig
{
    // Remote Trino coordinator that requests are proxied to.
    private URI targetUri;
    // File containing the shared secret used for authenticating URIs.
    private File secretFile;

    @NotNull
    public URI getUri()
    {
        return targetUri;
    }

    @Config("proxy.uri")
    @ConfigDescription("URI of the remote Trino server")
    public ProxyConfig setUri(URI uri)
    {
        this.targetUri = uri;
        return this;
    }

    @NotNull
    @FileExists
    public File getSharedSecretFile()
    {
        return secretFile;
    }

    @Config("proxy.shared-secret-file")
    @ConfigDescription("Shared secret file used for authenticating URIs")
    public ProxyConfig setSharedSecretFile(File sharedSecretFile)
    {
        this.secretFile = sharedSecretFile;
        return this;
    }
}
| electrum/presto | service/trino-proxy/src/main/java/io/trino/proxy/ProxyConfig.java | Java | apache-2.0 | 1,534 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import exceptions
from heat.common import exception
from heat.common import heat_keystoneclient as hkc
from heat.engine.clients import client_plugin
from heat.engine import constraints
class KeystoneClientPlugin(client_plugin.ClientPlugin):
    """Heat client plugin for the Keystone identity service.

    Provides the client factory plus helpers that resolve a keystone
    entity (role, project, domain, group, service, user) given either its
    id or its name, raising ``exception.EntityNotFound`` when no match
    exists.
    """

    exceptions_module = exceptions

    service_types = [IDENTITY] = ['identity']

    def _create(self):
        # heat's KeystoneClient wrapper, bound to the request context.
        return hkc.KeystoneClient(self.context)

    def is_not_found(self, ex):
        return isinstance(ex, exceptions.NotFound)

    def is_over_limit(self, ex):
        return isinstance(ex, exceptions.RequestEntityTooLarge)

    def is_conflict(self, ex):
        return isinstance(ex, exceptions.Conflict)

    def _get_entity_id(self, manager, entity, name_or_id):
        """Resolve ``name_or_id`` to an id using a keystone CRUD manager.

        Shared implementation for the ``get_*_id`` helpers below: try a
        direct lookup by id first, and on NotFound fall back to listing by
        name and returning the first exact name match.

        :param manager: keystone manager (e.g. ``client.roles``)
        :param entity: entity label used in the EntityNotFound error
        :param name_or_id: id (tried first) or name of the entity
        :returns: the entity id
        :raises exception.EntityNotFound: if nothing matches
        """
        try:
            return manager.get(name_or_id).id
        except exceptions.NotFound:
            for entity_obj in manager.list(name=name_or_id):
                if entity_obj.name == name_or_id:
                    return entity_obj.id
        raise exception.EntityNotFound(entity=entity, name=name_or_id)

    def get_role_id(self, role):
        return self._get_entity_id(self.client().client.roles,
                                   'KeystoneRole', role)

    def get_project_id(self, project):
        return self._get_entity_id(self.client().client.projects,
                                   'KeystoneProject', project)

    def get_domain_id(self, domain):
        return self._get_entity_id(self.client().client.domains,
                                   'KeystoneDomain', domain)

    def get_group_id(self, group):
        return self._get_entity_id(self.client().client.groups,
                                   'KeystoneGroup', group)

    def get_service_id(self, service):
        """Resolve a service id.

        Services are not handled by ``_get_entity_id`` because service
        names are not guaranteed unique: a by-name lookup that matches
        more than one service raises KeystoneServiceNameConflict.
        """
        try:
            service_obj = self.client().client.services.get(service)
            return service_obj.id
        except exceptions.NotFound:
            service_list = self.client().client.services.list(name=service)
            if len(service_list) == 1:
                return service_list[0].id
            elif len(service_list) > 1:
                raise exception.KeystoneServiceNameConflict(service=service)
            else:
                raise exception.EntityNotFound(entity='KeystoneService',
                                               name=service)

    def get_user_id(self, user):
        return self._get_entity_id(self.client().client.users,
                                   'KeystoneUser', user)
class KeystoneRoleConstraint(constraints.BaseCustomConstraint):
    """Constraint: the property value must name an existing keystone role."""
    expected_exceptions = (exception.EntityNotFound,)
    def validate_with_client(self, client, role):
        # Raises EntityNotFound (reported as a validation failure) if missing.
        client.client_plugin('keystone').get_role_id(role)
class KeystoneDomainConstraint(constraints.BaseCustomConstraint):
    """Constraint: the property value must name an existing keystone domain."""
    expected_exceptions = (exception.EntityNotFound,)
    def validate_with_client(self, client, domain):
        # Raises EntityNotFound (reported as a validation failure) if missing.
        client.client_plugin('keystone').get_domain_id(domain)
class KeystoneProjectConstraint(constraints.BaseCustomConstraint):
    """Constraint: the property value must name an existing keystone project."""
    expected_exceptions = (exception.EntityNotFound,)
    def validate_with_client(self, client, project):
        # Raises EntityNotFound (reported as a validation failure) if missing.
        client.client_plugin('keystone').get_project_id(project)
class KeystoneGroupConstraint(constraints.BaseCustomConstraint):
    """Constraint: the property value must name an existing keystone group."""
    expected_exceptions = (exception.EntityNotFound,)
    def validate_with_client(self, client, group):
        # Raises EntityNotFound (reported as a validation failure) if missing.
        client.client_plugin('keystone').get_group_id(group)
class KeystoneServiceConstraint(constraints.BaseCustomConstraint):
    """Constraint: the property value must name exactly one keystone service.

    A name matching multiple services is also a failure
    (KeystoneServiceNameConflict), since service names are not unique.
    """
    expected_exceptions = (exception.EntityNotFound,
                           exception.KeystoneServiceNameConflict,)
    def validate_with_client(self, client, service):
        client.client_plugin('keystone').get_service_id(service)
class KeystoneUserConstraint(constraints.BaseCustomConstraint):
    """Constraint: the property value must name an existing keystone user."""
    expected_exceptions = (exception.EntityNotFound,)
    def validate_with_client(self, client, user):
        # Raises EntityNotFound (reported as a validation failure) if missing.
        client.client_plugin('keystone').get_user_id(user)
| cryptickp/heat | heat/engine/clients/os/keystone.py | Python | apache-2.0 | 5,676 |
// test cube
var assert = require('assert'),
math = require('../../../index'),
error = require('../../../lib/error/index'),
unit = math.unit,
bignumber = math.bignumber,
matrix = math.matrix,
range = math.range,
cube = math.cube;
describe('cube', function() {

  it('should return the cube of a boolean', function () {
    assert.equal(cube(true), 1);
    assert.equal(cube(false), 0);
  });

  it('should return the cube of null', function () {
    // fixed: this assertion previously called math.ceil(null) (copy/paste
    // mistake) instead of exercising cube, which this suite is testing
    assert.equal(cube(null), 0);
  });

  it('should return the cube of a number', function() {
    assert.equal(cube(4), 64);
    assert.equal(cube(-2), -8);
    assert.equal(cube(0), 0);
  });

  it('should return the cube of a big number', function() {
    assert.deepEqual(cube(bignumber(4)), bignumber(64));
    assert.deepEqual(cube(bignumber(-2)), bignumber(-8));
    assert.deepEqual(cube(bignumber(0)), bignumber(0));
  });

  it('should return the cube of a complex number', function() {
    assert.deepEqual(cube(math.complex('2i')), math.complex('-8i'));
    assert.deepEqual(cube(math.complex('2+3i')), math.complex('-46+9i'));
    assert.deepEqual(cube(math.complex('2')), math.complex('8'));
  });

  it('should throw an error with strings', function() {
    assert.throws(function () {cube('text')});
  });

  it('should throw an error with units', function() {
    assert.throws(function () {cube(unit('5cm'))});
  });

  it('should throw an error if there\'s wrong number of args', function() {
    assert.throws(function () {cube()}, error.ArgumentsError);
    assert.throws(function () {cube(1, 2)}, error.ArgumentsError);
  });

  it('should cube each element in a matrix, array or range', function() {
    // arrays are evaluated element wise
    assert.deepEqual(cube([2,3,4,5]), [8,27,64,125]);
    assert.deepEqual(cube(matrix([2,3,4,5])), matrix([8,27,64,125]));
    assert.deepEqual(cube(matrix([[1,2],[3,4]])), matrix([[1,8],[27,64]]));
  });

});
//========= Copyright © 1996-2005, Valve Corporation, All rights reserved. ============//
//
// Purpose:
//
// $NoKeywords: $
//=============================================================================//
#include "cbase.h"
#include "hl2mp_cvars.h"
// Ready restart: when non-zero, the game restarts once every player has
// given the ready signal (the text they must say is mp_ready_signal below).
ConVar mp_readyrestart(
	"mp_readyrestart",
	"0",
	FCVAR_GAMEDLL,
	"If non-zero, game will restart once each player gives the ready signal" );
// Ready signal
ConVar mp_ready_signal(
"mp_ready_signal",
"ready",
FCVAR_GAMEDLL,
"Text that each player must speak for the match to begin" ); | TheCallSign/Clifton-Source | src/game/server/hl2mp/hl2mp_cvars.cpp | C++ | apache-2.0 | 616 |
// This file was automatically generated. Do not modify.
'use strict';
goog.provide('Blockly.Msg.id');
goog.require('Blockly.Msg');
// Indonesian (id) locale strings for Blockly: workspace/context-menu UI text,
// block labels, tooltips and help URLs. Keys mirror the English reference
// messages; lines marked "// untranslated" deliberately keep the English or
// empty value. NOTE: this file is generated — string values are runtime data
// and must not be edited by hand.

// --- Workspace / context-menu UI strings ---
Blockly.Msg.ADD_COMMENT = "Tambahkan sebuah comment";
Blockly.Msg.CHANGE_VALUE_TITLE = "Ubah nilai:";
Blockly.Msg.COLLAPSE_ALL = "Tutup blok";
Blockly.Msg.COLLAPSE_BLOCK = "Tutup blok";

// --- Colour blocks ---
Blockly.Msg.COLOUR_BLEND_COLOUR1 = "Warna 1";
Blockly.Msg.COLOUR_BLEND_COLOUR2 = "Warna 2";
Blockly.Msg.COLOUR_BLEND_HELPURL = "http://meyerweb.com/eric/tools/color-blend/";
Blockly.Msg.COLOUR_BLEND_RATIO = "rasio";
Blockly.Msg.COLOUR_BLEND_TITLE = "Tertutup";
Blockly.Msg.COLOUR_BLEND_TOOLTIP = "mencampur dua warna secara bersamaan dengan perbandingan (0.0-1.0).";
Blockly.Msg.COLOUR_PICKER_HELPURL = "https://en.wikipedia.org/wiki/Color";
Blockly.Msg.COLOUR_PICKER_TOOLTIP = "Pilih warna dari daftar warna.";
Blockly.Msg.COLOUR_RANDOM_HELPURL = "http://randomcolour.com"; // untranslated
Blockly.Msg.COLOUR_RANDOM_TITLE = "Warna acak";
Blockly.Msg.COLOUR_RANDOM_TOOLTIP = "Pilih warna secara acak.";
Blockly.Msg.COLOUR_RGB_BLUE = "biru";
Blockly.Msg.COLOUR_RGB_GREEN = "hijau";
Blockly.Msg.COLOUR_RGB_HELPURL = "http://www.december.com/html/spec/colorper.html";
Blockly.Msg.COLOUR_RGB_RED = "merah";
Blockly.Msg.COLOUR_RGB_TITLE = "Dengan warna";
Blockly.Msg.COLOUR_RGB_TOOLTIP = "Buatlah warna dengan jumlah yang ditentukan dari merah, hijau dan biru. Semua nilai harus antarai 0 sampai 100.";

// --- Control-flow (loops / if) blocks ---
Blockly.Msg.CONTROLS_FLOW_STATEMENTS_HELPURL = "https://code.google.com/p/blockly/wiki/Loops#Loop_Termination_Blocks";
Blockly.Msg.CONTROLS_FLOW_STATEMENTS_OPERATOR_BREAK = "Keluar dari perulangan";
Blockly.Msg.CONTROLS_FLOW_STATEMENTS_OPERATOR_CONTINUE = "Lanjutkan dengan langkah penggulangan berikutnya";
Blockly.Msg.CONTROLS_FLOW_STATEMENTS_TOOLTIP_BREAK = "Keluar sementara dari perulanggan.";
Blockly.Msg.CONTROLS_FLOW_STATEMENTS_TOOLTIP_CONTINUE = "Abaikan sisa dari loop ini, dan lanjutkan dengan iterasi berikutnya.";
Blockly.Msg.CONTROLS_FLOW_STATEMENTS_WARNING = "Peringatan: Blok ini hanya dapat digunakan dalam loop.";
Blockly.Msg.CONTROLS_FOREACH_HELPURL = "https://code.google.com/p/blockly/wiki/Loops#for_each for each block";
Blockly.Msg.CONTROLS_FOREACH_INPUT_INLIST = "di dalam list";
Blockly.Msg.CONTROLS_FOREACH_INPUT_INLIST_TAIL = ""; // untranslated
Blockly.Msg.CONTROLS_FOREACH_INPUT_ITEM = "untuk setiap item";
Blockly.Msg.CONTROLS_FOREACH_TOOLTIP = "Untuk tiap-tiap item di dalam list, tetapkan variabel '%1' ke dalam item, selanjutnya kerjakan beberapa statement.";
Blockly.Msg.CONTROLS_FOR_HELPURL = "https://code.google.com/p/blockly/wiki/Loops#count_with";
Blockly.Msg.CONTROLS_FOR_INPUT_FROM_TO_BY = "dari %1 ke %2 dengan step / penambahan %3";
Blockly.Msg.CONTROLS_FOR_INPUT_WITH = "Cacah dengan";
Blockly.Msg.CONTROLS_FOR_TOOLTIP = "Menggunakan variabel %1 dengan mengambil nilai dari batas awal hingga ke batas akhir, dengan interval tertentu, dan mengerjakan block tertentu.";
Blockly.Msg.CONTROLS_IF_ELSEIF_TOOLTIP = "tambahkan prasyarat ke dalam blok IF.";
Blockly.Msg.CONTROLS_IF_ELSE_TOOLTIP = "Terakhir, tambahkan tangkap-semua kondisi kedalam blok jika (if).";
Blockly.Msg.CONTROLS_IF_HELPURL = "http://code.google.com/p/blockly/wiki/If_Then";
Blockly.Msg.CONTROLS_IF_IF_TOOLTIP = "Menambahkan, menghapus, atau menyusun kembali bagian untuk mengkonfigurasi blok IF ini.";
Blockly.Msg.CONTROLS_IF_MSG_ELSE = "else";
Blockly.Msg.CONTROLS_IF_MSG_ELSEIF = "else if";
Blockly.Msg.CONTROLS_IF_MSG_IF = "Jika";
Blockly.Msg.CONTROLS_IF_TOOLTIP_1 = "jika nilainya benar maka kerjakan perintah berikutnya.";
Blockly.Msg.CONTROLS_IF_TOOLTIP_2 = "jika nilainya benar, maka kerjakan blok perintah yang pertama. Jika tidak, kerjakan blok perintah yang kedua.";
Blockly.Msg.CONTROLS_IF_TOOLTIP_3 = "Jika nilai pertama adalah benar (true), maka lakukan perintah-perintah yang berada didalam blok pertama. Jika nilai kedua adalah benar (true), maka lakukan perintah-perintah yang berada didalam blok kedua.";
Blockly.Msg.CONTROLS_IF_TOOLTIP_4 = "Jika blok pertama adalah benar (true), maka lakukan perintah-perintah yang berada didalam blok pertama. Atau jika blok kedua adalah benar (true), maka lakukan perintah-perintah yang berada didalam blok kedua.";
Blockly.Msg.CONTROLS_REPEAT_HELPURL = "https://en.wikipedia.org/wiki/For_loop";
Blockly.Msg.CONTROLS_REPEAT_INPUT_DO = "kerjakan";
Blockly.Msg.CONTROLS_REPEAT_TITLE = "ulangi %1 kali";
Blockly.Msg.CONTROLS_REPEAT_TITLE_REPEAT = "ulangi";
Blockly.Msg.CONTROLS_REPEAT_TITLE_TIMES = "kali";
Blockly.Msg.CONTROLS_REPEAT_TOOLTIP = "Lakukan beberapa perintah beberapa kali.";
Blockly.Msg.CONTROLS_WHILEUNTIL_HELPURL = "http://code.google.com/p/blockly/wiki/Repeat";
Blockly.Msg.CONTROLS_WHILEUNTIL_OPERATOR_UNTIL = "Ulangi sampai";
Blockly.Msg.CONTROLS_WHILEUNTIL_OPERATOR_WHILE = "Ulangi jika";
Blockly.Msg.CONTROLS_WHILEUNTIL_TOOLTIP_UNTIL = "Jika sementara nilai tidak benar (false), maka lakukan beberapa perintah.";
Blockly.Msg.CONTROLS_WHILEUNTIL_TOOLTIP_WHILE = "Jika sementara nilai benar (true), maka lakukan beberapa perintah.";

// --- More context-menu strings ---
Blockly.Msg.DELETE_BLOCK = "Hapus blok";
Blockly.Msg.DELETE_X_BLOCKS = "Hapus %1 blok";
Blockly.Msg.DISABLE_BLOCK = "Nonaktifkan blok";
Blockly.Msg.DUPLICATE_BLOCK = "Duplikat";
Blockly.Msg.ENABLE_BLOCK = "Aktifkan blok";
Blockly.Msg.EXPAND_ALL = "Kembangkan blok-blok";
Blockly.Msg.EXPAND_BLOCK = "Kembangkan blok";
Blockly.Msg.EXTERNAL_INPUTS = "Input-input eksternal";
Blockly.Msg.HELP = "Tolong";
Blockly.Msg.INLINE_INPUTS = "Input inline";

// --- List blocks ---
Blockly.Msg.LISTS_CREATE_EMPTY_HELPURL = "https://en.wikipedia.org/wiki/Linked_list#Empty_lists";
Blockly.Msg.LISTS_CREATE_EMPTY_TITLE = "buat list kosong";
Blockly.Msg.LISTS_CREATE_EMPTY_TOOLTIP = "Mengembalikan daftar, dengan panjang 0, tidak berisi data";
Blockly.Msg.LISTS_CREATE_WITH_CONTAINER_TITLE_ADD = "list";
Blockly.Msg.LISTS_CREATE_WITH_CONTAINER_TOOLTIP = "Tambahkan, hapus, atau susun ulang bagian untuk mengkonfigurasi blok LIST (daftar) ini.";
Blockly.Msg.LISTS_CREATE_WITH_INPUT_WITH = "buat daftar (list) dengan";
Blockly.Msg.LISTS_CREATE_WITH_ITEM_TOOLTIP = "Tambahkan sebuah item ke daftar (list).";
Blockly.Msg.LISTS_CREATE_WITH_TOOLTIP = "Buat sebuah daftar (list) dengan sejumlah item.";
Blockly.Msg.LISTS_GET_INDEX_FIRST = "pertama";
Blockly.Msg.LISTS_GET_INDEX_FROM_END = "# dari akhir";
Blockly.Msg.LISTS_GET_INDEX_FROM_START = "#";
Blockly.Msg.LISTS_GET_INDEX_GET = "dapatkan";
Blockly.Msg.LISTS_GET_INDEX_GET_REMOVE = "dapatkan dan hapus";
Blockly.Msg.LISTS_GET_INDEX_LAST = "terakhir";
Blockly.Msg.LISTS_GET_INDEX_RANDOM = "acak";
Blockly.Msg.LISTS_GET_INDEX_REMOVE = "Hapus";
Blockly.Msg.LISTS_GET_INDEX_TAIL = ""; // untranslated
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_FIRST = "Kembalikan item pertama dalam daftar (list).";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_FROM_END = "Sisipkan item ke dalam posisi yang telah ditentukan didalam list (daftar). Item pertama adalah item yang terakhir.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_FROM_START = "Sisipkan item ke dalam posisi yang telah ditentukan didalam list (daftar). Item pertama adalah item terakhir (yg paling akhir).";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_LAST = "Mengembalikan item pertama dalam list (daftar).";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_RANDOM = "Mengembalikan item acak dalam list (daftar).";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_FIRST = "Menghilangkan dan mengembalikan item pertama dalam list (daftar).";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_FROM_END = "Menghilangkan dan mengembalikan barang di posisi tertentu dalam list (daftar). #1 adalah item terakhir.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_FROM_START = "Menghilangkan dan mengembalikan barang di posisi tertentu dalam list (daftar). #1 adalah item pertama.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_LAST = "Menghilangkan dan mengembalikan item terakhir dalam list (daftar).";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_RANDOM = "Menghilangkan dan mengembalikan barang dengan acak dalam list (daftar).";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_FIRST = "Menghapus item pertama dalam daftar.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_FROM_END = "Menghapus item dengan posisi tertentu dalam daftar. Item pertama adalah item yang terakhir.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_FROM_START = "Menghapus item dengan posisi tertentu dalam daftar. Item pertama adalah item yang terakhir.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_LAST = "Menghapus item terakhir dalam daftar.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_RANDOM = "Menghapus sebuah item secara acak dalam list.";
Blockly.Msg.LISTS_GET_SUBLIST_END_FROM_END = "ke # dari akhir";
Blockly.Msg.LISTS_GET_SUBLIST_END_FROM_START = "ke #";
Blockly.Msg.LISTS_GET_SUBLIST_END_LAST = "ke yang paling akhir";
Blockly.Msg.LISTS_GET_SUBLIST_HELPURL = "https://code.google.com/p/blockly/wiki/Lists#Getting_a_sublist";
Blockly.Msg.LISTS_GET_SUBLIST_START_FIRST = "Dapatkan bagian pertama dari list";
Blockly.Msg.LISTS_GET_SUBLIST_START_FROM_END = "Dapatkan bagian list nomor # dari akhir";
Blockly.Msg.LISTS_GET_SUBLIST_START_FROM_START = "Dapatkan bagian daftar dari #";
Blockly.Msg.LISTS_GET_SUBLIST_TAIL = ""; // untranslated
Blockly.Msg.LISTS_GET_SUBLIST_TOOLTIP = "Membuat salinan dari bagian tertentu dari list.";
Blockly.Msg.LISTS_INDEX_OF_FIRST = "cari kejadian pertama item";
Blockly.Msg.LISTS_INDEX_OF_HELPURL = "https://code.google.com/p/blockly/wiki/Lists#Getting_Items_from_a_List";
Blockly.Msg.LISTS_INDEX_OF_LAST = "Cari kejadian terakhir item";
Blockly.Msg.LISTS_INDEX_OF_TOOLTIP = "Mengembalikan indeks dari kejadian pertama/terakhir item dalam daftar. Menghasilkan 0 jika teks tidak ditemukan.";
Blockly.Msg.LISTS_INLIST = "dalam daftar";
Blockly.Msg.LISTS_IS_EMPTY_HELPURL = "https://code.google.com/p/blockly/wiki/Lists#is_empty";
Blockly.Msg.LISTS_IS_EMPTY_TITLE = "%1 kosong";
Blockly.Msg.LISTS_LENGTH_HELPURL = "https://code.google.com/p/blockly/wiki/Lists#length_of";
Blockly.Msg.LISTS_LENGTH_TITLE = "panjang dari %1";
Blockly.Msg.LISTS_LENGTH_TOOLTIP = "Mengembalikan panjang daftar.";
Blockly.Msg.LISTS_REPEAT_HELPURL = "https://code.google.com/p/blockly/wiki/Lists#create_list_with";
Blockly.Msg.LISTS_REPEAT_TITLE = "membuat daftar dengan item %1 diulang %2 kali";
Blockly.Msg.LISTS_REPEAT_TOOLTIP = "Ciptakan daftar yang terdiri dari nilai yang diberikan diulang jumlah waktu yang ditentukan.";
Blockly.Msg.LISTS_SET_INDEX_HELPURL = "https://code.google.com/p/blockly/wiki/Lists#in_list_..._set";
Blockly.Msg.LISTS_SET_INDEX_INPUT_TO = "sebagai";
Blockly.Msg.LISTS_SET_INDEX_INSERT = "sisipkan di";
Blockly.Msg.LISTS_SET_INDEX_SET = "tetapkan";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_FIRST = "Sisipkan item di bagian awal dari list.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_FROM_END = "Sisipkan item ke dalam posisi yang telah ditentukan di dalam list. #1 adalah item yang terakhir.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_FROM_START = "Sisipkan item ke dalam posisi yang telah ditentukan di dalam list. #1 adalah item yang pertama.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_LAST = "Tambahkan item ke bagian akhir list.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_RANDOM = "Sisipkan item secara acak ke dalam list.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_FIRST = "Tetapkan item pertama di dalam list.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_FROM_END = "Tetapkan item ke dalam posisi yang telah ditentukan di dalam list. #1 adalah item yang terakhir.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_FROM_START = "Tetapkan item ke dalam posisi yang telah ditentukan di dalam list. #1 adalah item yang pertama.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_LAST = "Menetapkan item terakhir dalam list.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_RANDOM = "Tetapkan secara acak sebuah item dalam list.";
Blockly.Msg.LISTS_TOOLTIP = "Mengembalikan nilai benar (true) jika list kosong.";

// --- Logic blocks ---
Blockly.Msg.LOGIC_BOOLEAN_FALSE = "Salah";
Blockly.Msg.LOGIC_BOOLEAN_HELPURL = "http://code.google.com/p/blockly/wiki/True_False";
Blockly.Msg.LOGIC_BOOLEAN_TOOLTIP = "Mengembalikan betul (true) atau salah (false).";
Blockly.Msg.LOGIC_BOOLEAN_TRUE = "Benar";
Blockly.Msg.LOGIC_COMPARE_HELPURL = "https://en.wikipedia.org/wiki/Inequality_(mathematics)";
Blockly.Msg.LOGIC_COMPARE_TOOLTIP_EQ = "Mengembalikan betul jika input kedua-duanya sama dengan satu sama lain.";
Blockly.Msg.LOGIC_COMPARE_TOOLTIP_GT = "Mengembalikan nilai benar (true) jika input yang pertama lebih besar dari input yang kedua.";
Blockly.Msg.LOGIC_COMPARE_TOOLTIP_GTE = "Mengembalikan nilai benar (true) jika input yang pertama lebih besar dari atau sama dengan input yang kedua.";
Blockly.Msg.LOGIC_COMPARE_TOOLTIP_LT = "Mengembalikan nilai benar (true) jika input yang pertama lebih kecil dari input yang kedua.";
Blockly.Msg.LOGIC_COMPARE_TOOLTIP_LTE = "Mengembalikan nilai benar (true) jika input yang pertama lebih kecil atau sama dengan input yang kedua .";
Blockly.Msg.LOGIC_COMPARE_TOOLTIP_NEQ = "Mengembalikan nilai benar (true) jika kedua input tidak sama satu dengan yang lain.";
Blockly.Msg.LOGIC_NEGATE_HELPURL = "http://code.google.com/p/blockly/wiki/Not";
Blockly.Msg.LOGIC_NEGATE_TITLE = "bukan (not) %1";
Blockly.Msg.LOGIC_NEGATE_TOOLTIP = "Mengembalikan nilai benar (true) jika input false. Mengembalikan nilai salah (false) jika input true.";
Blockly.Msg.LOGIC_NULL = "null";
Blockly.Msg.LOGIC_NULL_HELPURL = "https://en.wikipedia.org/wiki/Nullable_type";
Blockly.Msg.LOGIC_NULL_TOOLTIP = "mengembalikan kosong.";
Blockly.Msg.LOGIC_OPERATION_AND = "dan";
Blockly.Msg.LOGIC_OPERATION_HELPURL = "http://code.google.com/p/blockly/wiki/And_Or";
Blockly.Msg.LOGIC_OPERATION_OR = "atau";
Blockly.Msg.LOGIC_OPERATION_TOOLTIP_AND = "Kembalikan betul jika kedua-dua input adalah betul.";
Blockly.Msg.LOGIC_OPERATION_TOOLTIP_OR = "Mengembalikan nilai benar (true) jika setidaknya salah satu masukan nilainya benar (true).";
Blockly.Msg.LOGIC_TERNARY_CONDITION = "test";
Blockly.Msg.LOGIC_TERNARY_HELPURL = "https://en.wikipedia.org/wiki/%3F:";
Blockly.Msg.LOGIC_TERNARY_IF_FALSE = "jika tidak benar (false)";
Blockly.Msg.LOGIC_TERNARY_IF_TRUE = "jika benar (true)";
Blockly.Msg.LOGIC_TERNARY_TOOLTIP = "Periksa kondisi di \"test\". Jika kondisi benar (true), mengembalikan nilai \"jika benar\" ; Jik sebaliknya akan mengembalikan nilai \"jika salah\".";

// --- Math blocks ---
Blockly.Msg.MATH_ADDITION_SYMBOL = "+";
Blockly.Msg.MATH_ARITHMETIC_HELPURL = "https://id.wikipedia.org/wiki/Aritmetika";
Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_ADD = "Kembalikan jumlah dari kedua angka.";
Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_DIVIDE = "Kembalikan hasil bagi dari kedua angka.";
Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_MINUS = "Kembalikan selisih dari kedua angka.";
Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_MULTIPLY = "Kembalikan perkalian dari kedua angka.";
Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_POWER = "Kembalikan angka pertama pangkat angka kedua.";
Blockly.Msg.MATH_CHANGE_HELPURL = "https://en.wikipedia.org/wiki/Programming_idiom#Incrementing_a_counter";
Blockly.Msg.MATH_CHANGE_INPUT_BY = "oleh";
Blockly.Msg.MATH_CHANGE_TITLE_CHANGE = "ubah";
Blockly.Msg.MATH_CHANGE_TOOLTIP = "Tambahkan angka kedalam variabel '%1'.";
Blockly.Msg.MATH_CONSTANT_HELPURL = "https://en.wikipedia.org/wiki/Mathematical_constant";
Blockly.Msg.MATH_CONSTANT_TOOLTIP = "Kembalikan salah satu konstant: π (3.141…), e (2.718…), φ (1.618…), sqrt(2) (1.414…), sqrt(½) (0.707…), atau ∞ (infinity).";
Blockly.Msg.MATH_CONSTRAIN_HELPURL = "https://en.wikipedia.org/wiki/Clamping_%28graphics%29";
Blockly.Msg.MATH_CONSTRAIN_TITLE = "Batasi %1 rendah %2 tinggi %3";
Blockly.Msg.MATH_CONSTRAIN_TOOLTIP = "Batasi angka antara batas yang ditentukan (inklusif).";
Blockly.Msg.MATH_DIVISION_SYMBOL = "÷";
Blockly.Msg.MATH_IS_DIVISIBLE_BY = "dibagi oleh";
Blockly.Msg.MATH_IS_EVEN = "adalah bilangan genap";
Blockly.Msg.MATH_IS_NEGATIVE = "adalah bilangan negatif";
Blockly.Msg.MATH_IS_ODD = "adalah bilangan ganjil";
Blockly.Msg.MATH_IS_POSITIVE = "adalah bilangan positif";
Blockly.Msg.MATH_IS_PRIME = "adalah bilangan pokok";
Blockly.Msg.MATH_IS_TOOLTIP = "Periksa apakah angka adalah bilangan genap, bilangan pokok, bilangan bulat, bilangan positif, bilangan negatif, atau apakan bisa dibagi oleh angka tertentu. Mengembalikan benar (true) atau salah (false).";
Blockly.Msg.MATH_IS_WHOLE = "adalah bilangan bulat";
Blockly.Msg.MATH_MODULO_HELPURL = "https://en.wikipedia.org/wiki/Modulo_operation";
Blockly.Msg.MATH_MODULO_TITLE = "sisa %1 ÷ %2";
Blockly.Msg.MATH_MODULO_TOOLTIP = "Kembalikan sisa dari pembagian ke dua angka.";
Blockly.Msg.MATH_MULTIPLICATION_SYMBOL = "×";
Blockly.Msg.MATH_NUMBER_HELPURL = "https://en.wikipedia.org/wiki/Number";
Blockly.Msg.MATH_NUMBER_TOOLTIP = "Suatu angka.";
Blockly.Msg.MATH_ONLIST_HELPURL = ""; // untranslated
Blockly.Msg.MATH_ONLIST_OPERATOR_AVERAGE = "rata-rata dari list (daftar)";
Blockly.Msg.MATH_ONLIST_OPERATOR_MAX = "maximum dari list (daftar)";
Blockly.Msg.MATH_ONLIST_OPERATOR_MEDIAN = "median dari list (daftar)";
Blockly.Msg.MATH_ONLIST_OPERATOR_MIN = "minimum dari list (daftar)";
Blockly.Msg.MATH_ONLIST_OPERATOR_MODE = "mode-mode dari list (daftar)";
Blockly.Msg.MATH_ONLIST_OPERATOR_RANDOM = "item acak dari list (daftar)";
Blockly.Msg.MATH_ONLIST_OPERATOR_STD_DEV = "deviasi standar dari list (daftar)";
Blockly.Msg.MATH_ONLIST_OPERATOR_SUM = "jumlah dari list (daftar)";
Blockly.Msg.MATH_ONLIST_TOOLTIP_AVERAGE = "Kembalikan rata-rata (mean aritmetik) dari nilai numerik dari list (daftar).";
Blockly.Msg.MATH_ONLIST_TOOLTIP_MAX = "Kembalikan angka terbesar dari list.";
Blockly.Msg.MATH_ONLIST_TOOLTIP_MEDIAN = "Kembalikan median dari list.";
Blockly.Msg.MATH_ONLIST_TOOLTIP_MIN = "Kembalikan angka terkecil dari list.";
Blockly.Msg.MATH_ONLIST_TOOLTIP_MODE = "Kembalikan list berisi item-item yang paling umum dari dalam list.";
Blockly.Msg.MATH_ONLIST_TOOLTIP_RANDOM = "Kembalikan element acak dari list.";
Blockly.Msg.MATH_ONLIST_TOOLTIP_STD_DEV = "Kembalikan standard deviasi dari list.";
Blockly.Msg.MATH_ONLIST_TOOLTIP_SUM = "Kembalikan jumlah dari seluruh bilangan dari list.";
Blockly.Msg.MATH_POWER_SYMBOL = "^";
Blockly.Msg.MATH_RANDOM_FLOAT_HELPURL = "https://en.wikipedia.org/wiki/Random_number_generation";
Blockly.Msg.MATH_RANDOM_FLOAT_TITLE_RANDOM = "Nilai pecahan acak";
Blockly.Msg.MATH_RANDOM_FLOAT_TOOLTIP = "Mengembalikan nilai acak pecahan antara 0.0 (inklusif) dan 1.0 (ekslusif).";
Blockly.Msg.MATH_RANDOM_INT_HELPURL = "https://en.wikipedia.org/wiki/Random_number_generation";
Blockly.Msg.MATH_RANDOM_INT_TITLE = "acak bulat dari %1 sampai %2";
Blockly.Msg.MATH_RANDOM_INT_TOOLTIP = "Mengembalikan bilangan acak antara dua batas yang ditentukan, inklusif.";
Blockly.Msg.MATH_ROUND_HELPURL = "https://en.wikipedia.org/wiki/Rounding";
Blockly.Msg.MATH_ROUND_OPERATOR_ROUND = "membulatkan";
Blockly.Msg.MATH_ROUND_OPERATOR_ROUNDDOWN = "membulatkan kebawah";
Blockly.Msg.MATH_ROUND_OPERATOR_ROUNDUP = "mengumpulkan";
Blockly.Msg.MATH_ROUND_TOOLTIP = "Bulatkan suatu bilangan naik atau turun.";
Blockly.Msg.MATH_SINGLE_HELPURL = "https://en.wikipedia.org/wiki/Square_root";
Blockly.Msg.MATH_SINGLE_OP_ABSOLUTE = "mutlak";
Blockly.Msg.MATH_SINGLE_OP_ROOT = "akar";
Blockly.Msg.MATH_SINGLE_TOOLTIP_ABS = "Kembalikan nilai absolut angka.";
Blockly.Msg.MATH_SINGLE_TOOLTIP_EXP = "Kembalikan 10 pangkat angka.";
Blockly.Msg.MATH_SINGLE_TOOLTIP_LN = "Kembalikan logaritma natural dari angka.";
Blockly.Msg.MATH_SINGLE_TOOLTIP_LOG10 = "Kembalikan dasar logaritma 10 dari angka.";
Blockly.Msg.MATH_SINGLE_TOOLTIP_NEG = "Kembalikan penyangkalan terhadap angka.";
Blockly.Msg.MATH_SINGLE_TOOLTIP_POW10 = "Kembalikan 10 pangkat angka.";
Blockly.Msg.MATH_SINGLE_TOOLTIP_ROOT = "Kembalikan akar dari angka.";
Blockly.Msg.MATH_SUBTRACTION_SYMBOL = "-";
Blockly.Msg.MATH_TRIG_ACOS = "acos";
Blockly.Msg.MATH_TRIG_ASIN = "asin";
Blockly.Msg.MATH_TRIG_ATAN = "atan";
Blockly.Msg.MATH_TRIG_COS = "cos";
Blockly.Msg.MATH_TRIG_HELPURL = "https://en.wikipedia.org/wiki/Trigonometric_functions";
Blockly.Msg.MATH_TRIG_SIN = "sin";
Blockly.Msg.MATH_TRIG_TAN = "tan";
Blockly.Msg.MATH_TRIG_TOOLTIP_ACOS = "Kembalikan acosine dari angka.";
Blockly.Msg.MATH_TRIG_TOOLTIP_ASIN = "Kembalikan asin dari angka.";
Blockly.Msg.MATH_TRIG_TOOLTIP_ATAN = "Kembalikan atan dari angka.";
Blockly.Msg.MATH_TRIG_TOOLTIP_COS = "Kembalikan cos dari derajat (bukan radian).";
Blockly.Msg.MATH_TRIG_TOOLTIP_SIN = "Kembalikan sinus dari derajat (bukan radian).";
Blockly.Msg.MATH_TRIG_TOOLTIP_TAN = "Kembalikan tangen dari derajat (tidak radian).";

// --- Variable / procedure UI strings ---
Blockly.Msg.NEW_VARIABLE = "Pembolehubah baru...";
Blockly.Msg.NEW_VARIABLE_TITLE = "Nama pembolehubah baru:";
Blockly.Msg.ORDINAL_NUMBER_SUFFIX = ""; // untranslated
Blockly.Msg.PROCEDURES_BEFORE_PARAMS = "dengan:";
Blockly.Msg.PROCEDURES_CALLNORETURN_CALL = ""; // untranslated
Blockly.Msg.PROCEDURES_CALLNORETURN_HELPURL = "https://en.wikipedia.org/wiki/Procedure_%28computer_science%29";
Blockly.Msg.PROCEDURES_CALLNORETURN_TOOLTIP = "Menjalankan fungsi '%1' yang ditetapkan pengguna.";
Blockly.Msg.PROCEDURES_CALLRETURN_HELPURL = "https://en.wikipedia.org/wiki/Procedure_%28computer_science%29";
Blockly.Msg.PROCEDURES_CALLRETURN_TOOLTIP = "Menjalankan fungsi '%1' yang ditetapkan pengguna dan menggunakan outputnya.";
Blockly.Msg.PROCEDURES_CREATE_DO = "Buat '%1'";
Blockly.Msg.PROCEDURES_DEFNORETURN_DO = ""; // untranslated
Blockly.Msg.PROCEDURES_DEFNORETURN_HELPURL = "https://en.wikipedia.org/wiki/Procedure_%28computer_science%29";
Blockly.Msg.PROCEDURES_DEFNORETURN_PROCEDURE = "buat sesuatu";
Blockly.Msg.PROCEDURES_DEFNORETURN_TITLE = "untuk";
Blockly.Msg.PROCEDURES_DEFNORETURN_TOOLTIP = "Menciptakan sebuah fungsi dengan tiada output.";
Blockly.Msg.PROCEDURES_DEFRETURN_HELPURL = "https://en.wikipedia.org/wiki/Procedure_%28computer_science%29";
Blockly.Msg.PROCEDURES_DEFRETURN_RETURN = "kembali";
Blockly.Msg.PROCEDURES_DEFRETURN_TOOLTIP = "Menciptakan sebuah fungsi dengan satu output.";
Blockly.Msg.PROCEDURES_DEF_DUPLICATE_WARNING = "Peringatan: Fungsi ini memiliki parameter duplikat.";
Blockly.Msg.PROCEDURES_HIGHLIGHT_DEF = "Sorot definisi fungsi";
Blockly.Msg.PROCEDURES_IFRETURN_TOOLTIP = "Jika nilai yang benar, kemudian kembalikan nilai kedua.";
Blockly.Msg.PROCEDURES_IFRETURN_WARNING = "Peringatan: Blok ini dapat digunakan hanya dalam definisi fungsi.";
Blockly.Msg.PROCEDURES_MUTATORARG_TITLE = "masukan Nama:";
Blockly.Msg.PROCEDURES_MUTATORCONTAINER_TITLE = "input";
Blockly.Msg.REMOVE_COMMENT = "Hapus komentar";
Blockly.Msg.RENAME_VARIABLE = "namai ulang variabel...";
Blockly.Msg.RENAME_VARIABLE_TITLE = "Ubah nama semua variabel '%1' menjadi:";

// --- Text blocks ---
Blockly.Msg.TEXT_APPEND_APPENDTEXT = "tambahkan teks";
Blockly.Msg.TEXT_APPEND_HELPURL = "https://code.google.com/p/blockly/wiki/Text#Text_modification";
Blockly.Msg.TEXT_APPEND_TO = "untuk";
Blockly.Msg.TEXT_APPEND_TOOLTIP = "Tambahkan beberapa teks ke variabel '%1'.";
Blockly.Msg.TEXT_CHANGECASE_HELPURL = "https://code.google.com/p/blockly/wiki/Text#Adjusting_text_case";
Blockly.Msg.TEXT_CHANGECASE_OPERATOR_LOWERCASE = "menjadi huruf kecil";
Blockly.Msg.TEXT_CHANGECASE_OPERATOR_TITLECASE = "menjadi huruf pertama kapital";
Blockly.Msg.TEXT_CHANGECASE_OPERATOR_UPPERCASE = "menjadi huruf kapital";
Blockly.Msg.TEXT_CHANGECASE_TOOLTIP = "Kembalikan kopi dari text dengan kapitalisasi yang berbeda.";
Blockly.Msg.TEXT_CHARAT_FIRST = "ambil huruf pertama";
Blockly.Msg.TEXT_CHARAT_FROM_END = "ambil huruf nomor # dari belakang";
Blockly.Msg.TEXT_CHARAT_FROM_START = "ambil huruf ke #";
Blockly.Msg.TEXT_CHARAT_HELPURL = "https://code.google.com/p/blockly/wiki/Text#Extracting_text";
Blockly.Msg.TEXT_CHARAT_INPUT_INTEXT = "dalam teks";
Blockly.Msg.TEXT_CHARAT_LAST = "ambil huruf terakhir";
Blockly.Msg.TEXT_CHARAT_RANDOM = "ambil huruf secara acak";
Blockly.Msg.TEXT_CHARAT_TAIL = ""; // untranslated
Blockly.Msg.TEXT_CHARAT_TOOLTIP = "Kembalikan karakter dari posisi tertentu.";
Blockly.Msg.TEXT_CREATE_JOIN_ITEM_TOOLTIP = "Tambahkan suatu item ke dalam teks.";
Blockly.Msg.TEXT_CREATE_JOIN_TITLE_JOIN = "join";
Blockly.Msg.TEXT_CREATE_JOIN_TOOLTIP = "Tambah, ambil, atau susun ulang teks blok.";
Blockly.Msg.TEXT_GET_SUBSTRING_END_FROM_END = "pada huruf nomer # dari terakhir";
Blockly.Msg.TEXT_GET_SUBSTRING_END_FROM_START = "pada huruf #";
Blockly.Msg.TEXT_GET_SUBSTRING_END_LAST = "pada huruf terakhir";
Blockly.Msg.TEXT_GET_SUBSTRING_HELPURL = "http://code.google.com/p/blockly/wiki/Text#Extracting_a_region_of_text";
Blockly.Msg.TEXT_GET_SUBSTRING_INPUT_IN_TEXT = "in teks";
Blockly.Msg.TEXT_GET_SUBSTRING_START_FIRST = "ambil bagian teks (substring) dari huruf pertama";
Blockly.Msg.TEXT_GET_SUBSTRING_START_FROM_END = "ambil bagian teks (substring) dari huruf ke # dari terakhir";
Blockly.Msg.TEXT_GET_SUBSTRING_START_FROM_START = "ambil bagian teks (substring) dari huruf no #";
Blockly.Msg.TEXT_GET_SUBSTRING_TAIL = ""; // untranslated
Blockly.Msg.TEXT_GET_SUBSTRING_TOOLTIP = "Mengembalikan spesifik bagian dari teks.";
Blockly.Msg.TEXT_INDEXOF_HELPURL = "https://code.google.com/p/blockly/wiki/Text#Finding_text";
Blockly.Msg.TEXT_INDEXOF_INPUT_INTEXT = "dalam teks";
Blockly.Msg.TEXT_INDEXOF_OPERATOR_FIRST = "temukan kejadian pertama dalam teks";
Blockly.Msg.TEXT_INDEXOF_OPERATOR_LAST = "temukan kejadian terakhir dalam teks";
Blockly.Msg.TEXT_INDEXOF_TAIL = ""; // untranslated
Blockly.Msg.TEXT_INDEXOF_TOOLTIP = "Kembalikan indeks pertama dan terakhir dari kejadian pertama/terakhir dari teks pertama dalam teks kedua. Kembalikan 0 jika teks tidak ditemukan.";
Blockly.Msg.TEXT_ISEMPTY_HELPURL = "https://code.google.com/p/blockly/wiki/Text#Checking_for_empty_text";
Blockly.Msg.TEXT_ISEMPTY_TITLE = "%1 kosong";
Blockly.Msg.TEXT_ISEMPTY_TOOLTIP = "Kembalikan benar (true) jika teks yang disediakan kosong.";
Blockly.Msg.TEXT_JOIN_HELPURL = "https://code.google.com/p/blockly/wiki/Text#Text_creation";
Blockly.Msg.TEXT_JOIN_TITLE_CREATEWITH = "Buat teks dengan";
Blockly.Msg.TEXT_JOIN_TOOLTIP = "Buat teks dengan cara gabungkan sejumlah item.";
Blockly.Msg.TEXT_LENGTH_HELPURL = "https://code.google.com/p/blockly/wiki/Text#Text_modification";
Blockly.Msg.TEXT_LENGTH_TITLE = "panjang dari %1";
Blockly.Msg.TEXT_LENGTH_TOOLTIP = "Kembalikan sejumlah huruf (termasuk spasi) dari teks yang disediakan.";
Blockly.Msg.TEXT_PRINT_HELPURL = "https://code.google.com/p/blockly/wiki/Text#Printing_text";
Blockly.Msg.TEXT_PRINT_TITLE = "cetak %1";
Blockly.Msg.TEXT_PRINT_TOOLTIP = "Cetak teks yant ditentukan, angka atau ninlai lainnya.";
Blockly.Msg.TEXT_PROMPT_HELPURL = "https://code.google.com/p/blockly/wiki/Text#Getting_input_from_the_user";
Blockly.Msg.TEXT_PROMPT_TOOLTIP_NUMBER = "Meminta pengguna untuk memberi sebuah angka.";
Blockly.Msg.TEXT_PROMPT_TOOLTIP_TEXT = "Meminta pengguna untuk memberi beberapa teks.";
Blockly.Msg.TEXT_PROMPT_TYPE_NUMBER = "Meminta angka dengan pesan";
Blockly.Msg.TEXT_PROMPT_TYPE_TEXT = "meminta teks dengan pesan";
Blockly.Msg.TEXT_TEXT_HELPURL = "https://en.wikipedia.org/wiki/String_(computer_science)";
Blockly.Msg.TEXT_TEXT_TOOLTIP = "Huruf, kata atau baris teks.";
Blockly.Msg.TEXT_TRIM_HELPURL = "https://code.google.com/p/blockly/wiki/Text#Trimming_%28removing%29_spaces";
Blockly.Msg.TEXT_TRIM_OPERATOR_BOTH = "pangkas ruang dari kedua belah sisi";
Blockly.Msg.TEXT_TRIM_OPERATOR_LEFT = "pangkas ruang dari sisi kiri";
Blockly.Msg.TEXT_TRIM_OPERATOR_RIGHT = "pangkas ruang dari sisi kanan";
Blockly.Msg.TEXT_TRIM_TOOLTIP = "Kembali salinan teks dengan spasi dihapus dari satu atau kedua ujungnya.";

// --- Variable blocks ---
Blockly.Msg.VARIABLES_DEFAULT_NAME = "item";
Blockly.Msg.VARIABLES_GET_CREATE_SET = "Membuat 'tetapkan %1'";
Blockly.Msg.VARIABLES_GET_HELPURL = "http://code.google.com/p/blockly/wiki/Variables#Get";
Blockly.Msg.VARIABLES_GET_TAIL = ""; // untranslated
Blockly.Msg.VARIABLES_GET_TITLE = ""; // untranslated
Blockly.Msg.VARIABLES_GET_TOOLTIP = "Mengembalikan nilai variabel ini.";
Blockly.Msg.VARIABLES_SET_CREATE_GET = "Membuat 'dapatkan %1'";
Blockly.Msg.VARIABLES_SET_HELPURL = "http://code.google.com/p/blockly/wiki/Variables#Set";
Blockly.Msg.VARIABLES_SET_TAIL = "untuk";
Blockly.Msg.VARIABLES_SET_TITLE = "tetapkan";
Blockly.Msg.VARIABLES_SET_TOOLTIP = "tetapkan variabel ini dengan input yang sama.";

// --- Synonym/alias keys: these reuse strings defined above, so they must
// --- stay after the definitions they reference.
Blockly.Msg.PROCEDURES_DEFRETURN_TITLE = Blockly.Msg.PROCEDURES_DEFNORETURN_TITLE;
Blockly.Msg.LISTS_GET_SUBLIST_INPUT_IN_LIST = Blockly.Msg.LISTS_INLIST;
Blockly.Msg.LISTS_SET_INDEX_INPUT_IN_LIST = Blockly.Msg.LISTS_INLIST;
Blockly.Msg.PROCEDURES_DEFRETURN_PROCEDURE = Blockly.Msg.PROCEDURES_DEFNORETURN_PROCEDURE;
Blockly.Msg.VARIABLES_SET_ITEM = Blockly.Msg.VARIABLES_DEFAULT_NAME;
Blockly.Msg.LISTS_CREATE_WITH_ITEM_TITLE = Blockly.Msg.VARIABLES_DEFAULT_NAME;
Blockly.Msg.MATH_CHANGE_TITLE_ITEM = Blockly.Msg.VARIABLES_DEFAULT_NAME;
Blockly.Msg.VARIABLES_GET_ITEM = Blockly.Msg.VARIABLES_DEFAULT_NAME;
Blockly.Msg.PROCEDURES_DEFRETURN_DO = Blockly.Msg.PROCEDURES_DEFNORETURN_DO;
Blockly.Msg.LISTS_GET_INDEX_HELPURL = Blockly.Msg.LISTS_INDEX_OF_HELPURL;
Blockly.Msg.TEXT_CREATE_JOIN_ITEM_TITLE_ITEM = Blockly.Msg.VARIABLES_DEFAULT_NAME;
Blockly.Msg.CONTROLS_IF_MSG_THEN = Blockly.Msg.CONTROLS_REPEAT_INPUT_DO;
Blockly.Msg.LISTS_INDEX_OF_INPUT_IN_LIST = Blockly.Msg.LISTS_INLIST;
Blockly.Msg.PROCEDURES_CALLRETURN_CALL = Blockly.Msg.PROCEDURES_CALLNORETURN_CALL;
Blockly.Msg.LISTS_GET_INDEX_INPUT_IN_LIST = Blockly.Msg.LISTS_INLIST;
Blockly.Msg.CONTROLS_FOR_INPUT_DO = Blockly.Msg.CONTROLS_REPEAT_INPUT_DO;
Blockly.Msg.CONTROLS_FOREACH_INPUT_DO = Blockly.Msg.CONTROLS_REPEAT_INPUT_DO;
Blockly.Msg.CONTROLS_IF_IF_TITLE_IF = Blockly.Msg.CONTROLS_IF_MSG_IF;
Blockly.Msg.CONTROLS_WHILEUNTIL_INPUT_DO = Blockly.Msg.CONTROLS_REPEAT_INPUT_DO;
Blockly.Msg.CONTROLS_IF_ELSEIF_TITLE_ELSEIF = Blockly.Msg.CONTROLS_IF_MSG_ELSEIF;
Blockly.Msg.TEXT_APPEND_VARIABLE = Blockly.Msg.VARIABLES_DEFAULT_NAME;
Blockly.Msg.CONTROLS_IF_ELSE_TITLE_ELSE = Blockly.Msg.CONTROLS_IF_MSG_ELSE; | TechplexEngineer/blockly-old | msg/js/id.js | JavaScript | apache-2.0 | 29,832 |
package servicebroker
import (
"reflect"
"testing"
schema "github.com/lestrrat/go-jsschema"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"github.com/openshift/origin/pkg/openservicebroker/api"
templateapi "github.com/openshift/origin/pkg/template/api"
)
// TestServiceFromTemplate verifies that serviceFromTemplate maps an OpenShift
// Template (metadata, well-known annotations, and parameters) onto the Open
// Service Broker catalog representation (api.Service), including the single
// default plan and its provision/bind JSON schemas.
func TestServiceFromTemplate(t *testing.T) {
	// Input fixture: the annotations below are the ones the broker surfaces
	// as service metadata; param1 (required) and param2 (optional) exercise
	// the required-parameter handling in the provision schema.
	template := &templateapi.Template{
		ObjectMeta: metav1.ObjectMeta{
			Name: "name",
			// The template UID doubles as both the service ID and plan ID.
			UID:  "ee33151d-a34d-442d-a0ca-6353b73a58fd",
			Annotations: map[string]string{
				"description":                            "description",
				"tags":                                   "tag1,tag2",
				"openshift.io/display-name":              "displayName",
				"iconClass":                              "iconClass",
				"template.openshift.io/long-description": "longDescription",
				"template.openshift.io/provider-display-name": "providerDisplayName",
				"template.openshift.io/documentation-url":     "documentationURL",
				"template.openshift.io/support-url":           "supportURL",
			},
		},
		Parameters: []templateapi.Parameter{
			{
				Name:     "param1",
				Required: true,
			},
			{
				Name: "param2",
			},
		},
	}

	// Expected catalog entry: the "tags" annotation is split on commas, the
	// annotation values are copied into the OSB metadata map under their OSB
	// key names, and a single free, bindable "default" plan is generated.
	expectedService := &api.Service{
		Name:        "name",
		ID:          "ee33151d-a34d-442d-a0ca-6353b73a58fd",
		Description: "description",
		Tags:        []string{"tag1", "tag2"},
		Bindable:    true,
		Metadata: map[string]interface{}{
			"providerDisplayName":            "providerDisplayName",
			"documentationUrl":               "documentationURL",
			"supportUrl":                     "supportURL",
			"displayName":                    "displayName",
			"console.openshift.io/iconClass": "iconClass",
			"longDescription":                "longDescription",
		},
		Plans: []api.Plan{
			{
				ID:          "ee33151d-a34d-442d-a0ca-6353b73a58fd",
				Name:        "default",
				Description: "Default plan",
				Free:        true,
				Bindable:    true,
				Schemas: api.Schema{
					// Provision schema: broker-injected namespace and
					// requester-username properties are always present and
					// required; template parameters are appended, with
					// required ones listed in Required.
					ServiceInstances: api.ServiceInstances{
						Create: map[string]*schema.Schema{
							"parameters": {
								Type:      schema.PrimitiveTypes{schema.ObjectType},
								SchemaRef: "http://json-schema.org/draft-04/schema",
								Required: []string{
									"template.openshift.io/namespace",
									"template.openshift.io/requester-username",
									"param1",
								},
								Properties: map[string]*schema.Schema{
									"template.openshift.io/namespace": {
										Title:       "Template service broker: namespace",
										Description: "OpenShift namespace in which to provision service",
										Type:        schema.PrimitiveTypes{schema.StringType},
									},
									"template.openshift.io/requester-username": {
										Title:       "Template service broker: requester username",
										Description: "OpenShift user requesting provision/bind",
										Type:        schema.PrimitiveTypes{schema.StringType},
									},
									// Template parameters default to empty strings.
									"param1": {
										Default: "",
										Type:    schema.PrimitiveTypes{schema.StringType},
									},
									"param2": {
										Default: "",
										Type:    schema.PrimitiveTypes{schema.StringType},
									},
								},
							},
						},
					},
					// Bind schema: only the requester-username is injected.
					ServiceBindings: api.ServiceBindings{
						Create: map[string]*schema.Schema{
							"parameters": {
								Type:      schema.PrimitiveTypes{schema.ObjectType},
								SchemaRef: "http://json-schema.org/draft-04/schema",
								Required:  []string{"template.openshift.io/requester-username"},
								Properties: map[string]*schema.Schema{
									"template.openshift.io/requester-username": {
										Title:       "Template service broker: requester username",
										Description: "OpenShift user requesting provision/bind",
										Type:        schema.PrimitiveTypes{schema.StringType},
									},
								},
							},
						},
					},
				},
			},
		},
	}

	// Deep structural comparison of the whole catalog entry.
	service := serviceFromTemplate(template)

	if !reflect.DeepEqual(service, expectedService) {
		t.Error("service did not match expectedService")
	}
}
| thrasher-redhat/origin | pkg/template/servicebroker/catalog_test.go | GO | apache-2.0 | 3,950 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.mllp;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.impl.UriEndpointComponent;
/**
* Represents the component that manages {@link MllpEndpoint}.
*/
public class MllpComponent extends UriEndpointComponent {
public static final String MLLP_LOG_PHI_PROPERTY = "org.apache.camel.component.mllp.logPHI";
public MllpComponent() {
super(MllpEndpoint.class);
}
public MllpComponent(CamelContext context) {
super(context, MllpEndpoint.class);
}
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
MllpEndpoint endpoint = new MllpEndpoint(uri, this);
setProperties(endpoint, parameters);
// mllp://hostname:port
String hostPort;
// look for options
int optionsStartIndex = uri.indexOf('?');
if (-1 == optionsStartIndex) {
// No options - just get the host/port stuff
hostPort = uri.substring(7);
} else {
hostPort = uri.substring(7, optionsStartIndex);
}
// Make sure it has a host - may just be a port
int colonIndex = hostPort.indexOf(':');
if (-1 != colonIndex) {
endpoint.setHostname(hostPort.substring(0, colonIndex));
endpoint.setPort(Integer.parseInt(hostPort.substring(colonIndex + 1)));
} else {
// No host specified - leave the default host and set the port
endpoint.setPort(Integer.parseInt(hostPort.substring(colonIndex + 1)));
}
return endpoint;
}
}
| tkopczynski/camel | components/camel-mllp/src/main/java/org/apache/camel/component/mllp/MllpComponent.java | Java | apache-2.0 | 2,487 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Remote;
namespace Microsoft.CodeAnalysis.FindSymbols
{
    public static partial class SymbolFinder
    {
        /// <summary>
        /// Convenience overload that requests a remote session without a server callback.
        /// </summary>
        internal static Task<RemoteHostClient.Session> TryGetRemoteSessionAsync(
            Solution solution, CancellationToken cancellationToken)
        {
            return TryGetRemoteSessionAsync(solution, serverCallback: null, cancellationToken: cancellationToken);
        }

        /// <summary>
        /// Attempts to open a code-analysis session on the remote host. Returns null when
        /// out-of-process execution is disabled by options or no remote client is available.
        /// </summary>
        private static async Task<RemoteHostClient.Session> TryGetRemoteSessionAsync(
            Solution solution, object serverCallback, CancellationToken cancellationToken)
        {
            // Honor the workspace option that keeps symbol finding in-process.
            if (!solution.Workspace.Options.GetOption(SymbolFinderOptions.OutOfProcessAllowed))
            {
                return null;
            }

            var remoteClient = await solution.Workspace.TryGetRemoteHostClientAsync(cancellationToken).ConfigureAwait(false);
            if (remoteClient == null)
            {
                return null;
            }

            var session = await remoteClient.TryCreateCodeAnalysisServiceSessionAsync(
                solution, serverCallback, cancellationToken).ConfigureAwait(false);
            return session;
        }
    }
}
from threading import Timer
class RepeatedTimer(object):
    """Invoke ``function(*args, **kwargs)`` every ``interval`` seconds.

    Scheduling starts immediately on construction. Each tick re-arms the
    underlying one-shot :class:`threading.Timer` *before* invoking the
    callback, so a slow callback does not halt the schedule (though the
    tick spacing may drift by the callback's duration).
    """

    def __init__(self, interval, function, *args, **kwargs):
        self.interval = interval
        self.function = function
        self.args = args
        self.kwargs = kwargs
        # _timer holds the currently-armed one-shot Timer; is_running tracks
        # whether a tick is pending.
        self._timer = None
        self.is_running = False
        self.start()

    def _run(self):
        # Timer fired: mark idle, immediately re-arm, then run the callback.
        self.is_running = False
        self.start()
        self.function(*self.args, **self.kwargs)

    def start(self):
        """Arm the timer for the next tick; a no-op if one is already pending."""
        if self.is_running:
            return
        self._timer = Timer(self.interval, self._run)
        self._timer.start()
        self.is_running = True

    def stop(self):
        """Cancel the pending tick (if any) and halt the schedule."""
        self._timer.cancel()
        self.is_running = False
| tecdct2941/nxos_dashboard | repeated_timer.py | Python | apache-2.0 | 721 |
mingw32-make.exe mingw | dodoru/renderer.gua | build_win_mingw.bat | Batchfile | apache-2.0 | 22 |
using System;
using System.Collections.Generic;
using System.Drawing;
using System.IO;
using NUnit.Framework;
using OpenQA.Selenium.Environment;
namespace OpenQA.Selenium
{
    /// <summary>
    /// Tests for drivers implementing <see cref="ITakesScreenshot"/>: capture as
    /// file/base64/binary, plus pixel-color verification of captured viewports,
    /// long pages, frames, and iframes.
    /// </summary>
    [TestFixture]
    public class TakesScreenshotTest : DriverTestFixture
    {
        [TearDown]
        public void SwitchToTop()
        {
            // Several tests switch into frames; reset focus between tests.
            driver.SwitchTo().DefaultContent();
        }

        [Test]
        public void GetScreenshotAsFile()
        {
            ITakesScreenshot screenshotCapableDriver = driver as ITakesScreenshot;
            if (screenshotCapableDriver == null)
            {
                // Driver does not support screenshots; nothing to verify.
                return;
            }

            driver.Url = simpleTestPage;
            string filename = Path.Combine(Path.GetTempPath(), "snapshot" + new Random().Next().ToString() + ".png");
            Screenshot screenImage = screenshotCapableDriver.GetScreenshot();
            screenImage.SaveAsFile(filename, ScreenshotImageFormat.Png);
            Assert.That(File.Exists(filename), Is.True);
            Assert.That(new FileInfo(filename).Length, Is.GreaterThan(0));
            File.Delete(filename);
        }

        [Test]
        public void GetScreenshotAsBase64()
        {
            ITakesScreenshot screenshotCapableDriver = driver as ITakesScreenshot;
            if (screenshotCapableDriver == null)
            {
                return;
            }

            driver.Url = simpleTestPage;
            Screenshot screenImage = screenshotCapableDriver.GetScreenshot();
            string base64 = screenImage.AsBase64EncodedString;
            Assert.That(base64.Length, Is.GreaterThan(0));
        }

        [Test]
        public void GetScreenshotAsBinary()
        {
            ITakesScreenshot screenshotCapableDriver = driver as ITakesScreenshot;
            if (screenshotCapableDriver == null)
            {
                return;
            }

            driver.Url = simpleTestPage;
            Screenshot screenImage = screenshotCapableDriver.GetScreenshot();
            byte[] bytes = screenImage.AsByteArray;
            Assert.That(bytes.Length, Is.GreaterThan(0));
        }

        [Test]
        public void ShouldCaptureScreenshotOfCurrentViewport()
        {
            ITakesScreenshot screenshotCapableDriver = driver as ITakesScreenshot;
            if (screenshotCapableDriver == null)
            {
                return;
            }

            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("screen/screen.html");
            Screenshot screenshot = screenshotCapableDriver.GetScreenshot();

            HashSet<string> actualColors = ScanActualColors(screenshot,
                                                           /* stepX in pixels */ 5,
                                                           /* stepY in pixels */ 5);

            HashSet<string> expectedColors = GenerateExpectedColors( /* initial color */ 0x0F0F0F,
                                                                     /* color step */ 1000,
                                                                     /* grid X size */ 6,
                                                                     /* grid Y size */ 6);

            CompareColors(expectedColors, actualColors);
        }

        [Test]
        public void ShouldTakeScreenshotsOfAnElement()
        {
            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("screen/screen.html");
            IWebElement element = driver.FindElement(By.Id("cell11"));

            ITakesScreenshot screenshotCapableElement = element as ITakesScreenshot;
            if (screenshotCapableElement == null)
            {
                return;
            }

            Screenshot screenImage = screenshotCapableElement.GetScreenshot();
            byte[] imageData = screenImage.AsByteArray;
            Assert.That(imageData, Is.Not.Null);
            Assert.That(imageData.Length, Is.GreaterThan(0));

            // The element is a solid-color cell, so a single interior pixel suffices.
            Color pixelColor = GetPixelColor(screenImage, 1, 1);
            string pixelColorString = FormatColorToHex(pixelColor.ToArgb());
            Assert.AreEqual("#0f12f7", pixelColorString);
        }

        [Test]
        [IgnoreBrowser(Browser.Chrome, "Chrome driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.Firefox, "Firefox driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.IE, "IE driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.Edge, "Edge driver only captures visible viewport.")]
        public void ShouldCaptureScreenshotOfPageWithLongX()
        {
            ITakesScreenshot screenshotCapableDriver = driver as ITakesScreenshot;
            if (screenshotCapableDriver == null)
            {
                return;
            }

            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("screen/screen_x_long.html");
            Screenshot screenshot = screenshotCapableDriver.GetScreenshot();

            HashSet<string> actualColors = ScanActualColors(screenshot,
                                                            /* stepX in pixels */ 50,
                                                            /* stepY in pixels */ 5);

            HashSet<string> expectedColors = GenerateExpectedColors( /* initial color */ 0x0F0F0F,
                                                                     /* color step*/ 1000,
                                                                     /* grid X size */ 6,
                                                                     /* grid Y size */ 6);

            CompareColors(expectedColors, actualColors);
        }

        [Test]
        [IgnoreBrowser(Browser.Chrome, "Chrome driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.Firefox, "Firefox driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.IE, "IE driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.Edge, "Edge driver only captures visible viewport.")]
        public void ShouldCaptureScreenshotOfPageWithLongY()
        {
            ITakesScreenshot screenshotCapableDriver = driver as ITakesScreenshot;
            if (screenshotCapableDriver == null)
            {
                return;
            }

            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("screen/screen_y_long.html");
            Screenshot screenshot = screenshotCapableDriver.GetScreenshot();

            HashSet<string> actualColors = ScanActualColors(screenshot,
                                                            /* stepX in pixels */ 5,
                                                            /* stepY in pixels */ 50);

            HashSet<string> expectedColors = GenerateExpectedColors( /* initial color */ 0x0F0F0F,
                                                                     /* color step*/ 1000,
                                                                     /* grid X size */ 6,
                                                                     /* grid Y size */ 6);

            CompareColors(expectedColors, actualColors);
        }

        [Test]
        [IgnoreBrowser(Browser.Chrome, "Chrome driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.Firefox, "Firefox driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.IE, "IE driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.Edge, "Edge driver only captures visible viewport.")]
        public void ShouldCaptureScreenshotOfPageWithTooLongX()
        {
            ITakesScreenshot screenshotCapableDriver = driver as ITakesScreenshot;
            if (screenshotCapableDriver == null)
            {
                return;
            }

            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("screen/screen_x_too_long.html");
            Screenshot screenshot = screenshotCapableDriver.GetScreenshot();

            HashSet<string> actualColors = ScanActualColors(screenshot,
                                                            /* stepX in pixels */ 100,
                                                            /* stepY in pixels */ 5);

            HashSet<string> expectedColors = GenerateExpectedColors( /* initial color */ 0x0F0F0F,
                                                                     /* color step*/ 1000,
                                                                     /* grid X size */ 6,
                                                                     /* grid Y size */ 6);

            CompareColors(expectedColors, actualColors);
        }

        [Test]
        [IgnoreBrowser(Browser.Chrome, "Chrome driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.Firefox, "Firefox driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.IE, "IE driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.Edge, "Edge driver only captures visible viewport.")]
        public void ShouldCaptureScreenshotOfPageWithTooLongY()
        {
            ITakesScreenshot screenshotCapableDriver = driver as ITakesScreenshot;
            if (screenshotCapableDriver == null)
            {
                return;
            }

            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("screen/screen_y_too_long.html");
            Screenshot screenshot = screenshotCapableDriver.GetScreenshot();

            HashSet<string> actualColors = ScanActualColors(screenshot,
                                                            /* stepX in pixels */ 5,
                                                            /* stepY in pixels */ 100);

            HashSet<string> expectedColors = GenerateExpectedColors( /* initial color */ 0x0F0F0F,
                                                                     /* color step*/ 1000,
                                                                     /* grid X size */ 6,
                                                                     /* grid Y size */ 6);

            CompareColors(expectedColors, actualColors);
        }

        [Test]
        [IgnoreBrowser(Browser.Chrome, "Chrome driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.Firefox, "Firefox driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.IE, "IE driver only captures visible viewport.")]
        [IgnoreBrowser(Browser.Edge, "Edge driver only captures visible viewport.")]
        public void ShouldCaptureScreenshotOfPageWithTooLongXandY()
        {
            ITakesScreenshot screenshotCapableDriver = driver as ITakesScreenshot;
            if (screenshotCapableDriver == null)
            {
                return;
            }

            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("screen/screen_too_long.html");
            Screenshot screenshot = screenshotCapableDriver.GetScreenshot();

            HashSet<string> actualColors = ScanActualColors(screenshot,
                                                            /* stepX in pixels */ 100,
                                                            /* stepY in pixels */ 100);

            HashSet<string> expectedColors = GenerateExpectedColors( /* initial color */ 0x0F0F0F,
                                                                     /* color step*/ 1000,
                                                                     /* grid X size */ 6,
                                                                     /* grid Y size */ 6);

            CompareColors(expectedColors, actualColors);
        }

        [Test]
        public void ShouldCaptureScreenshotAtFramePage()
        {
            ITakesScreenshot screenshotCapableDriver = driver as ITakesScreenshot;
            if (screenshotCapableDriver == null)
            {
                return;
            }

            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("screen/screen_frames.html");
            // Wait for both frames to finish rendering before capturing.
            WaitFor(FrameToBeAvailableAndSwitchedTo("frame1"), "Did not switch to frame1");
            WaitFor(ElementToBeVisibleWithId("content"), "Did not find visible element with id content");

            driver.SwitchTo().DefaultContent();
            WaitFor(FrameToBeAvailableAndSwitchedTo("frame2"), "Did not switch to frame2");
            WaitFor(ElementToBeVisibleWithId("content"), "Did not find visible element with id content");

            driver.SwitchTo().DefaultContent();
            WaitFor(TitleToBe("screen test"), "Title was not expected value");
            Screenshot screenshot = screenshotCapableDriver.GetScreenshot();

            HashSet<string> actualColors = ScanActualColors(screenshot,
                                                           /* stepX in pixels */ 5,
                                                           /* stepY in pixels */ 5);

            HashSet<string> expectedColors = GenerateExpectedColors( /* initial color */ 0x0F0F0F,
                                                                     /* color step*/ 1000,
                                                                     /* grid X size */ 6,
                                                                     /* grid Y size */ 6);
            expectedColors.UnionWith(GenerateExpectedColors( /* initial color */ 0xDFDFDF,
                                                             /* color step*/ 1000,
                                                             /* grid X size */ 6,
                                                             /* grid Y size */ 6));

            // expectation is that screenshot at page with frames will be taken for full page
            CompareColors(expectedColors, actualColors);
        }

        [Test]
        [IgnoreBrowser(Browser.IE, "Color comparisons fail on IE")]
        public void ShouldCaptureScreenshotAtIFramePage()
        {
            ITakesScreenshot screenshotCapableDriver = driver as ITakesScreenshot;
            if (screenshotCapableDriver == null)
            {
                return;
            }

            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("screen/screen_iframes.html");

            Screenshot screenshot = screenshotCapableDriver.GetScreenshot();

            HashSet<string> actualColors = ScanActualColors(screenshot,
                                                           /* stepX in pixels */ 5,
                                                           /* stepY in pixels */ 5);

            HashSet<string> expectedColors = GenerateExpectedColors( /* initial color */ 0x0F0F0F,
                                                                     /* color step*/ 1000,
                                                                     /* grid X size */ 6,
                                                                     /* grid Y size */ 6);
            expectedColors.UnionWith(GenerateExpectedColors( /* initial color */ 0xDFDFDF,
                                                             /* color step*/ 1000,
                                                             /* grid X size */ 6,
                                                             /* grid Y size */ 6));

            // expectation is that screenshot at page with Iframes will be taken for full page
            CompareColors(expectedColors, actualColors);
        }

        [Test]
        [IgnoreBrowser(Browser.Firefox, "Color comparisons fail on Firefox")]
        public void ShouldCaptureScreenshotAtFramePageAfterSwitching()
        {
            ITakesScreenshot screenshotCapableDriver = driver as ITakesScreenshot;
            if (screenshotCapableDriver == null)
            {
                return;
            }

            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("screen/screen_frames.html");

            driver.SwitchTo().Frame(driver.FindElement(By.Id("frame2")));

            Screenshot screenshot = screenshotCapableDriver.GetScreenshot();

            HashSet<string> actualColors = ScanActualColors(screenshot,
                                                           /* stepX in pixels */ 5,
                                                           /* stepY in pixels */ 5);

            HashSet<string> expectedColors = GenerateExpectedColors( /* initial color */ 0x0F0F0F,
                                                                     /* color step*/ 1000,
                                                                     /* grid X size */ 6,
                                                                     /* grid Y size */ 6);
            expectedColors.UnionWith(GenerateExpectedColors( /* initial color */ 0xDFDFDF,
                                                             /* color step*/ 1000,
                                                             /* grid X size */ 6,
                                                             /* grid Y size */ 6));

            // expectation is that screenshot at page with frames after switching to a frame
            // will be taken for full page
            CompareColors(expectedColors, actualColors);
        }

        [Test]
        [IgnoreBrowser(Browser.IE, "Color comparisons fail on IE")]
        [IgnoreBrowser(Browser.Firefox, "Color comparisons fail on Firefox")]
        public void ShouldCaptureScreenshotAtIFramePageAfterSwitching()
        {
            ITakesScreenshot screenshotCapableDriver = driver as ITakesScreenshot;
            if (screenshotCapableDriver == null)
            {
                return;
            }

            driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("screen/screen_iframes.html");

            driver.SwitchTo().Frame(driver.FindElement(By.Id("iframe2")));

            Screenshot screenshot = screenshotCapableDriver.GetScreenshot();

            HashSet<string> actualColors = ScanActualColors(screenshot,
                                                           /* stepX in pixels */ 5,
                                                           /* stepY in pixels */ 5);

            HashSet<string> expectedColors = GenerateExpectedColors( /* initial color */ 0x0F0F0F,
                                                                     /* color step*/ 1000,
                                                                     /* grid X size */ 6,
                                                                     /* grid Y size */ 6);
            expectedColors.UnionWith(GenerateExpectedColors( /* initial color */ 0xDFDFDF,
                                                             /* color step*/ 1000,
                                                             /* grid X size */ 6,
                                                             /* grid Y size */ 6));

            // expectation is that screenshot at page with Iframes after switching to a Iframe
            // will be taken for full page
            CompareColors(expectedColors, actualColors);
        }

        /// <summary>
        /// Formats an ARGB int as a lowercase "#rrggbb" string (alpha discarded).
        /// </summary>
        private string FormatColorToHex(int colorValue)
        {
            string pixelColorString = string.Format("#{0:x2}{1:x2}{2:x2}", (colorValue & 0xFF0000) >> 16, (colorValue & 0x00FF00) >> 8, (colorValue & 0x0000FF));
            return pixelColorString;
        }

        /// <summary>
        /// Asserts that the sampled colors match the expected set, ignoring the
        /// black/white page chrome.
        /// </summary>
        private void CompareColors(HashSet<string> expectedColors, HashSet<string> actualColors)
        {
            // Ignore black and white for further comparison
            actualColors.Remove("#000000");
            actualColors.Remove("#ffffff");
            Assert.That(actualColors, Is.EquivalentTo(expectedColors));
        }

        /// <summary>
        /// Builds the set of hex colors the test pages paint: a grid of cells whose
        /// colors start at <paramref name="initialColor"/> and increase by
        /// <paramref name="stepColor"/> per cell.
        /// </summary>
        private HashSet<string> GenerateExpectedColors(int initialColor, int stepColor, int numberOfSamplesX, int numberOfSamplesY)
        {
            HashSet<string> colors = new HashSet<string>();
            int count = 1;
            for (int i = 1; i < numberOfSamplesX; i++)
            {
                for (int j = 1; j < numberOfSamplesY; j++)
                {
                    int color = initialColor + (count * stepColor);
                    string hex = FormatColorToHex(color);
                    colors.Add(hex);
                    count++;
                }
            }

            return colors;
        }

        /// <summary>
        /// Samples the screenshot every (stepX, stepY) pixels and returns the distinct
        /// hex colors found. On target frameworks without System.Drawing support the
        /// result is empty.
        /// </summary>
        private HashSet<string> ScanActualColors(Screenshot screenshot, int stepX, int stepY)
        {
            HashSet<string> colors = new HashSet<string>();

#if !NETCOREAPP2_0 && !NETSTANDARD2_0
            try
            {
                Image image = Image.FromStream(new MemoryStream(screenshot.AsByteArray));
                Bitmap bitmap = new Bitmap(image);
                int height = bitmap.Height;
                int width = bitmap.Width;
                Assert.That(width, Is.GreaterThan(0));
                Assert.That(height, Is.GreaterThan(0));

                for (int i = 0; i < width; i = i + stepX)
                {
                    for (int j = 0; j < height; j = j + stepY)
                    {
                        string hex = FormatColorToHex(bitmap.GetPixel(i, j).ToArgb());
                        colors.Add(hex);
                    }
                }
            }
            catch (Exception e)
            {
                Assert.Fail("Unable to get actual colors from screenshot: " + e.Message);
            }

            Assert.That(colors.Count, Is.GreaterThan(0));
#endif

            return colors;
        }

        /// <summary>
        /// Returns the color of the pixel at (x, y) in the screenshot, or black on
        /// target frameworks without System.Drawing support.
        /// </summary>
        private Color GetPixelColor(Screenshot screenshot, int x, int y)
        {
            Color pixelColor = Color.Black;

#if !NETCOREAPP2_0 && !NETSTANDARD2_0
            Image image = Image.FromStream(new MemoryStream(screenshot.AsByteArray));
            Bitmap bitmap = new Bitmap(image);
            // Fix: honor the requested coordinates; the original hard-coded
            // GetPixel(1, 1) and silently ignored the x and y parameters.
            pixelColor = bitmap.GetPixel(x, y);
#endif
            return pixelColor;
        }

        /// <summary>
        /// Wait condition: the frame with the given id exists and the driver has
        /// switched into it.
        /// </summary>
        private Func<bool> FrameToBeAvailableAndSwitchedTo(string frameId)
        {
            return () =>
            {
                try
                {
                    IWebElement frameElement = driver.FindElement(By.Id(frameId));
                    driver.SwitchTo().Frame(frameElement);
                }
                catch(Exception)
                {
                    return false;
                }

                return true;
            };
        }

        /// <summary>
        /// Wait condition: the element with the given id exists and is displayed.
        /// </summary>
        private Func<bool> ElementToBeVisibleWithId(string elementId)
        {
            return () =>
            {
                try
                {
                    IWebElement element = driver.FindElement(By.Id(elementId));
                    return element.Displayed;
                }
                catch(Exception)
                {
                    return false;
                }
            };
        }

        /// <summary>
        /// Wait condition: the page title equals the given value.
        /// </summary>
        private Func<bool> TitleToBe(string desiredTitle)
        {
            return () => driver.Title == desiredTitle;
        }
    }
}
| asashour/selenium | dotnet/test/common/TakesScreenshotTest.cs | C# | apache-2.0 | 22,313 |
// Copyright 2008 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// The following is taken from Closure Library:
//
// buildFromEncodedParts
// splitRe
// ComponentIndex
// split
// removeDotSegments
/**
 * Assembles a URI string from parts that are already URI-encoded.
 *
 * No encoding is performed. Any component may be omitted as either null or
 * undefined.
 *
 * @param {?string=} opt_scheme The scheme such as 'http'.
 * @param {?string=} opt_userInfo The user name before the '@'.
 * @param {?string=} opt_domain The domain such as 'www.google.com', already
 *     URI-encoded.
 * @param {(string|number|null)=} opt_port The port number.
 * @param {?string=} opt_path The path, already URI-encoded. If it is not
 *     empty, it must begin with a slash.
 * @param {?string=} opt_queryData The URI-encoded query data.
 * @param {?string=} opt_fragment The URI-encoded fragment identifier.
 * @return {string} The fully combined URI.
 */
function buildFromEncodedParts(opt_scheme, opt_userInfo,
    opt_domain, opt_port, opt_path, opt_queryData, opt_fragment) {
  var result = '';

  if (opt_scheme)
    result += opt_scheme + ':';

  if (opt_domain) {
    result += '//';
    if (opt_userInfo)
      result += opt_userInfo + '@';
    result += opt_domain;
    if (opt_port)
      result += ':' + opt_port;
  }

  if (opt_path)
    result += opt_path;

  if (opt_queryData)
    result += '?' + opt_queryData;

  if (opt_fragment)
    result += '#' + opt_fragment;

  return result;
}
/**
* A regular expression for breaking a URI into its component parts.
*
* {@link http://www.gbiv.com/protocols/uri/rfc/rfc3986.html#RFC2234} says
* As the "first-match-wins" algorithm is identical to the "greedy"
* disambiguation method used by POSIX regular expressions, it is natural and
* commonplace to use a regular expression for parsing the potential five
* components of a URI reference.
*
* The following line is the regular expression for breaking-down a
* well-formed URI reference into its components.
*
* <pre>
* ^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?
* 12 3 4 5 6 7 8 9
* </pre>
*
* The numbers in the second line above are only to assist readability; they
* indicate the reference points for each subexpression (i.e., each paired
* parenthesis). We refer to the value matched for subexpression <n> as $<n>.
* For example, matching the above expression to
* <pre>
* http://www.ics.uci.edu/pub/ietf/uri/#Related
* </pre>
* results in the following subexpression matches:
* <pre>
* $1 = http:
* $2 = http
* $3 = //www.ics.uci.edu
* $4 = www.ics.uci.edu
* $5 = /pub/ietf/uri/
* $6 = <undefined>
* $7 = <undefined>
* $8 = #Related
* $9 = Related
* </pre>
* where <undefined> indicates that the component is not present, as is the
* case for the query component in the above example. Therefore, we can
* determine the value of the five components as
* <pre>
* scheme = $2
* authority = $4
* path = $5
* query = $7
* fragment = $9
* </pre>
*
* The regular expression has been modified slightly to expose the
* userInfo, domain, and port separately from the authority.
* The modified version yields
* <pre>
* $1 = http scheme
* $2 = <undefined> userInfo -\
* $3 = www.ics.uci.edu domain | authority
* $4 = <undefined> port -/
* $5 = /pub/ietf/uri/ path
* $6 = <undefined> query without ?
* $7 = Related fragment without #
* </pre>
* @type {!RegExp}
* @private
*/
// Compiled once at module load and reused by split(); the capture-group
// layout (scheme, userInfo, domain, port, path, query, fragment) is
// documented in detail in the comment above and mirrored by ComponentIndex.
var splitRe = new RegExp(
    '^' +
    '(?:' +
      '([^:/?#.]+)' +                     // scheme - ignore special characters
                                          // used by other URL parts such as :,
                                          // ?, /, #, and .
    ':)?' +
    '(?://' +
      '(?:([^/?#]*)@)?' +                 // userInfo
      '([\\w\\d\\-\\u0100-\\uffff.%]*)' + // domain - restrict to letters,
                                          // digits, dashes, dots, percent
                                          // escapes, and unicode characters.
      '(?::([0-9]+))?' +                  // port
    ')?' +
    '([^?#]+)?' +                         // path
    '(?:\\?([^#]*))?' +                   // query
    '(?:#(.*))?' +                        // fragment
    '$');
/**
 * The index of each URI component in the return value of goog.uri.utils.split.
 * Values correspond one-to-one with the capture groups of splitRe above
 * (group 0 is the full match, so indices start at 1).
 * @enum {number}
 */
var ComponentIndex = {
  SCHEME: 1,
  USER_INFO: 2,
  DOMAIN: 3,
  PORT: 4,
  PATH: 5,
  QUERY_DATA: 6,
  FRAGMENT: 7
};
/**
 * Splits a URI into its component parts.
 *
 * Each component can be accessed via the component indices; for example:
 * <pre>
 * goog.uri.utils.split(someStr)[goog.uri.utils.ComponentIndex.QUERY_DATA];
 * </pre>
 *
 * @param {string} uri The URI string to examine.
 * @return {!Array.<string|undefined>} Each component still URI-encoded.
 *     Components that are not present will be undefined or empty, depending
 *     on the browser's regular expression implementation. Never null, since
 *     arbitrary strings may still look like path names.
 */
function split(uri) {
  // For a non-global regex, exec() returns the same match array as
  // String.prototype.match; see the @return comment -- never null.
  var match = splitRe.exec(uri);
  return /** @type {!Array.<string|undefined>} */ (match);
}
/**
 * Removes dot segments in given path component, as described in
 * RFC 3986, section 5.2.4.
 *
 * @param {string} path A non-empty path component.
 * @return {string} Path component with removed dot segments.
 */
export function removeDotSegments(path) {
  if (path === '/')
    return '/';

  var hasLeadingSlash = path.charAt(0) === '/';
  var hasTrailingSlash = path.charAt(path.length - 1) === '/';

  var resolved = [];
  var parentHops = 0;  // '..' segments that escape a relative path's root

  var segments = path.split('/');
  for (var i = 0; i < segments.length; i++) {
    var segment = segments[i];
    if (segment === '' || segment === '.')
      continue;
    if (segment === '..') {
      if (resolved.length)
        resolved.pop();
      else
        parentHops++;
    } else {
      resolved.push(segment);
    }
  }

  if (!hasLeadingSlash) {
    // Relative paths keep the '..' hops that couldn't be resolved; an empty
    // result collapses to '.'.
    while (parentHops-- > 0)
      resolved.unshift('..');
    if (resolved.length === 0)
      resolved.push('.');
  }

  return (hasLeadingSlash ? '/' : '') + resolved.join('/') +
      (hasTrailingSlash ? '/' : '');
}
/**
 * Takes the array of parts produced by split(), canonicalizes the path part
 * (collapsing '//' and removing dot segments), and joins all parts back into
 * a URI string.
 * @param {Array.<string|undefined>} parts Output of split(); its PATH entry
 *     is updated in place.
 * @return {string} The rebuilt URI.
 */
function joinAndCanonicalizePath(parts) {
  var path = parts[ComponentIndex.PATH];
  // Bug fix: the original wrote `path.replace(/\/\//.g, '/')`, which reads
  // the (undefined) `g` property of the regex instead of applying the global
  // flag, so '//' was never collapsed. The intended pattern is /\/\//g.
  path = removeDotSegments(path.replace(/\/\//g, '/'));
  parts[ComponentIndex.PATH] = path;

  return buildFromEncodedParts(
      parts[ComponentIndex.SCHEME],
      parts[ComponentIndex.USER_INFO],
      parts[ComponentIndex.DOMAIN],
      parts[ComponentIndex.PORT],
      parts[ComponentIndex.PATH],
      parts[ComponentIndex.QUERY_DATA],
      parts[ComponentIndex.FRAGMENT]);
}
/**
 * Canonicalizes a URL by eliminating ./ path entries,
 * canonicalizing ../ entries, and collapsing occurrences of //.
 *
 * @param {string} url
 * @return {string}
 */
export function canonicalizeUrl(url) {
  return joinAndCanonicalizePath(split(url));
}
/**
 * Resolves a URL against a base URL, per RFC 3986 section 5.
 * @param {string} base The URL acting as the base URL.
 * @param {string} url The URL to resolve.
 * @return {string} The resolved, canonicalized URL.
 */
export function resolveUrl(base, url) {
  // NOTE(review): '@'-prefixed names are returned untouched — presumably
  // special module references in this system; confirm against callers.
  if (url[0] === '@')
    return url;

  var parts = split(url);
  var baseParts = split(base);

  // An absolute URL (has its own scheme) only needs canonicalization.
  if (parts[ComponentIndex.SCHEME]) {
    return joinAndCanonicalizePath(parts);
  } else {
    parts[ComponentIndex.SCHEME] = baseParts[ComponentIndex.SCHEME];
  }

  // Inherit scheme/userInfo/domain/port from the base where missing.
  for (var i = ComponentIndex.SCHEME; i <= ComponentIndex.PORT; i++) {
    if (!parts[i]) {
      parts[i] = baseParts[i];
    }
  }

  // An absolute path replaces the base path entirely.
  if (parts[ComponentIndex.PATH][0] == '/') {
    return joinAndCanonicalizePath(parts);
  }

  // Relative path: splice it onto the base path's directory portion
  // (everything up to and including the last '/').
  var path = baseParts[ComponentIndex.PATH];
  var index = path.lastIndexOf('/');
  path = path.slice(0, index + 1) + parts[ComponentIndex.PATH];
  parts[ComponentIndex.PATH] = path;
  return joinAndCanonicalizePath(parts);
}
| passy/traceur-todomvc | src/util/url.js | JavaScript | apache-2.0 | 8,826 |
' Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
Imports System.Threading
Imports Microsoft.CodeAnalysis.VisualBasic.Symbols
Namespace Microsoft.CodeAnalysis.VisualBasic
    ' A DiagnosticInfo placeholder for a possible obsolete-symbol diagnostic whose
    ' resolution is deferred until the symbol's attributes have been bound. Created
    ' with ERRID.Unknown; GetResolvedInfo() lazily computes (and caches) either the
    ' real obsolete diagnostic or a void diagnostic.
    Friend NotInheritable Class LazyObsoleteDiagnosticInfo
        Inherits DiagnosticInfo

        ' Cached result of GetResolvedInfo(); published once via
        ' Interlocked.CompareExchange so concurrent resolvers agree on one instance.
        Private lazyActualObsoleteDiagnostic As DiagnosticInfo

        ' The possibly-obsolete symbol being referenced.
        Private ReadOnly m_symbol As Symbol
        ' The symbol containing the reference; if it is itself obsolete,
        ' no diagnostic is reported.
        Private ReadOnly m_containingSymbol As Symbol

        Friend Sub New(sym As Symbol, containingSymbol As Symbol)
            MyBase.New(VisualBasic.MessageProvider.Instance, ERRID.Unknown)
            Me.m_symbol = sym
            Me.m_containingSymbol = containingSymbol
        End Sub

        ' Resolves the placeholder: reports the obsolete diagnostic only when the
        ' referenced symbol is obsolete AND the referencing context is not.
        Friend Overrides Function GetResolvedInfo() As DiagnosticInfo
            If lazyActualObsoleteDiagnostic Is Nothing Then
                ' A symbol's Obsoleteness may not have been calculated yet if the symbol is coming
                ' from a different compilation's source. In that case, force completion of attributes.
                m_symbol.ForceCompleteObsoleteAttribute()

                If m_symbol.ObsoleteState = ThreeState.True Then
                    Dim inObsoleteContext = ObsoleteAttributeHelpers.GetObsoleteContextState(m_containingSymbol, forceComplete:=True)
                    Debug.Assert(inObsoleteContext <> ThreeState.Unknown)

                    If inObsoleteContext = ThreeState.False Then
                        Dim info As DiagnosticInfo = ObsoleteAttributeHelpers.CreateObsoleteDiagnostic(m_symbol)
                        If info IsNot Nothing Then
                            ' First writer wins; later threads return the cached value.
                            Interlocked.CompareExchange(Me.lazyActualObsoleteDiagnostic, info, Nothing)
                            Return Me.lazyActualObsoleteDiagnostic
                        End If
                    End If
                End If

                ' If this symbol is not obsolete or is in an obsolete context, we don't want to report any diagnostics.
                ' Therefore make this a Void diagnostic.
                Interlocked.CompareExchange(Me.lazyActualObsoleteDiagnostic, ErrorFactory.VoidDiagnosticInfo, Nothing)
            End If

            Return lazyActualObsoleteDiagnostic
        End Function
    End Class
End Namespace
| DavidKarlas/roslyn | src/Compilers/VisualBasic/Portable/Errors/LazyObsoleteDiagnosticInfo.vb | Visual Basic | apache-2.0 | 2,344 |
package network
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// LoadBalancerLoadBalancingRulesClient is the network Client for
// load-balancing-rule operations. It embeds BaseClient, from which it
// inherits the base URI, subscription ID, and request-sending machinery.
type LoadBalancerLoadBalancingRulesClient struct {
	BaseClient
}
// NewLoadBalancerLoadBalancingRulesClient creates an instance of the
// LoadBalancerLoadBalancingRulesClient client targeting the default
// (public Azure) endpoint.
func NewLoadBalancerLoadBalancingRulesClient(subscriptionID string) LoadBalancerLoadBalancingRulesClient {
	client := NewLoadBalancerLoadBalancingRulesClientWithBaseURI(DefaultBaseURI, subscriptionID)
	return client
}
// NewLoadBalancerLoadBalancingRulesClientWithBaseURI creates an instance of the
// LoadBalancerLoadBalancingRulesClient client using a custom endpoint. Use this
// when interacting with an Azure cloud that uses a non-standard base URI
// (sovereign clouds, Azure stack).
func NewLoadBalancerLoadBalancingRulesClientWithBaseURI(baseURI string, subscriptionID string) LoadBalancerLoadBalancingRulesClient {
	base := NewWithBaseURI(baseURI, subscriptionID)
	return LoadBalancerLoadBalancingRulesClient{base}
}
// Get gets the specified load balancer load balancing rule.
// It runs the standard autorest Prepare -> Send -> Respond pipeline, wrapping
// each failure with operation context.
// Parameters:
// resourceGroupName - the name of the resource group.
// loadBalancerName - the name of the load balancer.
// loadBalancingRuleName - the name of the load balancing rule.
func (client LoadBalancerLoadBalancingRulesClient) Get(ctx context.Context, resourceGroupName string, loadBalancerName string, loadBalancingRuleName string) (result LoadBalancingRule, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/LoadBalancerLoadBalancingRulesClient.Get")
		defer func() {
			// Record the HTTP status code on span end, or -1 when no response
			// was received (e.g. the request never went out).
			sc := -1
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.GetPreparer(ctx, resourceGroupName, loadBalancerName, loadBalancingRuleName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.LoadBalancerLoadBalancingRulesClient", "Get", nil, "Failure preparing request")
		return
	}

	resp, err := client.GetSender(req)
	if err != nil {
		// Preserve the raw response for callers even on send failure.
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "network.LoadBalancerLoadBalancingRulesClient", "Get", resp, "Failure sending request")
		return
	}

	result, err = client.GetResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.LoadBalancerLoadBalancingRulesClient", "Get", resp, "Failure responding to request")
		return
	}

	return
}
// GetPreparer prepares the Get request: it expands the path template with the
// URL-encoded parameters and attaches the api-version query parameter.
func (client LoadBalancerLoadBalancingRulesClient) GetPreparer(ctx context.Context, resourceGroupName string, loadBalancerName string, loadBalancingRuleName string) (*http.Request, error) {
	const APIVersion = "2017-10-01"

	pathParams := map[string]interface{}{
		"loadBalancerName":      autorest.Encode("path", loadBalancerName),
		"loadBalancingRuleName": autorest.Encode("path", loadBalancingRuleName),
		"resourceGroupName":     autorest.Encode("path", resourceGroupName),
		"subscriptionId":        autorest.Encode("path", client.SubscriptionID),
	}
	queryParams := map[string]interface{}{
		"api-version": APIVersion,
	}

	return autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/loadBalancingRules/{loadBalancingRuleName}", pathParams),
		autorest.WithQueryParameters(queryParams)).
		Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client LoadBalancerLoadBalancingRulesClient) GetSender(req *http.Request) (*http.Response, error) {
	// Retry the request through the standard ARM registration-aware decorator.
	retryDecorator := azure.DoRetryWithRegistration(client.Client)
	return client.Send(req, retryDecorator)
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client LoadBalancerLoadBalancingRulesClient) GetResponder(resp *http.Response) (result LoadBalancingRule, err error) {
	decorators := []autorest.RespondDecorator{
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing(),
	}
	err = autorest.Respond(resp, decorators...)
	// Attach the raw response regardless of the outcome, as callers inspect it.
	result.Response = autorest.Response{Response: resp}
	return result, err
}
// List gets all the load balancing rules in a load balancer.
// Parameters:
// resourceGroupName - the name of the resource group.
// loadBalancerName - the name of the load balancer.
func (client LoadBalancerLoadBalancingRulesClient) List(ctx context.Context, resourceGroupName string, loadBalancerName string) (result LoadBalancerLoadBalancingRuleListResultPage, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/LoadBalancerLoadBalancingRulesClient.List")
		// The deferred closure reads the named results (result, err), so the
		// span records the final status code and error when List returns.
		defer func() {
			sc := -1
			if result.lblbrlr.Response.Response != nil {
				sc = result.lblbrlr.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Wire up the paging function used by NextWithContext to fetch later pages.
	result.fn = client.listNextResults
	req, err := client.ListPreparer(ctx, resourceGroupName, loadBalancerName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.LoadBalancerLoadBalancingRulesClient", "List", nil, "Failure preparing request")
		return
	}
	resp, err := client.ListSender(req)
	if err != nil {
		result.lblbrlr.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "network.LoadBalancerLoadBalancingRulesClient", "List", resp, "Failure sending request")
		return
	}
	result.lblbrlr, err = client.ListResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.LoadBalancerLoadBalancingRulesClient", "List", resp, "Failure responding to request")
		return
	}
	// If the service returned an empty first page that still advertises a next
	// link, advance immediately so callers do not observe a spurious empty page.
	if result.lblbrlr.hasNextLink() && result.lblbrlr.IsEmpty() {
		err = result.NextWithContext(ctx)
		return
	}
	return
}
// ListPreparer prepares the List request.
func (client LoadBalancerLoadBalancingRulesClient) ListPreparer(ctx context.Context, resourceGroupName string, loadBalancerName string) (*http.Request, error) {
	const APIVersion = "2017-10-01"
	// Path segments are individually URL-encoded before substitution.
	pathParams := map[string]interface{}{
		"loadBalancerName":  autorest.Encode("path", loadBalancerName),
		"resourceGroupName": autorest.Encode("path", resourceGroupName),
		"subscriptionId":    autorest.Encode("path", client.SubscriptionID),
	}
	queryParams := map[string]interface{}{"api-version": APIVersion}
	prep := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/loadBalancingRules", pathParams),
		autorest.WithQueryParameters(queryParams))
	return prep.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client LoadBalancerLoadBalancingRulesClient) ListSender(req *http.Request) (*http.Response, error) {
	// Retry the request through the standard ARM registration-aware decorator.
	retryDecorator := azure.DoRetryWithRegistration(client.Client)
	return client.Send(req, retryDecorator)
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client LoadBalancerLoadBalancingRulesClient) ListResponder(resp *http.Response) (result LoadBalancerLoadBalancingRuleListResult, err error) {
	decorators := []autorest.RespondDecorator{
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing(),
	}
	err = autorest.Respond(resp, decorators...)
	// Attach the raw response regardless of the outcome, as callers inspect it.
	result.Response = autorest.Response{Response: resp}
	return result, err
}
// listNextResults retrieves the next set of results, if any.
func (client LoadBalancerLoadBalancingRulesClient) listNextResults(ctx context.Context, lastResults LoadBalancerLoadBalancingRuleListResult) (result LoadBalancerLoadBalancingRuleListResult, err error) {
	req, err := lastResults.loadBalancerLoadBalancingRuleListResultPreparer(ctx)
	if err != nil {
		return result, autorest.NewErrorWithError(err, "network.LoadBalancerLoadBalancingRulesClient", "listNextResults", nil, "Failure preparing next results request")
	}
	// A nil request with a nil error means the previous page carried no next
	// link; return the zero-value result to signal the end of the collection.
	if req == nil {
		return
	}
	resp, err := client.ListSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		return result, autorest.NewErrorWithError(err, "network.LoadBalancerLoadBalancingRulesClient", "listNextResults", resp, "Failure sending next results request")
	}
	result, err = client.ListResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "network.LoadBalancerLoadBalancingRulesClient", "listNextResults", resp, "Failure responding to next results request")
	}
	return
}
// ListComplete enumerates all values, automatically crossing page boundaries as required.
func (client LoadBalancerLoadBalancingRulesClient) ListComplete(ctx context.Context, resourceGroupName string, loadBalancerName string) (result LoadBalancerLoadBalancingRuleListResultIterator, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/LoadBalancerLoadBalancingRulesClient.List")
		defer func() {
			sc := -1
			// Read the status code through a single accessor chain. The original
			// nil-checked result.Response() but then read the code via
			// result.page.Response(), obscuring that the same response is meant.
			if result.page.Response().Response.Response != nil {
				sc = result.page.Response().Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	result.page, err = client.List(ctx, resourceGroupName, loadBalancerName)
	return
}
| djs55/linuxkit | src/cmd/linuxkit/vendor/github.com/Azure/azure-sdk-for-go/services/network/mgmt/2017-10-01/network/loadbalancerloadbalancingrules.go | GO | apache-2.0 | 10,169 |
# -*- coding: utf-8 -*-
"""
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import dom_parser
from resources.lib.modules import source_utils
class source:
    def __init__(self):
        # Scraper metadata consumed by the provider framework.
        self.priority = 1
        self.language = ['de']
        # This host only serves horror titles, so restrict the genre filter.
        self.genre_filter = ['horror']
        self.domains = ['horrorkino.do.am']
        self.base_link = 'http://horrorkino.do.am/'
        self.search_link = 'video/shv'

    def movie(self, imdb, title, localtitle, aliases, year):
        # Resolve a movie to a site-relative URL, preferring the localized
        # title and falling back to the original title when they differ.
        # Returns None on failure (errors are deliberately swallowed, as the
        # framework treats a missing result as "provider has no match").
        try:
            url = self.__search([localtitle] + source_utils.aliases_to_array(aliases), year)
            if not url and title != localtitle: url = self.__search([title] + source_utils.aliases_to_array(aliases), year)
            return url
        except:
            return

    def sources(self, url, hostDict, hostprDict):
        # Scrape hoster links from the movie page; returns a (possibly empty)
        # list of source dicts understood by the resolver framework.
        sources = []
        try:
            if not url:
                return sources
            r = client.request(urlparse.urljoin(self.base_link, url))
            # The embed markup is stored string-escaped inside a JS variable
            # named "vicode"; unescape it (Python 2 'string_escape') before
            # parsing the iframes out of it.
            r = re.findall('''vicode\s*=\s*["'](.*?)["'];''', r)[0].decode('string_escape')
            r = dom_parser.parse_dom(r, 'iframe', req='src')
            r = [i.attrs['src'] for i in r]
            for i in r:
                # Keep only iframes that point at a known, supported hoster.
                valid, host = source_utils.is_host_valid(i, hostDict)
                if not valid: continue
                sources.append({'source': host, 'quality': 'SD', 'language': 'de', 'url': i, 'direct': False, 'debridonly': False, 'checkquality': True})
            return sources
        except:
            return sources

    def resolve(self, url):
        # URLs returned by sources() are already direct hoster links.
        return url

    def __search(self, titles, year):
        # POST the first title to the site's search endpoint and pick the best
        # match by cleaned title and year (one year of slack in each direction,
        # '0' accepted for entries with no parseable year).
        try:
            t = [cleantitle.get(i) for i in set(titles) if i]
            y = ['%s' % str(year), '%s' % str(int(year) + 1), '%s' % str(int(year) - 1), '0']
            r = client.request(urlparse.urljoin(self.base_link, self.search_link), post={'query': cleantitle.query(titles[0])})
            r = dom_parser.parse_dom(r, 'li', attrs={'class': 'entTd'})
            r = dom_parser.parse_dom(r, 'div', attrs={'class': 've-screen'}, req='title')
            r = [(dom_parser.parse_dom(i, 'a', req='href'), i.attrs['title'].split(' - ')[0]) for i in r]
            # Split each entry into (href, title, [(name, year)]) tuples.
            r = [(i[0][0].attrs['href'], i[1], re.findall('(.+?) \(*(\d{4})', i[1])) for i in r]
            r = [(i[0], i[2][0][0] if len(i[2]) > 0 else i[1], i[2][0][1] if len(i[2]) > 0 else '0') for i in r]
            r = sorted(r, key=lambda i: int(i[2]), reverse=True)  # with year > no year
            r = [i[0] for i in r if cleantitle.get(i[1]) in t and i[2] in y][0]
            return source_utils.strip_domain(r)
        except:
            return
| TheWardoctor/Wardoctors-repo | script.module.uncoded/lib/resources/lib/sources/de/horrorkino.py | Python | apache-2.0 | 3,418 |
// Copyright (C) 2015 Davis E. King (davis@dlib.net)
// License: Boost Software License See LICENSE.txt for the full license.
#ifndef DLIB_DNN_CuDNN_H_
#define DLIB_DNN_CuDNN_H_
#ifdef DLIB_USE_CUDA
#include "cuda_errors.h"
namespace dlib
{
class tensor;
class resizable_tensor;
namespace cuda
{
// -----------------------------------------------------------------------------------
class tensor_descriptor
{
    /*!
        Each tensor object will carry a tensor_descriptor in it when compiled with
        CUDA.
    !*/
public:
    // not copyable
    tensor_descriptor(const tensor_descriptor&) = delete;
    tensor_descriptor& operator=(const tensor_descriptor&) = delete;
    // but is movable
    tensor_descriptor(tensor_descriptor&& item) : tensor_descriptor() { swap(item); }
    tensor_descriptor& operator=(tensor_descriptor&& item) { swap(item); return *this; }

    tensor_descriptor();
    ~tensor_descriptor();

    void set_size(
        int n,
        int k,
        int nr,
        int nc
    );
    /*!
        ensures
            - if any of the arguments are 0 then they are all set to 0 in the tensor.
    !*/

    void get_size (
        int& n,
        int& k,
        int& nr,
        int& nc
    ) const;
    /*!
        ensures
            - writes the currently configured dimensions into the output
              arguments.
    !*/

    const void* get_handle (
    ) const { return handle; }
    /*!
        ensures
            - returns the underlying library handle as an opaque pointer.
              NOTE(review): presumably a cuDNN tensor descriptor -- confirm in
              the corresponding implementation file.
    !*/

private:
    // Move support is implemented as a plain handle swap; the moved-from
    // object keeps the (default-constructed) handle of the target.
    void swap(tensor_descriptor& item) { std::swap(handle, item.handle); }

    void* handle;  // opaque handle owned by this object
};
// ------------------------------------------------------------------------------------
void add(
float beta,
tensor& dest,
float alpha,
const tensor& src
);
/*!
requires
- One of the following is true:
- have_same_dimensions(src, dest)
- src.num_samples()==1 && src.k()==dest.k() && src.nr()==1 && src.nc()==1
- src.num_samples()==1 && src.k()==dest.k() && src.nr()==dest.nr() && src.nc()==dest.nc()
- src.num_samples()==1 && src.k()==1 && src.nr()==dest.nr() && src.nc()==dest.nc()
- is_same_object(src,dest) == false
ensures
- performs: dest = beta*dest + alpha*src
However, how the addition happens depends on the dimensions of src. In
particular, this function adds the scaled values of one src tensor to
dest. Each dimension of the src tensor must match the corresponding
dimension of the dest tensor or must be equal to 1. In the latter case,
the same value from the src tensor, for those dimensions, will be used to
add into the dest tensor.
!*/
void set_tensor (
tensor& t,
float value
);
/*!
ensures
- sets all elements in t equal to value.
!*/
void scale_tensor (
tensor& t,
float value
);
/*!
ensures
- scales all elements of t by the given value. I.e. for all elements E in
t, this function performs:
- E = E*value
!*/
// ------------------------------------------------------------------------------------
void assign_conv_bias_gradient (
tensor& grad,
const tensor& gradient_input
);
/*!
requires
- grad.num_samples() == 1
- grad.k() >= 1
- grad.nr() == 1
- grad.nc() == 1
- gradient_input.k() == grad.k()
- gradient_input.size() > 0
- is_same_object(grad,gradient_input) == false
ensures
- let BIAS be a tensor with all dimensions equal to 1 except for k which is >= 1.
- let OUT be the output of add(1,OUT,1,BIAS)
- let f(gradient_input,BIAS) == dot(gradient_input,OUT)
- Then this function computes the gradient of f() with respect to BIAS and
assigns it to grad.
!*/
// ------------------------------------------------------------------------------------
void batch_normalize_inference (
const double eps,
resizable_tensor& dest,
const tensor& src,
const tensor& gamma,
const tensor& beta,
const tensor& running_means,
const tensor& running_variances
);
void batch_normalize (
const double eps,
resizable_tensor& dest,
resizable_tensor& means,
resizable_tensor& invstds,
const double averaging_factor,
resizable_tensor& running_means,
resizable_tensor& running_variances,
const tensor& src,
const tensor& gamma,
const tensor& beta
);
void batch_normalize_gradient(
const double eps,
const tensor& gradient_input,
const tensor& means,
const tensor& invstds,
const tensor& src,
const tensor& gamma,
tensor& src_grad,
tensor& gamma_grad,
tensor& beta_grad
);
// ------------------------------------------------------------------------------------
void batch_normalize_conv_inference (
const double eps,
resizable_tensor& dest,
const tensor& src,
const tensor& gamma,
const tensor& beta,
const tensor& running_means,
const tensor& running_variances
);
void batch_normalize_conv (
const double eps,
resizable_tensor& dest,
resizable_tensor& means,
resizable_tensor& invstds,
const double averaging_factor,
resizable_tensor& running_means,
resizable_tensor& running_variances,
const tensor& src,
const tensor& gamma,
const tensor& beta
);
void batch_normalize_conv_gradient(
const double eps,
const tensor& gradient_input,
const tensor& means,
const tensor& invstds,
const tensor& src,
const tensor& gamma,
tensor& src_grad,
tensor& gamma_grad,
tensor& beta_grad
);
// ------------------------------------------------------------------------------------
class tensor_conv
{
    /*!
        Wraps a 2D convolution primitive together with the algorithm choices
        and scratch workspaces it needs.  operator() performs the forward
        pass; the get_gradient_*() methods backpropagate through it.
    !*/
public:
    tensor_conv(const tensor_conv&) = delete;
    tensor_conv& operator=(const tensor_conv&) = delete;

    tensor_conv();

    void clear(
    );
    /*!
        ensures
            - returns this object to the state it had just after construction.
              NOTE(review): exact postconditions live in the implementation --
              confirm before relying on them.
    !*/

    ~tensor_conv (
    );

    void operator() (
        resizable_tensor& output,
        const tensor& data,
        const tensor& filters,
        int stride_y,
        int stride_x,
        int padding_y,
        int padding_x
    );
    /*!
        requires
            - stride_y > 0
            - stride_x > 0
            - 0 <= padding_y < filters.nr()
            - 0 <= padding_x < filters.nc()
            - is_same_object(output,data) == false
            - is_same_object(output,filters) == false
        ensures
            - convolves filters over data.
            - filters contains filters.num_samples() filters.
            - #output.num_samples() == data.num_samples()
            - #output.k() == filters.num_samples()
            - #output.nr() == 1+(data.nr()-filters.nr()%2)/stride_y
            - #output.nc() == 1+(data.nc()-filters.nc()%2)/stride_x
    !*/

    void get_gradient_for_data (
        const tensor& gradient_input,
        const tensor& filters,
        tensor& data_gradient
    );
    /*!
        requires
            - filters has the same dimensions as the filters object give to the
              last call to operator().
            - data_gradient has the same dimensions as the data object give to the
              last call to operator().
            - gradient_input has the same dimensions as the output of operator().
            - is_same_object(data_gradient,filters) == false
            - is_same_object(data_gradient,gradient_input) == false
        ensures
            - let OUT be the output of (*this)(OUT,data,filters).
            - let f(data,filters) == dot(OUT, gradient_input)
            - This function finds the gradient of f() with respect to data
              and adds this gradient to data_gradient.
    !*/

    void get_gradient_for_filters (
        const tensor& gradient_input,
        const tensor& data,
        tensor& filters_gradient
    );
    /*!
        requires
            - filters_gradient has the same dimensions as the filters object give
              to the last call to operator().
            - data has the same dimensions as the data object give to the last call
              to operator().
            - gradient_input has the same dimensions as the output of operator().
            - is_same_object(filters_gradient,data) == false
            - is_same_object(filters_gradient,gradient_input) == false
        ensures
            - let OUT be the output of (*this)(OUT,data,filters).
            - let f(data,filters) == dot(OUT, gradient_input)
            - This function finds the gradient of f() with respect to filters
              and assigns this gradient to filters_gradient.
    !*/

private:
    void setup(
        const tensor& data,
        const tensor& filters,
        int stride_y,
        int stride_x,
        int padding_y,
        int padding_x
    );
    /*!
        requires
            - filters.k() == data.k()
            - stride_y > 0
            - stride_x > 0
            - 0 <= padding_y < filters.nr()
            - 0 <= padding_x < filters.nc()
    !*/

    // These variables record the type of data given to the last call to setup().
    int stride_y;
    int stride_x;
    int padding_y;
    int padding_x;
    long data_num_samples, data_k, data_nr, data_nc;
    long filters_num_samples, filters_k, filters_nr, filters_nc;

    // Opaque library descriptors for the filter layout and the convolution.
    void* filter_handle;
    void* conv_handle;

    // dimensions of the output tensor from operator()
    int out_num_samples;
    int out_k;
    int out_nr;
    int out_nc;

    // Selected algorithm and scratch workspace for each of the three passes
    // (forward, gradient w.r.t. data, gradient w.r.t. filters).
    int forward_algo;
    size_t forward_workspace_size_in_bytes;
    void* forward_workspace;

    int backward_data_algo;
    size_t backward_data_workspace_size_in_bytes;
    void* backward_data_workspace;

    int backward_filters_algo;
    size_t backward_filters_workspace_size_in_bytes;
    void* backward_filters_workspace;
};
// ------------------------------------------------------------------------------------
class pooling
{
    /*!
        Wraps a pooling primitive.  Configure it with setup_max_pooling() or
        setup_avg_pooling() and then apply it with operator(); get_gradient()
        backpropagates through the most recent application.
    !*/
public:
    pooling(const pooling&) = delete;
    pooling& operator=(const pooling&) = delete;

    pooling (
    );
    ~pooling(
    );

    void clear(
    );

    void setup_max_pooling(
        int window_height,
        int window_width,
        int stride_y,
        int stride_x,
        int padding_y,
        int padding_x
    );

    void setup_avg_pooling(
        int window_height,
        int window_width,
        int stride_y,
        int stride_x,
        int padding_y,
        int padding_x
    );

    bool does_max_pooling(
    ) const { return do_max_pooling; }
    /*!
        ensures
            - returns the value of do_max_pooling.  NOTE(review): presumably
              true iff the last setup call was setup_max_pooling() -- confirm
              in the implementation.
    !*/

    void operator() (
        resizable_tensor& dest,
        const tensor& src
    );

    void get_gradient(
        const tensor& gradient_input,
        const tensor& dest,
        const tensor& src,
        tensor& grad
    );

private:
    // Shared implementation behind the two public setup_*_pooling() methods.
    void setup(
        int window_height,
        int window_width,
        int stride_y,
        int stride_x,
        int padding_y,
        int padding_x,
        int pooling_mode
    );

    void* handle;  // opaque library handle

    // Parameters recorded by the most recent setup call.
    int window_height;
    int window_width;
    int stride_y;
    int stride_x;
    int padding_y;
    int padding_x;
    bool do_max_pooling;
};
// ------------------------------------------------------------------------------------
void softmax (
tensor& dest,
const tensor& src
);
/*!
requires
- have_same_dimensions(dest, src) == true
ensures
- Note that the softmax function is a vector valued function:
s(x) == exp(x)/sum(exp(x))
- Computes the softmax function on src and writes the results to dest. The
softmax is computed per spatial location across the different channels at
each location. That is, softmax() outputs a new tensor, #dest, where
each of the spatial locations in dest (i.e. image idx, row idx, and
column idx) contains the output of s() evaluated over the channel values
at each location.
- This function supports in-place operation, i.e. having
is_same_object(dest, src)==true
!*/
void softmax_gradient (
tensor& grad,
const tensor& dest,
const tensor& gradient_input
);
/*!
requires
- have_same_dimensions(dest,gradient_input) == true
- have_same_dimensions(dest,grad) == true
- is_same_object(grad, dest)==false
ensures
- We interpret dest as the output of softmax(dest,SRC) for some SRC tensor.
Then let f(SRC) == dot(gradient_input,dest) Then this function computes
the gradient of f() with respect to SRC and assigns it to grad.
- This function supports in-place operation, i.e. having
is_same_object(grad, gradient_input)==true
!*/
// ------------------------------------------------------------------------------------
void sigmoid (
tensor& dest,
const tensor& src
);
/*!
requires
- have_same_dimensions(dest, src) == true
ensures
- for all valid i:
- #dest.host()[i] == 1/(1+std::exp(-src.host()[i]))
- This function supports in-place operation, i.e. having
is_same_object(dest, src)==true
!*/
void sigmoid_gradient (
tensor& grad,
const tensor& dest,
const tensor& gradient_input
);
/*!
requires
- have_same_dimensions(dest,gradient_input) == true
- have_same_dimensions(dest,grad) == true
- is_same_object(grad,dest) == false
ensures
- Recalling that dest is the output of sigmoid(dest,SRC) for some SRC tensor,
let f(SRC) == dot(gradient_input,dest)
- Then this function computes the gradient of f() with respect to SRC and
assigns it to grad.
- This function supports in-place operation, i.e. having
is_same_object(grad, gradient_input)==true
!*/
// ------------------------------------------------------------------------------------
void relu (
tensor& dest,
const tensor& src
);
/*!
requires
- have_same_dimensions(dest, src) == true
ensures
- for all valid i:
- #dest.host()[i] == std::max(0,src.host()[i])
- This function supports in-place operation, i.e. having
is_same_object(dest, src)==true
!*/
void relu_gradient (
tensor& grad,
const tensor& dest,
const tensor& gradient_input
);
/*!
requires
- have_same_dimensions(dest,gradient_input) == true
- have_same_dimensions(dest,grad) == true
- is_same_object(grad,dest) == false
ensures
- Recalling that dest is the output of relu(dest,SRC) for some SRC tensor,
let f(SRC) == dot(gradient_input,dest)
- Then this function computes the gradient of f() with respect to SRC and
assigns it to grad.
- This function supports in-place operation, i.e. having
is_same_object(grad, gradient_input)==true
!*/
// ------------------------------------------------------------------------------------
void tanh (
tensor& dest,
const tensor& src
);
/*!
requires
- have_same_dimensions(dest, src) == true
ensures
- for all valid i:
- #dest.host()[i] == std::tanh(src.host()[i])
- This function supports in-place operation, i.e. having
is_same_object(dest, src)==true
!*/
void tanh_gradient (
tensor& grad,
const tensor& dest,
const tensor& gradient_input
);
/*!
requires
- have_same_dimensions(dest,gradient_input) == true
- have_same_dimensions(dest,grad) == true
- is_same_object(grad,dest) == false
ensures
- Recalling that dest is the output of tanh(dest,SRC) for some SRC tensor,
let f(SRC) == dot(gradient_input,dest)
- Then this function computes the gradient of f() with respect to SRC and
assigns it to grad.
- This function supports in-place operation, i.e. having
is_same_object(grad, gradient_input)==true
!*/
// ------------------------------------------------------------------------------------
}
}
#endif // DLIB_USE_CUDA
#endif // DLIB_DNN_CuDNN_H_
| tunaemre/Face-Swap-Android | faceSwap/src/main/jni/dlib/dnn/cudnn_dlibapi.h | C | apache-2.0 | 19,622 |
#pragma once
#include "platform/network_policy.hpp"
@class NSDate;
namespace network_policy
{
// User's stored answer to the "may the app use the network?" policy prompt.
// NOTE(review): the per-value semantics below are inferred from the names and
// the date-based helpers in this header -- confirm against
// platform/network_policy.hpp.
enum Stage
{
  Ask,       // no decision stored yet; the user should be asked
  Always,    // permanently allowed
  Never,     // permanently forbidden
  Today,     // decision scoped to the stored policy date (allowed)
  NotToday   // decision scoped to the stored policy date (declined)
};

// Persists the given stage as the current policy.
void SetStage(Stage state);
// Returns the currently stored policy stage.
Stage GetStage();

// Returns true when the current policy permits network use.
bool CanUseNetwork();
// Presumably: true while the stored policy date is still in effect -- confirm.
bool IsActivePolicyDate();
// The date associated with the Today/NotToday decision.
NSDate* GetPolicyDate();
}  // namespace network_policy
| rokuz/omim | platform/network_policy_ios.h | C | apache-2.0 | 312 |
/*
* Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
*/
#define __BGP_IP_TEST_WRAPPER_TEST_SUITE__
#include "bgp_ip_test.cc"
int main(int argc, char **argv) {
    // Run the shared bgp_ip_test suite (included above via bgp_ip_test.cc)
    // with IPv6 next-hops.  The real command line (argc/argv) is deliberately
    // ignored in favor of this fixed argument vector.
    const char *largv[] = {
        __FILE__,
        "--nexthop-address-family=inet6",
    };
    return bgp_ip_test_main(sizeof(largv)/sizeof(largv[0]), largv);
}
| tcpcloud/contrail-controller | src/bgp/test/bgp_ip_test2.cc | C++ | apache-2.0 | 343 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.sql.planner.assertions;
import com.google.common.collect.ImmutableList;
import io.trino.Session;
import io.trino.metadata.Metadata;
import io.trino.sql.parser.ParsingOptions;
import io.trino.sql.parser.SqlParser;
import io.trino.sql.planner.Symbol;
import io.trino.sql.planner.plan.ApplyNode;
import io.trino.sql.planner.plan.PlanNode;
import io.trino.sql.planner.plan.ProjectNode;
import io.trino.sql.tree.Expression;
import io.trino.sql.tree.InPredicate;
import io.trino.sql.tree.SymbolReference;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import static com.google.common.base.Preconditions.checkState;
import static io.trino.sql.ExpressionUtils.rewriteIdentifiersToSymbolReferences;
import static java.util.Objects.requireNonNull;
/**
 * Matches a plan node's output symbol by the expression assigned to it.
 * Supports {@link ProjectNode} and {@link ApplyNode} assignments.
 */
public class ExpressionMatcher
        implements RvalueMatcher
{
    // Original SQL text of the expression, kept for toString()/diagnostics.
    private final String sql;
    private final Expression expression;

    public ExpressionMatcher(String expression)
    {
        this.sql = requireNonNull(expression, "expression is null");
        this.expression = expression(expression);
    }

    public ExpressionMatcher(Expression expression)
    {
        this.expression = requireNonNull(expression, "expression is null");
        this.sql = expression.toString();
    }

    private Expression expression(String sql)
    {
        SqlParser parser = new SqlParser();
        return rewriteIdentifiersToSymbolReferences(parser.createExpression(sql, new ParsingOptions()));
    }

    public static ExpressionMatcher inPredicate(SymbolReference value, SymbolReference valueList)
    {
        return new ExpressionMatcher(new InPredicate(value, valueList));
    }

    /**
     * Finds the unique output symbol whose assignment matches this matcher's expression.
     *
     * @return the matching symbol, or {@link Optional#empty()} when the node has no
     *         assignments or none of them match
     * @throws IllegalStateException if more than one assignment matches
     */
    @Override
    public Optional<Symbol> getAssignedSymbol(PlanNode node, Session session, Metadata metadata, SymbolAliases symbolAliases)
    {
        Optional<Symbol> result = Optional.empty();
        ImmutableList.Builder<Expression> matchesBuilder = ImmutableList.builder();
        Map<Symbol, Expression> assignments = getAssignments(node);

        if (assignments == null) {
            return result;
        }

        ExpressionVerifier verifier = new ExpressionVerifier(symbolAliases);
        for (Map.Entry<Symbol, Expression> assignment : assignments.entrySet()) {
            if (verifier.process(assignment.getValue(), expression)) {
                result = Optional.of(assignment.getKey());
                matchesBuilder.add(assignment.getValue());
            }
        }

        List<Expression> matches = matchesBuilder.build();
        // Fix: the original template had no placeholder for the matched expressions,
        // so Guava's lenient formatting only appended them in square brackets; make
        // them an explicit part of the message.
        checkState(matches.size() < 2, "Ambiguous expression %s matches multiple assignments: %s", expression,
                matches.stream().map(Expression::toString).collect(Collectors.joining(", ")));

        return result;
    }

    // Returns the node's assignment map, or null for node types without assignments.
    private static Map<Symbol, Expression> getAssignments(PlanNode node)
    {
        if (node instanceof ProjectNode) {
            ProjectNode projectNode = (ProjectNode) node;
            return projectNode.getAssignments().getMap();
        }
        else if (node instanceof ApplyNode) {
            ApplyNode applyNode = (ApplyNode) node;
            return applyNode.getSubqueryAssignments().getMap();
        }
        else {
            return null;
        }
    }

    @Override
    public String toString()
    {
        return sql;
    }
}
| electrum/presto | core/trino-main/src/test/java/io/trino/sql/planner/assertions/ExpressionMatcher.java | Java | apache-2.0 | 3,920 |
require 'spec_helper'
describe 'collectd::plugin::swap', :type => :class do
  # Default parameters render the load file with device reporting disabled.
  context ':ensure => present, default params' do
    let :facts do
      {:osfamily => 'RedHat'}
    end
    it 'Will create /etc/collectd.d/10-swap.conf' do
      should contain_file('swap.load').with({
        :ensure => 'present',
        :path => '/etc/collectd.d/10-swap.conf',
        :content => /\#\ Generated by Puppet\nLoadPlugin swap\n\n<Plugin swap>\n ReportByDevice false\n<\/Plugin>\n/,
      })
    end
  end
  # collectd < 5.2 does not support ReportBytes, so it must be absent.
  context ':ensure => present, specific params, collectd version 5.0' do
    let :facts do
      { :osfamily => 'Redhat',
        :collectd_version => '5.0'
      }
    end
    it 'Will create /etc/collectd.d/10-swap.conf for collectd < 5.2' do
      should contain_file('swap.load').with({
        :ensure => 'present',
        :path => '/etc/collectd.d/10-swap.conf',
        :content => "# Generated by Puppet\nLoadPlugin swap\n\n<Plugin swap>\n ReportByDevice false\n</Plugin>\n",
      })
    end
  end
  # collectd >= 5.2 additionally renders ReportBytes true.
  context ':ensure => present, specific params, collectd version 5.2.0' do
    let :facts do
      { :osfamily => 'Redhat',
        :collectd_version => '5.2.0'
      }
    end
    it 'Will create /etc/collectd.d/10-swap.conf for collectd >= 5.2' do
      should contain_file('swap.load').with({
        :ensure => 'present',
        :path => '/etc/collectd.d/10-swap.conf',
        :content => "# Generated by Puppet\nLoadPlugin swap\n\n<Plugin swap>\n ReportByDevice false\n ReportBytes true\n</Plugin>\n",
      })
    end
  end
  # :ensure => absent removes the load file rather than creating it.
  context ':ensure => absent' do
    let :facts do
      {:osfamily => 'RedHat'}
    end
    let :params do
      {:ensure => 'absent'}
    end
    it 'Will not create /etc/collectd.d/10-swap.conf' do
      should contain_file('swap.load').with({
        :ensure => 'absent',
        :path => '/etc/collectd.d/10-swap.conf',
      })
    end
  end
end
| apache/infrastructure-puppet | modules/collectd/spec/classes/collectd_plugin_swap_spec.rb | Ruby | apache-2.0 | 1,936 |
// Copyright 2010 The Bazel Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.testing.junit.runner.sharding.testing;
import static com.google.common.truth.Truth.assertThat;
import com.google.testing.junit.runner.sharding.api.ShardingFilterFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import junit.framework.TestCase;
import org.junit.Test;
import org.junit.runner.Description;
import org.junit.runner.manipulation.Filter;
/**
* Common base class for all sharding filter tests.
*/
public abstract class ShardingFilterTestCase extends TestCase {
static final List<Description> TEST_DESCRIPTIONS = createGenericTestCaseDescriptions(6);
/**
* Returns a filter of the subclass type using the given descriptions,
* shard index, and total number of shards.
*/
protected abstract ShardingFilterFactory createShardingFilterFactory();
public final void testShardingIsCompleteAndPartitioned_oneShard() {
assertShardingIsCompleteAndPartitioned(createFilters(TEST_DESCRIPTIONS, 1), TEST_DESCRIPTIONS);
}
public final void testShardingIsStable_oneShard() {
assertShardingIsStable(createFilters(TEST_DESCRIPTIONS, 1), TEST_DESCRIPTIONS);
}
public final void testShardingIsCompleteAndPartitioned_moreTestsThanShards() {
assertShardingIsCompleteAndPartitioned(createFilters(TEST_DESCRIPTIONS, 5), TEST_DESCRIPTIONS);
}
public final void testShardingIsStable_moreTestsThanShards() {
assertShardingIsStable(createFilters(TEST_DESCRIPTIONS, 5), TEST_DESCRIPTIONS);
}
public final void testShardingIsCompleteAndPartitioned_sameNumberOfTestsAndShards() {
assertShardingIsCompleteAndPartitioned(createFilters(TEST_DESCRIPTIONS, 6), TEST_DESCRIPTIONS);
}
public final void testShardingIsStable_sameNumberOfTestsAndShards() {
assertShardingIsStable(createFilters(TEST_DESCRIPTIONS, 6), TEST_DESCRIPTIONS);
}
public final void testShardingIsCompleteAndPartitioned_moreShardsThanTests() {
assertShardingIsCompleteAndPartitioned(createFilters(TEST_DESCRIPTIONS, 7), TEST_DESCRIPTIONS);
}
public final void testShardingIsStable_moreShardsThanTests() {
assertShardingIsStable(createFilters(TEST_DESCRIPTIONS, 7), TEST_DESCRIPTIONS);
}
public final void testShardingIsCompleteAndPartitioned_duplicateDescriptions() {
List<Description> descriptions = new ArrayList<>();
descriptions.addAll(createGenericTestCaseDescriptions(6));
descriptions.addAll(createGenericTestCaseDescriptions(6));
assertShardingIsCompleteAndPartitioned(createFilters(descriptions, 7), descriptions);
}
public final void testShardingIsStable_duplicateDescriptions() {
List<Description> descriptions = new ArrayList<>();
descriptions.addAll(createGenericTestCaseDescriptions(6));
descriptions.addAll(createGenericTestCaseDescriptions(6));
assertShardingIsStable(createFilters(descriptions, 7), descriptions);
}
  public final void testShouldRunTestSuite() {
    // Suite-level descriptions must never be filtered out: sharding decisions
    // apply to individual test cases, not to their containers.
    Description testSuiteDescription = createTestSuiteDescription();
    Filter filter = createShardingFilterFactory().createFilter(TEST_DESCRIPTIONS, 0, 1);
    assertThat(filter.shouldRun(testSuiteDescription)).isTrue();
  }
/**
* Creates a list of generic test case descriptions.
*
* @param numDescriptions the number of generic test descriptions to add to the list.
*/
public static List<Description> createGenericTestCaseDescriptions(int numDescriptions) {
List<Description> descriptions = new ArrayList<>();
for (int i = 0; i < numDescriptions; i++) {
descriptions.add(Description.createTestDescription(Test.class, "test" + i));
}
return descriptions;
}
protected static final List<Filter> createFilters(List<Description> descriptions, int numShards,
ShardingFilterFactory factory) {
List<Filter> filters = new ArrayList<>();
for (int shardIndex = 0; shardIndex < numShards; shardIndex++) {
filters.add(factory.createFilter(descriptions, shardIndex, numShards));
}
return filters;
}
  // Convenience overload that uses the factory supplied by the concrete subclass.
  protected final List<Filter> createFilters(List<Description> descriptions, int numShards) {
    return createFilters(descriptions, numShards, createShardingFilterFactory());
  }
  // Asserts that the filter rejects (with IllegalArgumentException) a description
  // that was not part of the set it was created from.
  protected static void assertThrowsExceptionForUnknownDescription(Filter filter) {
    try {
      filter.shouldRun(Description.createTestDescription(Object.class, "unknown"));
      fail("expected thrown exception");
    } catch (IllegalArgumentException expected) { }
  }
  /**
   * Simulates test sharding with the given filters and test descriptions.
   *
   * @param filters a list of filters, one per test shard
   * @param descriptions a list of test descriptions
   * @return a mapping from each filter to the descriptions of the tests that would be run
   *     by the shard associated with that filter. Filters that run nothing have no entry.
   */
  protected static Map<Filter, List<Description>> simulateTestRun(List<Filter> filters,
      List<Description> descriptions) {
    Map<Filter, List<Description>> descriptionsRun = new HashMap<>();
    for (Filter filter : filters) {
      // Every shard sees the descriptions in the same (original) order.
      for (Description description : descriptions) {
        if (filter.shouldRun(description)) {
          addDescriptionForFilterToMap(descriptionsRun, filter, description);
        }
      }
    }
    return descriptionsRun;
  }
/**
* Simulates test sharding with the given filters and test descriptions, for a
* set of test descriptions that is in a different order in every test shard.
*
* @param filters a list of filters, one per test shard
* @param descriptions a list of test descriptions
* @return a mapping from each filter to the descriptions of the tests that would be run
* by the shard associated with that filter.
*/
protected static Map<Filter, List<Description>> simulateSelfRandomizingTestRun(
List<Filter> filters, List<Description> descriptions) {
if (descriptions.isEmpty()) {
return new HashMap<>();
}
Deque<Description> mutatingDescriptions = new LinkedList<>(descriptions);
Map<Filter, List<Description>> descriptionsRun = new HashMap<>();
for (Filter filter : filters) {
// rotate the queue so that each filter gets the descriptions in a different order
mutatingDescriptions.addLast(mutatingDescriptions.pollFirst());
for (Description description : descriptions) {
if (filter.shouldRun(description)) {
addDescriptionForFilterToMap(descriptionsRun, filter, description);
}
}
}
return descriptionsRun;
}
  /**
   * Creates a test suite description (a Description that returns true
   * when {@link org.junit.runner.Description#isSuite()} is called.)
   */
  protected static Description createTestSuiteDescription() {
    Description testSuiteDescription = Description.createSuiteDescription("testSuite");
    // Adding a child is what makes the description report itself as a suite.
    testSuiteDescription.addChild(Description.createSuiteDescription("testCase"));
    return testSuiteDescription;
  }
  /**
   * Tests that the sharding is complete (each test is run at least once) and
   * partitioned (each test is run at most once) -- in other words, that
   * each test is run exactly once. This is a requirement of all test
   * sharding functions.
   */
  protected static void assertShardingIsCompleteAndPartitioned(List<Filter> filters,
      List<Description> descriptions) {
    // Check both a fixed-order run and a per-shard-reordered run.
    Map<Filter, List<Description>> run = simulateTestRun(filters, descriptions);
    assertThatCollectionContainsExactlyElementsInList(getAllValuesInMap(run), descriptions);
    run = simulateSelfRandomizingTestRun(filters, descriptions);
    assertThatCollectionContainsExactlyElementsInList(getAllValuesInMap(run), descriptions);
  }
  /**
   * Tests that sharding is stable for the given filters, regardless of the
   * ordering of the descriptions. This is useful for verifying that sharding
   * works with self-randomizing test suites, and a requirement of all test
   * sharding functions.
   */
  protected static void assertShardingIsStable(
      List<Filter> filters, List<Description> descriptions) {
    // Two identical simulations must produce identical shard assignments.
    Map<Filter, List<Description>> run1 = simulateTestRun(filters, descriptions);
    Map<Filter, List<Description>> run2 = simulateTestRun(filters, descriptions);
    assertThat(run2).isEqualTo(run1);
    // Same check with per-shard reordering of the descriptions.
    Map<Filter, List<Description>> randomizedRun1 =
        simulateSelfRandomizingTestRun(filters, descriptions);
    Map<Filter, List<Description>> randomizedRun2 =
        simulateSelfRandomizingTestRun(filters, descriptions);
    assertThat(randomizedRun2).isEqualTo(randomizedRun1);
  }
private static void addDescriptionForFilterToMap(
Map<Filter, List<Description>> descriptionsRun, Filter filter, Description description) {
List<Description> descriptions = descriptionsRun.get(filter);
if (descriptions == null) {
descriptions = new ArrayList<>();
descriptionsRun.put(filter, descriptions);
}
descriptions.add(description);
}
private static Collection<Description> getAllValuesInMap(Map<Filter, List<Description>> map) {
Collection<Description> allDescriptions = new ArrayList<>();
for (List<Description> descriptions : map.values()) {
allDescriptions.addAll(descriptions);
}
return allDescriptions;
}
/**
* Returns whether the Collection and the List contain exactly the same elements with the same
* frequency, ignoring the ordering.
*/
private static void assertThatCollectionContainsExactlyElementsInList(
Collection<Description> actual, List<Description> expectedDescriptions) {
String basicAssertionMessage = "Elements of collection " + actual + " are not the same as the "
+ "elements of expected list " + expectedDescriptions + ". ";
if (actual.size() != expectedDescriptions.size()) {
throw new AssertionError(basicAssertionMessage + "The number of elements is different.");
}
List<Description> actualDescriptions = new ArrayList<Description>(actual);
// Keeps track of already reviewed descriptions, so they won't be checked again when next
// encountered.
// Note: this algorithm has O(n^2) time complexity and will be slow for large inputs.
Set<Description> reviewedDescriptions = new HashSet<>();
for (int i = 0; i < actual.size(); i++) {
Description currDescription = actualDescriptions.get(i);
// If already reviewed, skip.
if (reviewedDescriptions.contains(currDescription)) {
continue;
}
int actualFreq = 0;
int expectedFreq = 0;
// Count the frequency of the current description in both lists.
for (int j = 0; j < actual.size(); j++) {
if (currDescription.equals(actualDescriptions.get(j))) {
actualFreq++;
}
if (currDescription.equals(expectedDescriptions.get(j))) {
expectedFreq++;
}
}
if (actualFreq < expectedFreq) {
throw new AssertionError(basicAssertionMessage + "There are " + (expectedFreq - actualFreq)
+ " missing occurrences of " + currDescription + ".");
} else if (actualFreq > expectedFreq) {
throw new AssertionError(basicAssertionMessage + "There are " + (actualFreq - expectedFreq)
+ " unexpected occurrences of " + currDescription + ".");
}
reviewedDescriptions.add(currDescription);
}
}
}
| damienmg/bazel | src/java_tools/junitrunner/java/com/google/testing/junit/runner/sharding/testing/ShardingFilterTestCase.java | Java | apache-2.0 | 12,016 |
#!/usr/bin/env bash
# Launches Destination Sol on OS X using the bundled JRE.
# Fail fast on errors, and resolve the JRE/jar paths relative to this
# script's own directory so the launcher works from any working directory
# (the original relied on being started from the install folder).
set -e
cd "$(dirname "$0")"
exec lwjreOSX/bin/java -jar libs/solDesktop.jar
| Cervator/DestinationSol | launcher/solOSX.sh | Shell | apache-2.0 | 63 |
#pragma checksum "..\..\App.xaml" "{406ea660-64cf-4c82-b6f0-42d48172a799}" "5A325D8D6480F61FD53D1D80763A1895"
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
using Calculator;
using System;
using System.Diagnostics;
using System.Windows;
using System.Windows.Automation;
using System.Windows.Controls;
using System.Windows.Controls.Primitives;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Ink;
using System.Windows.Input;
using System.Windows.Markup;
using System.Windows.Media;
using System.Windows.Media.Animation;
using System.Windows.Media.Effects;
using System.Windows.Media.Imaging;
using System.Windows.Media.Media3D;
using System.Windows.Media.TextFormatting;
using System.Windows.Navigation;
using System.Windows.Shapes;
using System.Windows.Shell;
namespace Calculator {
    /// <summary>
    /// App — WPF application bootstrap generated from App.xaml.
    /// NOTE(review): this file is produced by PresentationBuildTasks; per the
    /// header above, hand edits are lost on regeneration. Edit App.xaml instead.
    /// </summary>
    public partial class App : System.Windows.Application {
        
        /// <summary>
        /// InitializeComponent — applies the XAML-declared configuration
        /// (here: the StartupUri pointing at MainWindow.xaml).
        /// </summary>
        [System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [System.CodeDom.Compiler.GeneratedCodeAttribute("PresentationBuildTasks", "4.0.0.0")]
        public void InitializeComponent() {
            
            #line 5 "..\..\App.xaml"
            this.StartupUri = new System.Uri("MainWindow.xaml", System.UriKind.Relative);
            
            #line default
            #line hidden
        }
        
        /// <summary>
        /// Application Entry Point.
        /// </summary>
        [System.STAThreadAttribute()]
        [System.Diagnostics.DebuggerNonUserCodeAttribute()]
        [System.CodeDom.Compiler.GeneratedCodeAttribute("PresentationBuildTasks", "4.0.0.0")]
        public static void Main() {
            Calculator.App app = new Calculator.App();
            app.InitializeComponent();
            app.Run();
        }
    }
}
| 00marco/Scientific-Calculator-Midterm-project- | Scientific-Calculator-Midterm-project-7/Calculator/Calculator/obj/Debug/App.g.i.cs | C# | apache-2.0 | 2,312 |
/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.service;
import java.util.Objects;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ComparisonChain;
import org.onosproject.store.Timestamp;
import static com.google.common.base.Preconditions.checkArgument;
/**
* Logical timestamp for versions.
* <p>
* The version is a logical timestamp that represents a point in logical time at which an event occurs.
* This is used in both pessimistic and optimistic locking protocols to ensure that the state of a shared resource
* has not changed at the end of a transaction.
*/
public class Version implements Timestamp {
private final long version;
public Version(long version) {
this.version = version;
}
@Override
public int compareTo(Timestamp o) {
checkArgument(o instanceof Version,
"Must be LockVersion", o);
Version that = (Version) o;
return ComparisonChain.start()
.compare(this.version, that.version)
.result();
}
@Override
public int hashCode() {
return Long.hashCode(version);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof Version)) {
return false;
}
Version that = (Version) obj;
return Objects.equals(this.version, that.version);
}
@Override
public String toString() {
return MoreObjects.toStringHelper(getClass())
.add("version", version)
.toString();
}
/**
* Returns the lock version.
*
* @return the lock version
*/
public long value() {
return this.version;
}
} | gkatsikas/onos | core/api/src/main/java/org/onosproject/store/service/Version.java | Java | apache-2.0 | 2,369 |
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.nativeplatform.internal;
import org.gradle.api.file.FileCollection;
import org.gradle.api.internal.file.FileCollectionFactory;
import org.gradle.language.nativeplatform.DependentSourceSet;
import org.gradle.nativeplatform.*;
import org.gradle.nativeplatform.internal.resolve.NativeBinaryRequirementResolveResult;
import org.gradle.nativeplatform.internal.resolve.NativeDependencyResolver;
import org.gradle.nativeplatform.platform.NativePlatform;
import org.gradle.nativeplatform.toolchain.NativeToolChain;
import org.gradle.nativeplatform.toolchain.internal.PlatformToolProvider;
import org.gradle.nativeplatform.toolchain.internal.PreCompiledHeader;
import org.gradle.platform.base.internal.BinarySpecInternal;
import java.io.File;
import java.util.Collection;
import java.util.Map;
public interface NativeBinarySpecInternal extends NativeBinarySpec, BinarySpecInternal {
    /** Sets the flavor this binary is built for. */
    void setFlavor(Flavor flavor);

    /** Sets the tool chain used to build this binary. */
    void setToolChain(NativeToolChain toolChain);

    /** Sets the native platform this binary targets. */
    void setTargetPlatform(NativePlatform targetPlatform);

    /** Sets the build type (e.g. debug/release) of this binary. */
    void setBuildType(BuildType buildType);

    /** Returns the named tool configuration; behavior for unknown names is implementation-defined. */
    Tool getToolByName(String name);

    PlatformToolProvider getPlatformToolProvider();

    void setPlatformToolProvider(PlatformToolProvider toolProvider);

    void setResolver(NativeDependencyResolver resolver);

    void setFileCollectionFactory(FileCollectionFactory fileCollectionFactory);

    /** Returns the main file produced by this binary (executable or library file). */
    File getPrimaryOutput();

    /** Returns the resolved native dependencies for the given source set. */
    Collection<NativeDependencySet> getLibs(DependentSourceSet sourceSet);

    /** Returns the library binaries this binary depends on. */
    Collection<NativeLibraryBinary> getDependentBinaries();

    /**
     * Adds some files to include as input to the link/assemble step of this binary.
     */
    void binaryInputs(FileCollection files);

    /** Returns all dependency resolution results gathered for this binary. */
    Collection<NativeBinaryRequirementResolveResult> getAllResolutions();

    /** Returns the mapping from prefix header files to their pre-compiled headers. */
    Map<File, PreCompiledHeader> getPrefixFileToPCH();

    /** Registers a pre-compiled header for the given source set. */
    void addPreCompiledHeaderFor(DependentSourceSet sourceSet);
}
| gstevey/gradle | subprojects/platform-native/src/main/java/org/gradle/nativeplatform/internal/NativeBinarySpecInternal.java | Java | apache-2.0 | 2,517 |
/*
* Copyright 2012 Amadeus s.a.s.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
Aria.classDefinition({
    $classpath : "test.aria.storage.localStorage.NamespaceTestCase",
    $dependencies : ["aria.storage.LocalStorage"],
    $extends : "test.aria.storage.base.GeneralNamespaceBase",
    $constructor : function () {
        // Run the shared namespace test suite against the localStorage backend.
        this.storageLocation = "localStorage";
        this.$GeneralNamespaceBase.constructor.call(this);
    },
    $prototype : {
        /**
         * Check what happens when you use namespaces: one namespace shouldn't affect the others.
         * Only runs when the base class reports HTML5 or userData storage support.
         */
        testNamespaceAPI : function () {
            if (this.canRunHTML5Tests(false) || this.canRunUserDataTests()) {
                this.$GeneralNamespaceBase.testNamespaceAPI.call(this);
            }
        },
        /**
         * Verify if the events are raised correctly.
         * Only runs when the base class reports HTML5 or userData storage support.
         */
        testNamespaceEvents : function () {
            if (this.canRunHTML5Tests(false) || this.canRunUserDataTests()) {
                this.$GeneralNamespaceBase.testNamespaceEvents.call(this);
            }
        }
    }
});
| vcarle/ariatemplates | test/aria/storage/localStorage/NamespaceTestCase.js | JavaScript | apache-2.0 | 1,613 |
<?php
/* +***********************************************************************************
* The contents of this file are subject to the vtiger CRM Public License Version 1.0
* ("License"); You may not use this file except in compliance with the License
* The Original Code is: vtiger CRM Open Source
* The Initial Developer of the Original Code is vtiger.
* Portions created by vtiger are Copyright (C) vtiger.
* All Rights Reserved.
* *********************************************************************************** */
class Project_RelationListView_Model extends Vtiger_RelationListView_Model {

	/**
	 * Builds the URL used to create a related record from a Project.
	 * For HelpDesk tickets, pre-fills the ticket's parent with the
	 * account/contact linked to this project (when the field is viewable).
	 */
	public function getCreateViewUrl() {
		$createViewUrl = parent::getCreateViewUrl();
		$relatedModule = $this->getRelationModel()->getRelationModuleModel();
		if ($relatedModule->getName() == 'HelpDesk'
				&& $relatedModule->getField('parent_id')->isViewable()) {
			$createViewUrl .= '&parent_id=' . $this->getParentRecordModel()->get('linktoaccountscontacts');
		}
		return $createViewUrl;
	}
}
| basiljose1/byjcrm | pkg/vtiger/modules/Projects/Project/modules/Project/models/RelationListView.php | PHP | apache-2.0 | 1,041 |
using System.ComponentModel.DataAnnotations;
using FluentMigrator.Infrastructure;
namespace FluentMigrator.Expressions
{
/// <summary>
/// Expression to delete a sequence
/// </summary>
public class DeleteSequenceExpression : MigrationExpressionBase, ISchemaExpression
{
/// <inheritdoc />
public virtual string SchemaName { get; set; }
/// <summary>
/// Gets or sets the sequence name
/// </summary>
[Required(ErrorMessageResourceType = typeof(ErrorMessages), ErrorMessageResourceName = nameof(ErrorMessages.SequenceNameCannotBeNullOrEmpty))]
public virtual string SequenceName { get; set; }
/// <inheritdoc />
public override void ExecuteWith(IMigrationProcessor processor)
{
processor.Process(this);
}
/// <inheritdoc />
public override string ToString()
{
return base.ToString() + SequenceName;
}
}
}
| schambers/fluentmigrator | src/FluentMigrator.Abstractions/Expressions/DeleteSequenceExpression.cs | C# | apache-2.0 | 978 |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.Serialization.Formatters;
using System.Threading;
using System.Threading.Tasks;
using Hyak.Common;
using Microsoft.Azure.Commands.Resources.Models.Authorization;
using Microsoft.Azure.Commands.Tags.Model;
using Microsoft.Azure.Common.Authentication;
using Microsoft.Azure.Common.Authentication.Models;
using Microsoft.Azure.Management.Authorization;
using Microsoft.Azure.Management.Authorization.Models;
using Microsoft.Azure.Management.Resources;
using Microsoft.Azure.Management.Resources.Models;
using Microsoft.WindowsAzure.Commands.Common;
using Microsoft.WindowsAzure.Commands.Utilities.Common;
using Newtonsoft.Json;
using ProjectResources = Microsoft.Azure.Commands.Resources.Properties.Resources;
namespace Microsoft.Azure.Commands.Resources.Models
{
public partial class ResourcesClient
{
        /// <summary>
        /// A string that indicates the value of the resource type name for the RP's operations api
        /// </summary>
        public const string Operations = "operations";

        /// <summary>
        /// A string that indicates the value of the registering state enum for a provider
        /// </summary>
        public const string RegisteredStateName = "Registered";

        /// <summary>
        /// Used when provisioning the deployment status.
        /// Accumulates operations already reported so progress is only logged once each.
        /// </summary>
        private List<DeploymentOperation> operations;

        /// <summary>Client used for ARM resource and deployment calls.</summary>
        public IResourceManagementClient ResourceManagementClient { get; set; }

        /// <summary>Client used for role/permission queries.</summary>
        public IAuthorizationManagementClient AuthorizationManagementClient { get; set; }

        /// <summary>Client used to resolve gallery templates.</summary>
        public GalleryTemplatesClient GalleryTemplatesClient { get; set; }

        // TODO: http://vstfrd:8080/Azure/RD/_workitems#_a=edit&id=3247094
        //public IEventsClient EventsClient { get; set; }

        /// <summary>Optional sink for verbose progress messages (may be null).</summary>
        public Action<string> VerboseLogger { get; set; }

        /// <summary>Optional sink for error messages (may be null).</summary>
        public Action<string> ErrorLogger { get; set; }

        /// <summary>Optional sink for warning messages (may be null).</summary>
        public Action<string> WarningLogger { get; set; }
        /// <summary>
        /// Creates a new ResourcesClient with clients built from the given profile's
        /// ResourceManager endpoint.
        /// </summary>
        /// <param name="profile">Profile containing resources to manipulate</param>
        public ResourcesClient(AzureProfile profile)
            : this(
                AzureSession.ClientFactory.CreateClient<ResourceManagementClient>(profile, AzureEnvironment.Endpoint.ResourceManager),
                new GalleryTemplatesClient(profile.Context),
                // TODO: http://vstfrd:8080/Azure/RD/_workitems#_a=edit&id=3247094
                //AzureSession.ClientFactory.CreateClient<EventsClient>(context, AzureEnvironment.Endpoint.ResourceManager),
                AzureSession.ClientFactory.CreateClient<AuthorizationManagementClient>(profile.Context, AzureEnvironment.Endpoint.ResourceManager))
        {
        }
        /// <summary>
        /// Creates new ResourcesClient instance from pre-built clients (used for testing
        /// and by the profile-based constructor).
        /// </summary>
        /// <param name="resourceManagementClient">The IResourceManagementClient instance</param>
        /// <param name="galleryTemplatesClient">The IGalleryClient instance</param>
        /// <param name="authorizationManagementClient">The management client instance</param>
        public ResourcesClient(
            IResourceManagementClient resourceManagementClient,
            GalleryTemplatesClient galleryTemplatesClient,
            // TODO: http://vstfrd:8080/Azure/RD/_workitems#_a=edit&id=3247094
            //IEventsClient eventsClient,
            IAuthorizationManagementClient authorizationManagementClient)
        {
            GalleryTemplatesClient = galleryTemplatesClient;
            // TODO: http://vstfrd:8080/Azure/RD/_workitems#_a=edit&id=3247094
            //EventsClient = eventsClient;
            AuthorizationManagementClient = authorizationManagementClient;
            this.ResourceManagementClient = resourceManagementClient;
        }
        /// <summary>
        /// Parameterless constructor for mocking. Leaves all client properties null;
        /// callers must assign them before use.
        /// </summary>
        public ResourcesClient()
        {
        }
private string GetDeploymentParameters(Hashtable templateParameterObject)
{
if (templateParameterObject != null)
{
return SerializeHashtable(templateParameterObject, addValueLayer: true);
}
else
{
return null;
}
}
        /// <summary>
        /// Serializes a Hashtable of template parameters to indented JSON.
        /// </summary>
        /// <param name="templateParameterObject">The parameters; null yields null.</param>
        /// <param name="addValueLayer">Whether to wrap each value in the "value" layer
        /// expected by the ARM deployment API.</param>
        public string SerializeHashtable(Hashtable templateParameterObject, bool addValueLayer)
        {
            if (templateParameterObject == null)
            {
                return null;
            }
            Dictionary<string, object> parametersDictionary = templateParameterObject.ToDictionary(addValueLayer);
            return JsonConvert.SerializeObject(parametersDictionary, new JsonSerializerSettings
            {
                TypeNameAssemblyFormat = FormatterAssemblyStyle.Simple,
                TypeNameHandling = TypeNameHandling.None,
                Formatting = Formatting.Indented
            });
        }
        /// <summary>
        /// Unregisters a resource provider from the current subscription.
        /// </summary>
        /// <param name="providerName">The provider namespace to unregister.</param>
        /// <exception cref="KeyNotFoundException">Thrown when the service returns no provider.</exception>
        public virtual PSResourceProvider UnregisterProvider(string providerName)
        {
            var response = this.ResourceManagementClient.Providers.Unregister(providerName);

            if (response.Provider == null)
            {
                throw new KeyNotFoundException(string.Format(ProjectResources.ResourceProviderUnregistrationFailed, providerName));
            }

            return response.Provider.ToPSResourceProvider();
        }
        /// <summary>
        /// Loads deployment template content. A template file takes precedence: an
        /// absolute URI is downloaded, any other value is read from disk. Otherwise
        /// the named gallery template is resolved and downloaded.
        /// </summary>
        private string GetTemplate(string templateFile, string galleryTemplateName)
        {
            string template;

            if (!string.IsNullOrEmpty(templateFile))
            {
                if (Uri.IsWellFormedUriString(templateFile, UriKind.Absolute))
                {
                    template = GeneralUtilities.DownloadFile(templateFile);
                }
                else
                {
                    template = FileUtilities.DataStore.ReadFileAsText(templateFile);
                }
            }
            else
            {
                // Caller contract: at least one of templateFile/galleryTemplateName is set.
                Debug.Assert(!string.IsNullOrEmpty(galleryTemplateName));
                string templateUri = GalleryTemplatesClient.GetGalleryTemplateFile(galleryTemplateName);
                template = GeneralUtilities.DownloadFile(templateUri);
            }

            return template;
        }
        /// <summary>
        /// Creates or updates a resource group with the given name, location and tags.
        /// Tags are validated while being converted to a dictionary.
        /// </summary>
        private ResourceGroupExtended CreateOrUpdateResourceGroup(string name, string location, Hashtable[] tags)
        {
            Dictionary<string, string> tagDictionary = TagsConversionHelper.CreateTagDictionary(tags, validate: true);

            var result = ResourceManagementClient.ResourceGroups.CreateOrUpdate(name,
                new ResourceGroup
                {
                    Location = location,
                    Tags = tagDictionary
                });

            return result.ResourceGroup;
        }
private void WriteVerbose(string progress)
{
if (VerboseLogger != null)
{
VerboseLogger(progress);
}
}
private void WriteWarning(string warning)
{
if (WarningLogger != null)
{
WarningLogger(warning);
}
}
private void WriteError(string error)
{
if (ErrorLogger != null)
{
ErrorLogger(error);
}
}
        /// <summary>
        /// Waits for the deployment to reach a terminal state (Canceled, Succeeded or
        /// Failed), logging operation progress along the way. Resets the shared
        /// <c>operations</c> accumulator before starting.
        /// </summary>
        private DeploymentExtended ProvisionDeploymentStatus(string resourceGroup, string deploymentName, Deployment deployment)
        {
            operations = new List<DeploymentOperation>();

            return WaitDeploymentStatus(
                resourceGroup,
                deploymentName,
                deployment,
                WriteDeploymentProgress,
                ProvisioningState.Canceled,
                ProvisioningState.Succeeded,
                ProvisioningState.Failed);
        }
        /// <summary>
        /// Fetches all deployment operations (following pagination), logs each operation
        /// not seen before — verbose for normal states, error for failures — and
        /// remembers them in the shared <c>operations</c> list so they are reported once.
        /// </summary>
        private void WriteDeploymentProgress(string resourceGroup, string deploymentName, Deployment deployment)
        {
            const string normalStatusFormat = "Resource {0} '{1}' provisioning status is {2}";
            const string failureStatusFormat = "Resource {0} '{1}' failed with message '{2}'";
            List<DeploymentOperation> newOperations;
            DeploymentOperationsListResult result;

            result = ResourceManagementClient.DeploymentOperations.List(resourceGroup, deploymentName, null);
            newOperations = GetNewOperations(operations, result.Operations);
            operations.AddRange(newOperations);

            // Follow the pagination links until all operations are collected.
            while (!string.IsNullOrEmpty(result.NextLink))
            {
                result = ResourceManagementClient.DeploymentOperations.ListNext(result.NextLink);
                newOperations = GetNewOperations(operations, result.Operations);
                operations.AddRange(newOperations);
            }

            foreach (DeploymentOperation operation in newOperations)
            {
                string statusMessage;

                if (operation.Properties.ProvisioningState != ProvisioningState.Failed)
                {
                    statusMessage = string.Format(normalStatusFormat,
                        operation.Properties.TargetResource.ResourceType,
                        operation.Properties.TargetResource.ResourceName,
                        operation.Properties.ProvisioningState.ToLower());

                    WriteVerbose(statusMessage);
                }
                else
                {
                    // Extract a readable message from the XML/JSON error payload.
                    string errorMessage = ParseErrorMessage(operation.Properties.StatusMessage);

                    statusMessage = string.Format(failureStatusFormat,
                        operation.Properties.TargetResource.ResourceType,
                        operation.Properties.TargetResource.ResourceName,
                        errorMessage);

                    WriteError(statusMessage);
                }
            }
        }
public static string ParseErrorMessage(string statusMessage)
{
CloudError error = CloudException.ParseXmlOrJsonError(statusMessage);
if (error.Message == null)
{
return error.OriginalMessage;
}
else
{
return error.Message;
}
}
private DeploymentExtended WaitDeploymentStatus(
string resourceGroup,
string deploymentName,
Deployment basicDeployment,
Action<string, string, Deployment> job,
params string[] status)
{
DeploymentExtended deployment;
do
{
if (job != null)
{
job(resourceGroup, deploymentName, basicDeployment);
}
deployment = ResourceManagementClient.Deployments.Get(resourceGroup, deploymentName).Deployment;
Thread.Sleep(2000);
} while (!status.Any(s => s.Equals(deployment.Properties.ProvisioningState, StringComparison.OrdinalIgnoreCase)));
return deployment;
}
private List<DeploymentOperation> GetNewOperations(List<DeploymentOperation> old, IList<DeploymentOperation> current)
{
List<DeploymentOperation> newOperations = new List<DeploymentOperation>();
foreach (DeploymentOperation operation in current)
{
DeploymentOperation operationWithSameIdAndProvisioningState = old.Find(o => o.OperationId.Equals(operation.OperationId) && o.Properties.ProvisioningState.Equals(operation.Properties.ProvisioningState));
if (operationWithSameIdAndProvisioningState == null)
{
newOperations.Add(operation);
}
}
return newOperations;
}
        /// <summary>
        /// Builds an Incremental-mode Deployment object from the validation parameters,
        /// resolving the template (file or gallery) and serializing the parameters.
        /// </summary>
        private Deployment CreateBasicDeployment(ValidatePSResourceGroupDeploymentParameters parameters)
        {
            Deployment deployment = new Deployment
            {
                Properties = new DeploymentProperties {
                    Mode = DeploymentMode.Incremental,
                    Template = GetTemplate(parameters.TemplateFile, parameters.GalleryTemplateIdentity),
                    Parameters = GetDeploymentParameters(parameters.TemplateParameterObject)
                }
            };

            return deployment;
        }
        /// <summary>
        /// Validates the deployment against the service and wraps the result in a
        /// TemplateValidationInfo.
        /// </summary>
        private TemplateValidationInfo CheckBasicDeploymentErrors(string resourceGroup, string deploymentName, Deployment deployment)
        {
            DeploymentValidateResponse validationResult = ResourceManagementClient.Deployments.Validate(
                resourceGroup,
                deploymentName,
                deployment);

            return new TemplateValidationInfo(validationResult);
        }
internal List<PSPermission> GetResourceGroupPermissions(string resourceGroup)
{
PermissionGetResult permissionsResult = AuthorizationManagementClient.Permissions.ListForResourceGroup(resourceGroup);
if (permissionsResult != null)
{
return permissionsResult.Permissions.Select(p => p.ToPSPermission()).ToList();
}
return null;
}
        /// <summary>
        /// Lists the caller's permissions on a specific resource, or null when the
        /// service returns no result.
        /// </summary>
        internal List<PSPermission> GetResourcePermissions(ResourceIdentifier identity)
        {
            PermissionGetResult permissionsResult = AuthorizationManagementClient.Permissions.ListForResource(
                    identity.ResourceGroupName,
                    identity.ToResourceIdentity());

            if (permissionsResult != null)
            {
                return permissionsResult.Permissions.Select(p => p.ToPSPermission()).ToList();
            }

            return null;
        }
        /// <summary>
        /// Lists registered resource providers as PowerShell models, optionally
        /// filtered to a single provider namespace.
        /// </summary>
        public virtual PSResourceProvider[] ListPSResourceProviders(string providerName = null)
        {
            return this.ListResourceProviders(providerName: providerName, listAvailable: false)
                .Select(provider => provider.ToPSResourceProvider())
                .ToArray();
        }
        /// <summary>
        /// Lists resource providers as PowerShell models; when
        /// <paramref name="listAvailable"/> is true, unregistered providers are included.
        /// </summary>
        public virtual PSResourceProvider[] ListPSResourceProviders(bool listAvailable)
        {
            return this.ListResourceProviders(providerName: null, listAvailable: listAvailable)
                .Select(provider => provider.ToPSResourceProvider())
                .ToArray();
        }
        /// <summary>
        /// Lists resource providers. When a name is given, returns that single provider
        /// (throwing if not found). Otherwise pages through all providers, optionally
        /// filtering to registered ones only.
        /// </summary>
        /// <exception cref="KeyNotFoundException">Thrown when a named provider does not exist.</exception>
        public virtual List<Provider> ListResourceProviders(string providerName = null, bool listAvailable = true)
        {
            if (!string.IsNullOrEmpty(providerName))
            {
                var provider = this.ResourceManagementClient.Providers.Get(providerName).Provider;

                if (provider == null)
                {
                    throw new KeyNotFoundException(string.Format(ProjectResources.ResourceProviderNotFound, providerName));
                }

                return new List<Provider> {provider};
            }
            else
            {
                var returnList = new List<Provider>();
                var tempResult = this.ResourceManagementClient.Providers.List(null);
                returnList.AddRange(tempResult.Providers);

                // Follow the pagination links until all providers are collected.
                while (!string.IsNullOrWhiteSpace(tempResult.NextLink))
                {
                    tempResult = this.ResourceManagementClient.Providers.ListNext(tempResult.NextLink);
                    returnList.AddRange(tempResult.Providers);
                }

                return listAvailable
                    ? returnList
                    : returnList.Where(this.IsProviderRegistered).ToList();
            }
        }
        /// <summary>
        /// Returns true when the provider's registration state is "Registered"
        /// (case-insensitive).
        /// </summary>
        private bool IsProviderRegistered(Provider provider)
        {
            return string.Equals(
                ResourcesClient.RegisteredStateName,
                provider.RegistrationState,
                StringComparison.InvariantCultureIgnoreCase);
        }
        /// <summary>
        /// Registers a resource provider with the current subscription.
        /// </summary>
        /// <param name="providerName">The provider namespace to register.</param>
        /// <exception cref="KeyNotFoundException">Thrown when the service returns no provider.</exception>
        public PSResourceProvider RegisterProvider(string providerName)
        {
            var response = this.ResourceManagementClient.Providers.Register(providerName);

            if (response.Provider == null)
            {
                throw new KeyNotFoundException(string.Format(ProjectResources.ResourceProviderRegistrationFailed, providerName));
            }

            return response.Provider.ToPSResourceProvider();
        }
        /// <summary>
        /// Parses an array of resource ids to extract the resource group name
        /// </summary>
        /// <param name="resourceIds">An array of resource ids</param>
        /// <exception cref="System.Management.Automation.PSArgumentException">
        /// Thrown when any id does not have the
        /// /subscriptions/{id}/resourceGroups/{name}/providers/... shape.</exception>
        public ResourceIdentifier[] ParseResourceIds(string[] resourceIds)
        {
            var splitResourceIds = resourceIds
                .Select(resourceId => resourceId.Split(new[] { '/' }, StringSplitOptions.RemoveEmptyEntries))
                .ToArray();

            // Validate shape: even number of segments, at least 8, with the fixed
            // "subscriptions"/"resourceGroups"/"providers" markers at positions 0/2/4.
            // Short-circuiting on Length < 8 keeps the index accesses safe.
            if (splitResourceIds.Any(splitResourceId => splitResourceId.Length % 2 != 0 ||
                splitResourceId.Length < 8 ||
                !string.Equals("subscriptions", splitResourceId[0], StringComparison.InvariantCultureIgnoreCase) ||
                !string.Equals("resourceGroups", splitResourceId[2], StringComparison.InvariantCultureIgnoreCase) ||
                !string.Equals("providers", splitResourceId[4], StringComparison.InvariantCultureIgnoreCase)))
            {
                throw new System.Management.Automation.PSArgumentException(ProjectResources.InvalidFormatOfResourceId);
            }

            // Duplicates are collapsed (case-insensitively) before parsing.
            return resourceIds
                .Distinct(StringComparer.InvariantCultureIgnoreCase)
                .Select(resourceId => new ResourceIdentifier(resourceId))
                .ToArray();
        }
/// <summary>
/// Get a mapping of Resource providers that support the operations API (/operations) to the operations api-version supported for that RP
/// (Current logic is to prefer the latest "non-test' api-version. If there are no such version, choose the latest one)
/// </summary>
public Dictionary<string, string> GetResourceProvidersWithOperationsSupport()
{
PSResourceProvider[] allProviders = this.ListPSResourceProviders(listAvailable: true);
Dictionary<string, string> providersSupportingOperations = new Dictionary<string, string>(StringComparer.InvariantCultureIgnoreCase);
PSResourceProviderResourceType[] providerResourceTypes = null;
foreach (PSResourceProvider provider in allProviders)
{
providerResourceTypes = provider.ResourceTypes;
if (providerResourceTypes != null && providerResourceTypes.Any())
{
PSResourceProviderResourceType operationsResourceType = providerResourceTypes.Where(r => r != null && r.ResourceTypeName == ResourcesClient.Operations).FirstOrDefault();
if (operationsResourceType != null &&
operationsResourceType.ApiVersions != null &&
operationsResourceType.ApiVersions.Any())
{
string[] allowedTestPrefixes = new[] { "-preview", "-alpha", "-beta", "-rc", "-privatepreview" };
List<string> nonTestApiVersions = new List<string>();
foreach (string apiVersion in operationsResourceType.ApiVersions)
{
bool isTestApiVersion = false;
foreach (string testPrefix in allowedTestPrefixes)
{
if (apiVersion.EndsWith(testPrefix, StringComparison.InvariantCultureIgnoreCase))
{
isTestApiVersion = true;
break;
}
}
if(isTestApiVersion == false && !nonTestApiVersions.Contains(apiVersion))
{
nonTestApiVersions.Add(apiVersion);
}
}
if(nonTestApiVersions.Any())
{
string latestNonTestApiVersion = nonTestApiVersions.OrderBy(o => o).Last();
providersSupportingOperations.Add(provider.ProviderNamespace, latestNonTestApiVersion);
}
else
{
providersSupportingOperations.Add(provider.ProviderNamespace, operationsResourceType.ApiVersions.OrderBy(o => o).Last());
}
}
}
}
return providersSupportingOperations;
}
        /// <summary>
        /// Get the list of resource provider operations for every provider specified by the identities list
        /// </summary>
        /// <param name="identities">Provider identities to query; a null list yields an empty result.</param>
        /// <returns>All operations collected from the providers that responded successfully.</returns>
        public IList<PSResourceProviderOperation> ListPSProviderOperations(IList<ResourceIdentity> identities)
        {
            var allProviderOperations = new List<PSResourceProviderOperation>();
            Task<ResourceProviderOperationDetailListResult> task;
            if(identities != null)
            {
                foreach (var identity in identities)
                {
                    try
                    {
                        task = this.ResourceManagementClient.ResourceProviderOperationDetails.ListAsync(identity);
                        // Bounded wait: a provider that does not answer within 10 seconds is skipped.
                        task.Wait(10000);
                        // Add operations for this provider.
                        // If the wait timed out, IsCompleted is false and this provider's
                        // operations are silently omitted from the result.
                        if (task.IsCompleted)
                        {
                            allProviderOperations.AddRange(task.Result.ResourceProviderOperationDetails.Select(op => op.ToPSResourceProviderOperation()));
                        }
                    }
                    catch(AggregateException ae)
                    {
                        // Deliberate swallow: a single broken provider must not fail the whole listing.
                        AggregateException flattened = ae.Flatten();
                        foreach (Exception inner in flattened.InnerExceptions)
                        {
                            // Do nothing for now - this is just a mitigation against one provider which hasn't implemented the operations API correctly
                            //WriteWarning(inner.ToString());
                        }
                    }
                }
            }
            return allProviderOperations;
        }
}
} | praveennet/azure-powershell | src/ResourceManager/Resources/Commands.Resources/Models.ResourceGroups/ResourceClient.cs | C# | apache-2.0 | 23,374 |
/*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.orient.core.engine.local;
import java.util.Map;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.engine.OEngineAbstract;
import com.orientechnologies.orient.core.exception.ODatabaseException;
import com.orientechnologies.orient.core.storage.OStorage;
import com.orientechnologies.orient.core.storage.cache.local.O2QCache;
import com.orientechnologies.orient.core.storage.impl.local.paginated.OLocalPaginatedStorage;
/**
 * "plocal" storage engine: creates {@link OLocalPaginatedStorage} instances that all
 * share a single 2Q read cache owned by this engine.
 *
 * @author Andrey Lomakin
 * @since 28.03.13
 */
public class OEngineLocalPaginated extends OEngineAbstract {
  public static final String NAME = "plocal";
  // Read cache shared by every storage created through this engine.
  private final O2QCache readCache;
  public OEngineLocalPaginated() {
    // Cache size (bytes) = DISK_CACHE_SIZE (MB) reduced by the percentage reserved for
    // the write cache (DISK_WRITE_CACHE_PART); page size is configured in KB.
    readCache = new O2QCache(
        (long) (OGlobalConfiguration.DISK_CACHE_SIZE.getValueAsLong() * 1024 * 1024 * ((100 - OGlobalConfiguration.DISK_WRITE_CACHE_PART
            .getValueAsInteger()) / 100.0)), OGlobalConfiguration.DISK_CACHE_PAGE_SIZE.getValueAsInteger() * 1024, true);
    try {
      // Best-effort: failure to register the JMX MBean is logged but does not prevent startup.
      readCache.registerMBean();
    } catch (Exception e) {
      OLogManager.instance().error(this, "MBean for read cache cannot be registered", e);
    }
  }
  /**
   * Creates a paginated local storage bound to this engine's shared read cache.
   */
  public OStorage createStorage(final String dbName, final Map<String, String> configuration) {
    try {
      // GET THE STORAGE
      return new OLocalPaginatedStorage(dbName, dbName, getMode(configuration), generateStorageId(), readCache);
    } catch (Throwable t) {
      // NOTE(review): OLogManager.error is given ODatabaseException.class — presumably it
      // rethrows as that type; confirm, otherwise this method returns null on failure.
      OLogManager.instance().error(this,
          "Error on opening database: " + dbName + ". Current location is: " + new java.io.File(".").getAbsolutePath(), t,
          ODatabaseException.class);
    }
    return null;
  }
  public String getName() {
    return NAME;
  }
  public boolean isShared() {
    return true;
  }
  @Override
  public void shutdown() {
    super.shutdown();
    // Drop cached pages, then unregister the MBean (again best-effort).
    readCache.clear();
    try {
      readCache.unregisterMBean();
    } catch (Exception e) {
      OLogManager.instance().error(this, "MBean for read cache cannot be unregistered", e);
    }
  }
}
| sanyaade-g2g-repos/orientdb | core/src/main/java/com/orientechnologies/orient/core/engine/local/OEngineLocalPaginated.java | Java | apache-2.0 | 2,893 |
REM Development convenience script: delegates to the main build script.
REM other targets are:
REM 'build'
REM 'test'
REM 'test-integration'
@ECHO OFF
REM Runs both the unit-test and integration-test targets.
call build test-integration
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.storage.am.common.api;
/**
 * Factory for {@link ITreeIndexMetadataFrame} instances.
 */
@FunctionalInterface
public interface ITreeIndexMetadataFrameFactory {
    /**
     * Creates a new metadata frame instance.
     *
     * @return a fresh {@link ITreeIndexMetadataFrame}
     */
    ITreeIndexMetadataFrame createFrame();
}
| ty1er/incubator-asterixdb | hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexMetadataFrameFactory.java | Java | apache-2.0 | 976 |
// Copyright 2017 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using Google.Api.Gax;
using System;
namespace Google.Cloud.Bigtable.V2
{
/// <summary>
/// A version which uniquely identifies a cell within a column.
/// </summary>
/// <remarks>
/// <para>
/// Note: version values are stored on the server as if they are microseconds since the Unix epoch.
/// However, the server only supports millisecond granularity, so the server only allows microseconds
/// in multiples of 1,000. <see cref="BigtableVersion"/> attempts to hide this complexity by exposing
/// its underlying <see cref="Value"/> in terms of milliseconds, so if desired, a custom versioning
/// scheme of 1, 2, ... can be used rather than 1000, 2000, ... However, access to the underlying
/// microsecond value is still provided via <see cref="Micros"/>.
/// </para>
/// <para>
/// Note: when using ReadModifyWriteRow, modified columns automatically use a server version, which
/// is based on the current timestamp since the Unix epoch. For those columns, other reads and writes
/// should use <see cref="BigtableVersion"/> values constructed from DateTime values, as opposed to
/// using a custom versioning scheme with 64-bit values.
/// </para>
/// </remarks>
    public struct BigtableVersion : IComparable, IComparable<BigtableVersion>, IEquatable<BigtableVersion>
    {
        // Unit conversion constants: the server stores versions as microseconds.
        private const long MillisPerMicro = 1000;
        private const long TicksPerMicro = 10;
        private const long TicksPerMilli = TicksPerMicro * MillisPerMicro;
        // Visible for testing
        internal static readonly DateTime UnixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
        // The version expressed as microseconds since the Unix epoch (always a multiple of 1,000).
        private long _micros;
        // value == -1 is a sentinel meaning "use DateTime.UtcNow"; it is only honored in
        // millisecond mode (valueIsMillis == true). In microsecond mode -1 is stored as-is.
        private BigtableVersion(long value, bool valueIsMillis)
        {
            if (valueIsMillis)
            {
                GaxPreconditions.CheckArgumentRange(value, nameof(value), -1, long.MaxValue / MillisPerMicro);
                _micros = value == -1 ? MicrosFromTimestamp(DateTime.UtcNow) : value * MillisPerMicro;
            }
            else
            {
                GaxPreconditions.CheckArgumentRange(value, nameof(value), -1, long.MaxValue);
                _micros = value;
            }
        }
        /// <summary>
        /// Creates a new <see cref="BigtableVersion"/> value from a 64-bit value.
        /// </summary>
        /// <remarks>
        /// See the remarks on <see cref="BigtableVersion"/> for how version values relate to
        /// the server's microsecond storage and for guidance on custom versioning schemes.
        /// </remarks>
        /// <param name="value">
        /// The non-negative version value, or -1 to initialize from the milliseconds of DateTime.UtcNow.
        /// Must be less than or equal to 9223372036854775.
        /// </param>
        public BigtableVersion(long value) : this(value, valueIsMillis: true) { }
        /// <summary>
        /// Creates a new <see cref="BigtableVersion"/> value from the milliseconds of a timestamp since the Unix epoch.
        /// </summary>
        /// <remarks>
        /// See the remarks on <see cref="BigtableVersion"/> for how version values relate to
        /// the server's microsecond storage and for guidance on custom versioning schemes.
        /// </remarks>
        /// <param name="timestamp">
        /// The timestamp whose milliseconds since the Unix epoch should be used as the version value. It must be specified in UTC.
        /// </param>
        public BigtableVersion(DateTime timestamp)
        {
            GaxPreconditions.CheckArgument(
                timestamp.Kind == DateTimeKind.Utc,
                nameof(timestamp),
                $"The {nameof(BigtableVersion)} timestamp must be specified in UTC.");
            GaxPreconditions.CheckArgumentRange(
                timestamp,
                nameof(timestamp),
                UnixEpoch,
                DateTime.MaxValue);
            _micros = MicrosFromTimestamp(timestamp);
        }
        // Wraps a raw microsecond value without the millisecond-multiple scaling applied
        // by the public constructors.
        internal static BigtableVersion FromMicros(long value) => new BigtableVersion(value, valueIsMillis: false);
        // Truncates the timestamp to whole milliseconds, then scales to microseconds, so the
        // result is always a multiple of 1,000 as the server requires.
        private static long MicrosFromTimestamp(DateTime timestamp) => ((timestamp.Ticks - UnixEpoch.Ticks) / TicksPerMilli) * MillisPerMicro;
        /// <summary>
        /// Gets the version value interpreted as microseconds of a timestamp since the Unix epoch.
        /// Greater version values indicate newer cell values.
        /// </summary>
        public long Micros => _micros;
        /// <summary>
        /// Gets the version value. Greater version values indicate newer cell values.
        /// </summary>
        /// <remarks>
        /// If timestamps are used as versions, this would be the milliseconds since the Unix epoch.
        /// Integer division: any sub-millisecond component of <see cref="Micros"/> is discarded.
        /// </remarks>
        public long Value => _micros / 1000;
        /// <summary>
        /// Gets the DateTime equivalent to the version assuming the value is a timestamp milliseconds value since the Unix epoch.
        /// </summary>
        /// <returns>The DateTime representing the version timestamp (always UTC).</returns>
        public DateTime ToDateTime() => new DateTime((_micros * TicksPerMicro) + UnixEpoch.Ticks, DateTimeKind.Utc);
        /// <inheritdoc />
        /// <remarks>Throws <see cref="ArgumentException"/> when <paramref name="obj"/> is not a <see cref="BigtableVersion"/> (including null).</remarks>
        public int CompareTo(object obj)
        {
            if (obj is BigtableVersion other)
            {
                return CompareTo(other);
            }
            throw new ArgumentException($"The specified object cannot be compared with {nameof(BigtableVersion)}", nameof(obj));
        }
        /// <inheritdoc />
        public int CompareTo(BigtableVersion other) => _micros.CompareTo(other._micros);
        /// <summary>
        /// Compares two nullable <see cref="BigtableVersion"/> values.
        /// </summary>
        /// <param name="x">Left value to compare</param>
        /// <param name="y">Right value to compare</param>
        /// <returns>A negative value if <paramref name="x"/> is less than <paramref name="y"/>,
        /// zero if they are equal, or a positive value if <paramref name="x"/> is greater.
        /// A null value orders before any non-null value.</returns>
        public static int Compare(BigtableVersion? x, BigtableVersion? y)
        {
            if (x == null)
            {
                return y == null ? 0 : -1;
            }
            else if (y == null)
            {
                return 1;
            }
            return x.Value.CompareTo(y.Value);
        }
        /// <inheritdoc />
        public bool Equals(BigtableVersion other) => CompareTo(other) == 0;
        /// <inheritdoc />
        public override bool Equals(object obj) => obj is BigtableVersion other && Equals(other);
        /// <inheritdoc />
        public override int GetHashCode() => _micros.GetHashCode();
        /// <inheritdoc />
        public override string ToString() => $"{nameof(BigtableVersion)}: {Value}";
        /// <summary>
        /// Operator overload to compare two <see cref="BigtableVersion"/> values.
        /// </summary>
        /// <param name="x">Left value to compare</param>
        /// <param name="y">Right value to compare</param>
        /// <returns>true if <paramref name="x"/> is less than <paramref name="y"/>; otherwise false.</returns>
        public static bool operator <(BigtableVersion x, BigtableVersion y) => x._micros < y._micros;
        /// <summary>
        /// Operator overload to compare two <see cref="BigtableVersion"/> values.
        /// </summary>
        /// <param name="x">Left value to compare</param>
        /// <param name="y">Right value to compare</param>
        /// <returns>true if <paramref name="x"/> is less than or equal <paramref name="y"/>; otherwise false.</returns>
        public static bool operator <=(BigtableVersion x, BigtableVersion y) => x._micros <= y._micros;
        /// <summary>
        /// Operator overload to compare two <see cref="BigtableVersion"/> values for equality.
        /// </summary>
        /// <param name="x">Left value to compare</param>
        /// <param name="y">Right value to compare</param>
        /// <returns>true if <paramref name="x"/> is equal to <paramref name="y"/>; otherwise false.</returns>
        public static bool operator ==(BigtableVersion x, BigtableVersion y) => x._micros == y._micros;
        /// <summary>
        /// Operator overload to compare two <see cref="BigtableVersion"/> values for inequality.
        /// </summary>
        /// <param name="x">Left value to compare</param>
        /// <param name="y">Right value to compare</param>
        /// <returns>true if <paramref name="x"/> is not equal to <paramref name="y"/>; otherwise false.</returns>
        public static bool operator !=(BigtableVersion x, BigtableVersion y) => x._micros != y._micros;
        /// <summary>
        /// Operator overload to compare two <see cref="BigtableVersion"/> values.
        /// </summary>
        /// <param name="x">Left value to compare</param>
        /// <param name="y">Right value to compare</param>
        /// <returns>true if <paramref name="x"/> is greater than or equal <paramref name="y"/>; otherwise false.</returns>
        public static bool operator >=(BigtableVersion x, BigtableVersion y) => x._micros >= y._micros;
        /// <summary>
        /// Operator overload to compare two <see cref="BigtableVersion"/> values.
        /// </summary>
        /// <param name="x">Left value to compare</param>
        /// <param name="y">Right value to compare</param>
        /// <returns>true if <paramref name="x"/> is greater than <paramref name="y"/>; otherwise false.</returns>
        public static bool operator >(BigtableVersion x, BigtableVersion y) => x._micros > y._micros;
    }
internal static class BigtableVersionExtensions
{
public static long ToTimestampMicros(this BigtableVersion? version) =>
version == null ? 0 : version.Value.Micros;
public static BigtableVersion? ToVersion(this long? timestampMillis) =>
timestampMillis == null ?
default(BigtableVersion?) :
new BigtableVersion(timestampMillis.Value);
public static BigtableVersion? ToVersion(this DateTime? timestamp) =>
timestamp == null ?
default(BigtableVersion?) :
new BigtableVersion(timestamp.Value);
}
} | googleapis/google-cloud-dotnet | apis/Google.Cloud.Bigtable.V2/Google.Cloud.Bigtable.V2/BigtableVersion.cs | C# | apache-2.0 | 12,638 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.jdbc.test;
import java.io.IOException;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.Statement;
import java.util.Iterator;
import java.util.Map;
import java.util.ServiceLoader;
import org.apache.calcite.rel.core.JoinRelType;
import org.apache.drill.common.logical.LogicalPlan;
import org.apache.drill.common.logical.PlanProperties;
import org.apache.drill.common.logical.StoragePluginConfig;
import org.apache.drill.common.logical.data.Filter;
import org.apache.drill.common.logical.data.Join;
import org.apache.drill.common.logical.data.Limit;
import org.apache.drill.common.logical.data.LogicalOperator;
import org.apache.drill.common.logical.data.Order;
import org.apache.drill.common.logical.data.Project;
import org.apache.drill.common.logical.data.Scan;
import org.apache.drill.common.logical.data.Store;
import org.apache.drill.common.logical.data.Union;
import org.apache.drill.jdbc.JdbcTestBase;
import org.apache.drill.categories.JdbcTest;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.io.Resources;
import org.junit.experimental.categories.Category;
/** Unit tests for Drill's JDBC driver. */
@Ignore // ignore for now.
@Category(JdbcTest.class)
public class JdbcDataTest extends JdbcTestBase {
private static String MODEL;
private static String EXPECTED;
@BeforeClass
public static void setupFixtures() throws IOException {
MODEL = Resources.toString(Resources.getResource("test-models.json"), Charsets.UTF_8);
EXPECTED = Resources.toString(Resources.getResource("donuts-output-data.txt"), Charsets.UTF_8);
}
  /**
   * Command-line utility to execute a logical plan.
   *
   * <p>
   * The forwarding method ensures that the IDE calls this method with the right classpath.
   * </p>
   */
  // Intentionally empty: the method exists only as a classpath-correct entry point.
  public static void main(String[] args) throws Exception {
  }
/** Load driver. */
@Test
public void testLoadDriver() throws ClassNotFoundException {
Class.forName("org.apache.drill.jdbc.Driver");
}
/**
* Load the driver using ServiceLoader
*/
@Test
public void testLoadDriverServiceLoader() {
ServiceLoader<Driver> sl = ServiceLoader.load(Driver.class);
for(Iterator<Driver> it = sl.iterator(); it.hasNext(); ) {
Driver driver = it.next();
if (driver instanceof org.apache.drill.jdbc.Driver) {
return;
}
}
Assert.fail("org.apache.drill.jdbc.Driver not found using ServiceLoader");
}
/** Load driver and make a connection. */
@Test
public void testConnect() throws Exception {
Class.forName("org.apache.drill.jdbc.Driver");
final Connection connection = DriverManager.getConnection("jdbc:drill:zk=local");
connection.close();
}
/** Load driver, make a connection, prepare a statement. */
@Test
public void testPrepare() throws Exception {
withModel(MODEL, "DONUTS").withConnection(new Function<Connection, Void>() {
@Override
public Void apply(Connection connection) {
try {
final Statement statement = connection.prepareStatement("select * from donuts");
statement.close();
return null;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
});
}
/** Simple query against JSON. */
@Test
public void testSelectJson() throws Exception {
withModel(MODEL, "DONUTS").sql("select * from donuts").returns(EXPECTED);
}
  /** Simple query against EMP table in HR database. */
  // Raw scan: each row surfaces as a single _MAP column of key/value pairs.
  @Test
  public void testSelectEmployees() throws Exception {
    withModel(MODEL, "HR")
        .sql("select * from employees")
        .returns(
            "_MAP={deptId=31, lastName=Rafferty}\n" + "_MAP={deptId=33, lastName=Jones}\n"
                + "_MAP={deptId=33, lastName=Steinberg}\n" + "_MAP={deptId=34, lastName=Robinson}\n"
                + "_MAP={deptId=34, lastName=Smith}\n" + "_MAP={lastName=John}\n");
  }
  /** Simple query against EMP table in HR database. */
  // Unlike testSelectEmployees, the "emp" view exposes typed columns, so John's
  // missing deptId appears as an explicit null.
  @Test
  public void testSelectEmpView() throws Exception {
    withModel(MODEL, "HR")
        .sql("select * from emp")
        .returns(
            "DEPTID=31; LASTNAME=Rafferty\n" + "DEPTID=33; LASTNAME=Jones\n" + "DEPTID=33; LASTNAME=Steinberg\n"
                + "DEPTID=34; LASTNAME=Robinson\n" + "DEPTID=34; LASTNAME=Smith\n" + "DEPTID=null; LASTNAME=John\n");
  }
  /** Simple query against the DEPARTMENTS table in the HR database (raw _MAP output). */
  @Test
  public void testSelectDept() throws Exception {
    withModel(MODEL, "HR")
        .sql("select * from departments")
        .returns(
            "_MAP={deptId=31, name=Sales}\n" + "_MAP={deptId=33, name=Engineering}\n"
                + "_MAP={deptId=34, name=Clerical}\n" + "_MAP={deptId=35, name=Marketing}\n");
  }
/** Query with project list. No field references yet. */
@Test
public void testProjectConstant() throws Exception {
withModel(MODEL, "DONUTS").sql("select 1 + 3 as c from donuts")
.returns("C=4\n" + "C=4\n" + "C=4\n" + "C=4\n" + "C=4\n");
}
/** Query that projects an element from the map. */
@Test
public void testProject() throws Exception {
withModel(MODEL, "DONUTS").sql("select _MAP['ppu'] as ppu from donuts")
.returns("PPU=0.55\n" + "PPU=0.69\n" + "PPU=0.55\n" + "PPU=0.69\n" + "PPU=1.0\n");
}
/** Same logic as {@link #testProject()}, but using a subquery. */
@Test
public void testProjectOnSubquery() throws Exception {
withModel(MODEL, "DONUTS").sql("select d['ppu'] as ppu from (\n" + " select _MAP as d from donuts)")
.returns("PPU=0.55\n" + "PPU=0.69\n" + "PPU=0.55\n" + "PPU=0.69\n" + "PPU=1.0\n");
}
  /** Checks the logical plan generated for a simple map-element projection. */
  @Test
  public void testProjectPlan() throws Exception {
    LogicalPlan plan = withModel(MODEL, "DONUTS")
        .sql("select _MAP['ppu'] as ppu from donuts")
        .logicalPlan();

    // Plan header: generated by the Optiq (Calcite) planner, logical plan v1.
    PlanProperties planProperties = plan.getProperties();
    Assert.assertEquals("optiq", planProperties.generator.type);
    Assert.assertEquals("na", planProperties.generator.info);
    Assert.assertEquals(1, planProperties.version);
    Assert.assertEquals(PlanProperties.PlanType.APACHE_DRILL_LOGICAL, planProperties.type);

    // Storage-engine configs are only looked up; the type assertions are disabled below.
    Map<String, StoragePluginConfig> seConfigs = plan.getStorageEngines();
    StoragePluginConfig config = seConfigs.get("donuts-json");
    // Assert.assertTrue(config != null && config instanceof ClasspathRSE.ClasspathRSEConfig);
    config = seConfigs.get("queue");
    // Assert.assertTrue(config != null && config instanceof QueueRSE.QueueRSEConfig);

    // Expected operator chain: Scan -> Project -> Store.
    Scan scan = findOnlyOperator(plan, Scan.class);
    Assert.assertEquals("donuts-json", scan.getStorageEngine());
    Project project = findOnlyOperator(plan, Project.class);
    Assert.assertEquals(1, project.getSelections().size());
    Assert.assertEquals(Scan.class, project.getInput().getClass());
    Store store = findOnlyOperator(plan, Store.class);
    Assert.assertEquals("queue", store.getStorageEngine());
    Assert.assertEquals("output sink", store.getMemo());
    Assert.assertEquals(Project.class, store.getInput().getClass());
  }
  /**
   * Query with subquery, filter, and projection of one real and one nonexistent field from a map field.
   */
  // The nonexistent field 'xx' projects as null rather than failing the query.
  @Test
  public void testProjectFilterSubquery() throws Exception {
    withModel(MODEL, "DONUTS")
        .sql(
            "select d['name'] as name, d['xx'] as xx from (\n" + "  select _MAP as d from donuts)\n"
                + "where cast(d['ppu'] as double) > 0.6")
        .returns("NAME=Raised; XX=null\n" + "NAME=Filled; XX=null\n" + "NAME=Apple Fritter; XX=null\n");
  }
  // Returns a lazy Guava view over the plan's sorted operators filtered to exactly
  // operatorClazz. getClass() equality is used, so subclasses are deliberately excluded.
  // The cast is safe because the predicate guarantees every element is a T.
  private static <T extends LogicalOperator> Iterable<T> findOperator(LogicalPlan plan, final Class<T> operatorClazz) {
    return (Iterable<T>) Iterables.filter(plan.getSortedOperators(), new Predicate<LogicalOperator>() {
      @Override
      public boolean apply(LogicalOperator input) {
        return input.getClass().equals(operatorClazz);
      }
    });
  }
private static <T extends LogicalOperator> T findOnlyOperator(LogicalPlan plan, final Class<T> operatorClazz) {
return Iterables.getOnlyElement(findOperator(plan, operatorClazz));
}
  // Verifies the logical plan for a filtered subquery projection:
  // expected operator chain is Scan -> Filter -> Project -> Project -> Store.
  @Test
  public void testProjectFilterSubqueryPlan() throws Exception {
    LogicalPlan plan = withModel(MODEL, "DONUTS")
        .sql(
            "select d['name'] as name, d['xx'] as xx from (\n" + "  select _MAP['donuts'] as d from donuts)\n"
                + "where cast(d['ppu'] as double) > 0.6")
        .logicalPlan();

    PlanProperties planProperties = plan.getProperties();
    Assert.assertEquals("optiq", planProperties.generator.type);
    Assert.assertEquals("na", planProperties.generator.info);
    Assert.assertEquals(1, planProperties.version);
    Assert.assertEquals(PlanProperties.PlanType.APACHE_DRILL_LOGICAL, planProperties.type);

    // Storage-engine configs are fetched but the type assertions are disabled below.
    Map<String, StoragePluginConfig> seConfigs = plan.getStorageEngines();
    StoragePluginConfig config = seConfigs.get("donuts-json");
    // Assert.assertTrue(config != null && config instanceof ClasspathRSE.ClasspathRSEConfig);
    config = seConfigs.get("queue");
    // Assert.assertTrue(config != null && config instanceof QueueRSE.QueueRSEConfig);

    Scan scan = findOnlyOperator(plan, Scan.class);
    Assert.assertEquals("donuts-json", scan.getStorageEngine());
    Filter filter = findOnlyOperator(plan, Filter.class);
    Assert.assertTrue(filter.getInput() instanceof Scan);
    // Two projects: the subquery's single-column projection feeding the outer two-column one.
    Project[] projects = Iterables.toArray(findOperator(plan, Project.class), Project.class);
    Assert.assertEquals(2, projects.length);
    Assert.assertEquals(1, projects[0].getSelections().size());
    Assert.assertEquals(Filter.class, projects[0].getInput().getClass());
    Assert.assertEquals(2, projects[1].getSelections().size());
    Assert.assertEquals(Project.class, projects[1].getInput().getClass());
    Store store = findOnlyOperator(plan, Store.class);
    Assert.assertEquals("queue", store.getStorageEngine());
    Assert.assertEquals("output sink", store.getMemo());
    Assert.assertEquals(Project.class, store.getInput().getClass());
  }
  /** Query that projects one field. (Disabled; uses sugared syntax.) */
  // NOTE(review): the expected output ("C=4" rows) looks copy-pasted from
  // testProjectConstant and does not match a ppu projection — revisit when enabling.
  @Test @Ignore
  public void testProjectNestedFieldSugared() throws Exception {
    withModel(MODEL, "DONUTS").sql("select donuts.ppu from donuts")
        .returns("C=4\n" + "C=4\n" + "C=4\n" + "C=4\n" + "C=4\n");
  }
/** Query with filter. No field references yet. */
@Test
public void testFilterConstantFalse() throws Exception {
withModel(MODEL, "DONUTS").sql("select * from donuts where 3 > 4").returns("");
}
@Test
public void testFilterConstant() throws Exception {
withModel(MODEL, "DONUTS").sql("select * from donuts where 3 < 4").returns(EXPECTED);
}
  // Disabled pending DRILL-57; exercises a bare VALUES clause with no table scan.
  @Ignore
  @Test
  public void testValues() throws Exception {
    withModel(MODEL, "DONUTS").sql("values (1)").returns("EXPR$0=1\n");
    // Enable when https://issues.apache.org/jira/browse/DRILL-57 fixed
    // .planContains("store");
  }
  // Inner join: John (no deptId) and Marketing (no employees) are excluded.
  @Test
  public void testJoin() throws Exception {
    Join join = withModel(MODEL, "HR")
        .sql("select * from emp join dept on emp.deptId = dept.deptId")
        .returnsUnordered("DEPTID=31; LASTNAME=Rafferty; DEPTID0=31; NAME=Sales",
            "DEPTID=33; LASTNAME=Jones; DEPTID0=33; NAME=Engineering",
            "DEPTID=33; LASTNAME=Steinberg; DEPTID0=33; NAME=Engineering",
            "DEPTID=34; LASTNAME=Robinson; DEPTID0=34; NAME=Clerical",
            "DEPTID=34; LASTNAME=Smith; DEPTID0=34; NAME=Clerical").planContains(Join.class);
    Assert.assertEquals(JoinRelType.INNER, join.getJoinType());
  }
  // Left join: John is retained with null department columns; Marketing is still excluded.
  @Test
  public void testLeftJoin() throws Exception {
    Join join = withModel(MODEL, "HR")
        .sql("select * from emp left join dept on emp.deptId = dept.deptId")
        .returnsUnordered("DEPTID=31; LASTNAME=Rafferty; DEPTID0=31; NAME=Sales",
            "DEPTID=33; LASTNAME=Jones; DEPTID0=33; NAME=Engineering",
            "DEPTID=33; LASTNAME=Steinberg; DEPTID0=33; NAME=Engineering",
            "DEPTID=34; LASTNAME=Robinson; DEPTID0=34; NAME=Clerical",
            "DEPTID=34; LASTNAME=Smith; DEPTID0=34; NAME=Clerical",
            "DEPTID=null; LASTNAME=John; DEPTID0=null; NAME=null").planContains(Join.class);
    Assert.assertEquals(JoinRelType.LEFT, join.getJoinType());
  }
  /**
   * Right join is tricky because Drill's "join" operator only supports "left", so we have to flip inputs.
   */
  // NOTE(review): "xx" is a placeholder expected value; the test is @Ignore'd and
  // expects LEFT (flipped inputs) rather than RIGHT — fill in real rows before enabling.
  @Test @Ignore
  public void testRightJoin() throws Exception {
    Join join = withModel(MODEL, "HR").sql("select * from emp right join dept on emp.deptId = dept.deptId")
        .returnsUnordered("xx").planContains(Join.class);
    Assert.assertEquals(JoinRelType.LEFT, join.getJoinType());
  }
  // Full outer join: both the department-less John and the employee-less Marketing survive.
  @Test
  public void testFullJoin() throws Exception {
    Join join = withModel(MODEL, "HR")
        .sql("select * from emp full join dept on emp.deptId = dept.deptId")
        .returnsUnordered("DEPTID=31; LASTNAME=Rafferty; DEPTID0=31; NAME=Sales",
            "DEPTID=33; LASTNAME=Jones; DEPTID0=33; NAME=Engineering",
            "DEPTID=33; LASTNAME=Steinberg; DEPTID0=33; NAME=Engineering",
            "DEPTID=34; LASTNAME=Robinson; DEPTID0=34; NAME=Clerical",
            "DEPTID=34; LASTNAME=Smith; DEPTID0=34; NAME=Clerical",
            "DEPTID=null; LASTNAME=John; DEPTID0=null; NAME=null",
            "DEPTID=null; LASTNAME=null; DEPTID0=35; NAME=Marketing").planContains(Join.class);
    Assert.assertEquals(JoinRelType.FULL, join.getJoinType());
  }
/**
 * Join on subquery; also tests that if a field of the same name exists in both inputs, both fields
 * make it through the join (the duplicate is renamed with a "0" suffix, e.g. NAME0).
 */
@Test
public void testJoinOnSubquery() throws Exception {
  Join join = withModel(MODEL, "HR")
      .sql(
          "select * from (\n" + "select deptId, lastname, 'x' as name from emp) as e\n"
              + " join dept on e.deptId = dept.deptId")
      .returnsUnordered("DEPTID=31; LASTNAME=Rafferty; NAME=x; DEPTID0=31; NAME0=Sales",
          "DEPTID=33; LASTNAME=Jones; NAME=x; DEPTID0=33; NAME0=Engineering",
          "DEPTID=33; LASTNAME=Steinberg; NAME=x; DEPTID0=33; NAME0=Engineering",
          "DEPTID=34; LASTNAME=Robinson; NAME=x; DEPTID0=34; NAME0=Clerical",
          "DEPTID=34; LASTNAME=Smith; NAME=x; DEPTID0=34; NAME0=Clerical").planContains(Join.class);
  Assert.assertEquals(JoinRelType.INNER, join.getJoinType());
}
/** Tests that one of the FoodMart tables is present, filtering on a field inside the _MAP column. */
@Test @Ignore
public void testFoodMart() throws Exception {
  withModel(MODEL, "FOODMART")
      .sql("select * from product_class where cast(_map['product_class_id'] as integer) < 3")
      .returnsUnordered(
          "_MAP={product_category=Seafood, product_class_id=2, product_department=Seafood, product_family=Food, product_subcategory=Shellfish}",
          "_MAP={product_category=Specialty, product_class_id=1, product_department=Produce, product_family=Food, product_subcategory=Nuts}");
}
/** UNION ALL keeps duplicates, so the plan's {@link Union} operator must not be distinct. */
@Test
public void testUnionAll() throws Exception {
  final String sql = "select deptId from dept\nunion all\nselect deptId from emp";
  final Union union = withModel(MODEL, "HR")
      .sql(sql)
      .returnsUnordered("DEPTID=31", "DEPTID=33", "DEPTID=34", "DEPTID=35", "DEPTID=null")
      .planContains(Union.class);
  Assert.assertFalse(union.isDistinct());
}
/** Plain UNION removes duplicates, so the plan's {@link Union} operator must be distinct. */
@Test
public void testUnion() throws Exception {
  Union union = withModel(MODEL, "HR")
      .sql("select deptId from dept\n" + "union\n" + "select deptId from emp")
      .returnsUnordered("DEPTID=31", "DEPTID=33", "DEPTID=34", "DEPTID=35", "DEPTID=null")
      .planContains(Union.class);
  Assert.assertTrue(union.isDistinct());
}
@Test
public void testOrderByDescNullsFirst() throws Exception {
  // desc nulls first: the null deptId row must sort before all non-null rows
  // (original comment said "desc nulls last", which contradicted the query)
  withModel(MODEL, "HR")
      .sql("select * from emp order by deptId desc nulls first")
      .returns(
          "DEPTID=null; LASTNAME=John\n" + "DEPTID=34; LASTNAME=Robinson\n" + "DEPTID=34; LASTNAME=Smith\n"
              + "DEPTID=33; LASTNAME=Jones\n" + "DEPTID=33; LASTNAME=Steinberg\n" + "DEPTID=31; LASTNAME=Rafferty\n")
      .planContains(Order.class);
}
@Test
public void testOrderByDescNullsLast() throws Exception {
  // desc nulls last: the null deptId row must sort after all non-null rows
  // (original comment said "desc nulls first", which contradicted the query)
  withModel(MODEL, "HR")
      .sql("select * from emp order by deptId desc nulls last")
      .returns(
          "DEPTID=34; LASTNAME=Robinson\n" + "DEPTID=34; LASTNAME=Smith\n" + "DEPTID=33; LASTNAME=Jones\n"
              + "DEPTID=33; LASTNAME=Steinberg\n" + "DEPTID=31; LASTNAME=Rafferty\n" + "DEPTID=null; LASTNAME=John\n")
      .planContains(Order.class);
}
/** Ignored: documents the intended "desc implies nulls first" behavior that Drill does not yet honor. */
@Test @Ignore
public void testOrderByDesc() throws Exception {
  // desc is implicitly "nulls first" (i.e. null sorted as +inf)
  // Current behavior is to sort nulls last. This is wrong.
  withModel(MODEL, "HR")
      .sql("select * from emp order by deptId desc")
      .returns(
          "DEPTID=null; LASTNAME=John\n" + "DEPTID=34; LASTNAME=Robinson\n" + "DEPTID=34; LASTNAME=Smith\n"
              + "DEPTID=33; LASTNAME=Jones\n" + "DEPTID=33; LASTNAME=Steinberg\n" + "DEPTID=31; LASTNAME=Rafferty\n")
      .planContains(Order.class);
}
@Test
public void testOrderBy() throws Exception {
  // no sort order specified is implicitly "asc", and asc is "nulls last"
  withModel(MODEL, "HR")
      .sql("select * from emp order by deptId")
      .returns(
          "DEPTID=31; LASTNAME=Rafferty\n"
              + "DEPTID=33; LASTNAME=Jones\n"
              + "DEPTID=33; LASTNAME=Steinberg\n"
              + "DEPTID=34; LASTNAME=Robinson\n"
              + "DEPTID=34; LASTNAME=Smith\n"
              + "DEPTID=null; LASTNAME=John\n")
      .planContains(Order.class);
}
/** LIMIT without ORDER BY: takes the first two rows and plans a {@link Limit} node. */
@Test
public void testLimit() throws Exception {
  withModel(MODEL, "HR")
      .sql("select LASTNAME from emp limit 2")
      .returns("LASTNAME=Rafferty\n" +
          "LASTNAME=Jones")
      .planContains(Limit.class);
}
/** ORDER BY + LIMIT must produce both a {@link Limit} and an {@link Order} node in the plan. */
@Test
public void testLimitOrderBy() throws Exception {
  final TestDataConnection connection = withModel(MODEL, "HR")
      .sql("select LASTNAME from emp order by LASTNAME limit 2")
      .returns("LASTNAME=John\nLASTNAME=Jones");
  connection.planContains(Limit.class);
  connection.planContains(Order.class);
}
/** OFFSET without FETCH: skips the first three sorted rows and returns the rest. */
@Test
public void testOrderByWithOffset() throws Exception {
  withModel(MODEL, "HR")
      .sql("select LASTNAME from emp order by LASTNAME asc offset 3")
      .returns("LASTNAME=Robinson\n" +
          "LASTNAME=Smith\n" +
          "LASTNAME=Steinberg")
      .planContains(Limit.class);
}
/** OFFSET + FETCH NEXT: skips three sorted rows, then returns exactly two. */
@Test
public void testOrderByWithOffsetAndFetch() throws Exception {
  withModel(MODEL, "HR")
      .sql("select LASTNAME from emp order by LASTNAME asc offset 3 fetch next 2 rows only")
      .returns("LASTNAME=Robinson\n" +
          "LASTNAME=Smith")
      .planContains(Limit.class);
}
}
| parthchandra/incubator-drill | exec/jdbc/src/test/java/org/apache/drill/jdbc/test/JdbcDataTest.java | Java | apache-2.0 | 19,946 |
"""Sensor to collect the reference daily prices of electricity ('PVPC') in Spain."""
import logging
from random import randint
from typing import Optional
from aiopvpc import PVPCData
from homeassistant import config_entries
from homeassistant.const import CONF_NAME, ENERGY_KILO_WATT_HOUR
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_call_later, async_track_time_change
from homeassistant.helpers.restore_state import RestoreEntity
import homeassistant.util.dt as dt_util
from .const import ATTR_TARIFF
_LOGGER = logging.getLogger(__name__)

# Attribute key used for per-hour price data.
ATTR_PRICE = "price"
# Entity presentation: euro icon and €/kWh unit.
ICON = "mdi:currency-eur"
UNIT = f"€/{ENERGY_KILO_WATT_HOUR}"

# Seconds allowed for each PVPC API request before giving up.
_DEFAULT_TIMEOUT = 10
async def async_setup_entry(
    hass: HomeAssistant, config_entry: config_entries.ConfigEntry, async_add_entities
):
    """Set up the electricity price sensor from config_entry."""
    # The handler encapsulates downloading and parsing of the PVPC price data.
    price_handler = PVPCData(
        tariff=config_entry.data[ATTR_TARIFF],
        local_timezone=hass.config.time_zone,
        websession=async_get_clientsession(hass),
        logger=_LOGGER,
        timeout=_DEFAULT_TIMEOUT,
    )
    sensor = ElecPriceSensor(
        config_entry.data[CONF_NAME], config_entry.unique_id, price_handler
    )
    # update_before_add=False: the sensor refreshes itself via its own trackers.
    async_add_entities([sensor], False)
class ElecPriceSensor(RestoreEntity):
    """Class to hold the prices of electricity as a sensor."""

    # Declarative entity attributes: €/kWh unit, euro icon, and no polling
    # (updates are pushed by the time trackers registered below).
    unit_of_measurement = UNIT
    icon = ICON
    should_poll = False

    def __init__(self, name, unique_id, pvpc_data_handler):
        """Initialize the sensor object."""
        self._name = name
        self._unique_id = unique_id
        # Handler that downloads the prices and derives state + attributes.
        self._pvpc_data = pvpc_data_handler
        # Consecutive failed price downloads; reset after the first good one.
        self._num_retries = 0
        # Un-listen callbacks returned by async_track_time_change;
        # populated in async_added_to_hass.
        self._hourly_tracker = None
        self._price_tracker = None

    async def async_will_remove_from_hass(self) -> None:
        """Cancel listeners for sensor updates."""
        # NOTE(review): assumes async_added_to_hass ran and set both trackers;
        # otherwise these are still None and calling them raises — confirm.
        self._hourly_tracker()
        self._price_tracker()

    async def async_added_to_hass(self):
        """Handle entity which will be added."""
        await super().async_added_to_hass()
        # Restore the last known state so a value shows right after restart.
        state = await self.async_get_last_state()
        if state:
            self._pvpc_data.state = state.state

        # Update 'state' value in hour changes
        self._hourly_tracker = async_track_time_change(
            self.hass, self.update_current_price, second=[0], minute=[0]
        )
        # Update prices at random time, 2 times/hour (don't want to upset API)
        random_minute = randint(1, 29)
        mins_update = [random_minute, random_minute + 30]
        self._price_tracker = async_track_time_change(
            self.hass, self.async_update_prices, second=[0], minute=mins_update
        )
        _LOGGER.debug(
            "Setup of price sensor %s (%s) with tariff '%s', "
            "updating prices each hour at %s min",
            self.name,
            self.entity_id,
            self._pvpc_data.tariff,
            mins_update,
        )
        # Prime the data and state immediately instead of waiting for trackers.
        await self.async_update_prices(dt_util.utcnow())
        self.update_current_price(dt_util.utcnow())

    @property
    def unique_id(self) -> Optional[str]:
        """Return a unique ID."""
        return self._unique_id

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._pvpc_data.state

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return self._pvpc_data.state_available

    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        return self._pvpc_data.attributes

    @callback
    def update_current_price(self, now):
        """Update the sensor state, by selecting the current price for this hour."""
        self._pvpc_data.process_state_and_attributes(now)
        self.async_write_ha_state()

    async def async_update_prices(self, now):
        """Update electricity prices from the ESIOS API."""
        prices = await self._pvpc_data.async_update_prices(now)
        if not prices and self._pvpc_data.source_available:
            # Bad download while the source was believed healthy: retry with a
            # linearly growing delay, and mark the source unavailable after the
            # third consecutive failure.
            self._num_retries += 1
            if self._num_retries > 2:
                _LOGGER.warning(
                    "%s: repeated bad data update, mark component as unavailable source",
                    self.entity_id,
                )
                self._pvpc_data.source_available = False
                return

            retry_delay = 2 * self._num_retries * self._pvpc_data.timeout
            _LOGGER.debug(
                "%s: Bad update[retry:%d], will try again in %d s",
                self.entity_id,
                self._num_retries,
                retry_delay,
            )
            async_call_later(self.hass, retry_delay, self.async_update_prices)
            return

        if not prices:
            # Source already flagged unavailable; wait for the next scheduled attempt.
            _LOGGER.debug("%s: data source is not yet available", self.entity_id)
            return

        self._num_retries = 0
        if not self._pvpc_data.source_available:
            self._pvpc_data.source_available = True
            _LOGGER.warning("%s: component has recovered data access", self.entity_id)
        self.update_current_price(now)
| nkgilley/home-assistant | homeassistant/components/pvpc_hourly_pricing/sensor.py | Python | apache-2.0 | 5,339 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training helper that checkpoints models and creates session."""
import time
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import checkpoint_management
from tensorflow.python.util.tf_export import tf_export
def _maybe_name(obj):
"""Returns object name if it has one, or a message otherwise.
This is useful for names that apper in error messages.
Args:
obj: Object to get the name of.
Returns:
name, "None", or a "no name" message.
"""
if obj is None:
return "None"
elif hasattr(obj, "name"):
return obj.name
else:
return "<no name for %s>" % type(obj)
def _restore_checkpoint_and_maybe_run_saved_model_initializers(
    sess, saver, path):
  """Restores checkpoint values and SavedModel initializers if found.

  Args:
    sess: A `Session` in which the restore and init ops are run.
    saver: A `Saver` used to restore variable values from `path`.
    path: Full path of the checkpoint to restore.
  """
  # NOTE: All references to SavedModel refer to SavedModels loaded from the
  # load_v2 API (which does not require the `sess` argument).

  # If the graph contains resources loaded from a SavedModel, they are not
  # restored when calling `saver.restore`. Thus, the SavedModel initializer must
  # be called with `saver.restore` to properly initialize the model.

  # The SavedModel init is stored in the "saved_model_initializers" collection.
  # This collection is part of the MetaGraph's default_init_op, so it is already
  # called by MonitoredSession as long as the saver doesn't restore any
  # checkpoints from the working dir.
  saved_model_init_ops = ops.get_collection("saved_model_initializers")
  if saved_model_init_ops:
    sess.run(saved_model_init_ops)

  # The saver must be called *after* the SavedModel init, because the SavedModel
  # init will restore the variables from the SavedModel variables directory.
  # Initializing/restoring twice is not ideal but there's no other way to do it.
  saver.restore(sess, path)
@tf_export(v1=["train.SessionManager"])
class SessionManager(object):
  """Training helper that restores from checkpoint and creates session.

  This class is a small wrapper that takes care of session creation and
  checkpoint recovery. It also provides functions to facilitate
  coordination among multiple training threads or processes.

  * Checkpointing trained variables as the training progresses.
  * Initializing variables on startup, restoring them from the most recent
    checkpoint after a crash, or wait for checkpoints to become available.

  ### Usage:

  ```python
  with tf.Graph().as_default():
     ...add operations to the graph...
    # Create a SessionManager that will checkpoint the model in '/tmp/mydir'.
    sm = SessionManager()
    sess = sm.prepare_session(master, init_op, saver, checkpoint_dir)
    # Use the session to train the graph.
    while True:
      sess.run(<my_train_op>)
  ```

  `prepare_session()` initializes or restores a model. It requires `init_op`
  and `saver` as arguments.

  A second process could wait for the model to be ready by doing the following:

  ```python
  with tf.Graph().as_default():
     ...add operations to the graph...
    # Create a SessionManager that will wait for the model to become ready.
    sm = SessionManager()
    sess = sm.wait_for_session(master)
    # Use the session to train the graph.
    while True:
      sess.run(<my_train_op>)
  ```

  `wait_for_session()` waits for a model to be initialized by other processes.
  """

  def __init__(self,
               local_init_op=None,
               ready_op=None,
               ready_for_local_init_op=None,
               graph=None,
               recovery_wait_secs=30,
               local_init_run_options=None,
               local_init_feed_dict=None):
    """Creates a SessionManager.

    The `local_init_op` is an `Operation` that is run always after a new session
    was created. If `None`, this step is skipped.

    The `ready_op` is an `Operation` used to check if the model is ready. The
    model is considered ready if that operation returns an empty 1D string
    tensor. If the operation returns a non empty 1D string tensor, the elements
    are concatenated and used to indicate to the user why the model is not
    ready.

    The `ready_for_local_init_op` is an `Operation` used to check if the model
    is ready to run local_init_op. The model is considered ready if that
    operation returns an empty 1D string tensor. If the operation returns a non
    empty 1D string tensor, the elements are concatenated and used to indicate
    to the user why the model is not ready.

    If `ready_op` is `None`, the model is not checked for readiness.

    `recovery_wait_secs` is the number of seconds between checks that
    the model is ready. It is used by processes to wait for a model to
    be initialized or restored. Defaults to 30 seconds.

    Args:
      local_init_op: An `Operation` run immediately after session creation.
        Usually used to initialize tables and local variables.
      ready_op: An `Operation` to check if the model is initialized.
      ready_for_local_init_op: An `Operation` to check if the model is ready
        to run local_init_op.
      graph: The `Graph` that the model will use.
      recovery_wait_secs: Seconds between checks for the model to be ready.
      local_init_run_options: RunOptions to be passed to session.run when
        executing the local_init_op.
      local_init_feed_dict: Optional session feed dictionary to use when running
        the local_init_op.

    Raises:
      ValueError: If ready_for_local_init_op is not None but local_init_op is
        None
    """
    # Sets default values of arguments.
    if graph is None:
      graph = ops.get_default_graph()
    self._local_init_op = local_init_op
    self._ready_op = ready_op
    self._ready_for_local_init_op = ready_for_local_init_op
    self._graph = graph
    self._recovery_wait_secs = recovery_wait_secs
    self._target = None
    self._local_init_run_options = local_init_run_options
    self._local_init_feed_dict = local_init_feed_dict
    if ready_for_local_init_op is not None and local_init_op is None:
      raise ValueError("If you pass a ready_for_local_init_op "
                       "you must also pass a local_init_op "
                       ", ready_for_local_init_op [%s]" %
                       ready_for_local_init_op)

  def _restore_checkpoint(self,
                          master,
                          saver=None,
                          checkpoint_dir=None,
                          checkpoint_filename_with_path=None,
                          wait_for_checkpoint=False,
                          max_wait_secs=7200,
                          config=None):
    """Creates a `Session`, and tries to restore a checkpoint.

    Args:
      master: `String` representation of the TensorFlow master to use.
      saver: A `Saver` object used to restore a model.
      checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
        dir will be used to restore.
      checkpoint_filename_with_path: Full file name path to the checkpoint file.
      wait_for_checkpoint: Whether to wait for checkpoint to become available.
      max_wait_secs: Maximum time to wait for checkpoints to become available.
      config: Optional `ConfigProto` proto used to configure the session.

    Returns:
      A pair (sess, is_restored) where 'is_restored' is `True` if
      the session could be restored, `False` otherwise.

    Raises:
      ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
        set.
    """
    self._target = master

    # This is required to so that we initialize the TPU device before
    # restoring from checkpoint since we'll be placing variables on the device
    # and TPUInitialize wipes out the memory of the device.
    strategy = distribution_strategy_context.get_strategy()
    if strategy and hasattr(strategy.extended,
                            "_experimental_initialize_system"):
      strategy.extended._experimental_initialize_system()  # pylint: disable=protected-access

    sess = session.Session(self._target, graph=self._graph, config=config)
    if checkpoint_dir and checkpoint_filename_with_path:
      raise ValueError("Can not provide both checkpoint_dir and "
                       "checkpoint_filename_with_path.")
    # If either saver or checkpoint_* is not specified, cannot restore. Just
    # return.
    if not saver or not (checkpoint_dir or checkpoint_filename_with_path):
      return sess, False

    if checkpoint_filename_with_path:
      _restore_checkpoint_and_maybe_run_saved_model_initializers(
          sess, saver, checkpoint_filename_with_path)
      return sess, True

    # Waits up until max_wait_secs for checkpoint to become available.
    wait_time = 0
    ckpt = checkpoint_management.get_checkpoint_state(checkpoint_dir)
    while not ckpt or not ckpt.model_checkpoint_path:
      if wait_for_checkpoint and wait_time < max_wait_secs:
        logging.info("Waiting for checkpoint to be available.")
        time.sleep(self._recovery_wait_secs)
        wait_time += self._recovery_wait_secs
        ckpt = checkpoint_management.get_checkpoint_state(checkpoint_dir)
      else:
        return sess, False

    # Loads the checkpoint.
    _restore_checkpoint_and_maybe_run_saved_model_initializers(
        sess, saver, ckpt.model_checkpoint_path)
    saver.recover_last_checkpoints(ckpt.all_model_checkpoint_paths)
    return sess, True

  def prepare_session(self,
                      master,
                      init_op=None,
                      saver=None,
                      checkpoint_dir=None,
                      checkpoint_filename_with_path=None,
                      wait_for_checkpoint=False,
                      max_wait_secs=7200,
                      config=None,
                      init_feed_dict=None,
                      init_fn=None):
    """Creates a `Session`. Makes sure the model is ready to be used.

    Creates a `Session` on 'master'. If a `saver` object is passed in, and
    `checkpoint_dir` points to a directory containing valid checkpoint
    files, then it will try to recover the model from checkpoint. If
    no checkpoint files are available, and `wait_for_checkpoint` is
    `True`, then the process would check every `recovery_wait_secs`,
    up to `max_wait_secs`, for recovery to succeed.

    If the model cannot be recovered successfully then it is initialized by
    running the `init_op` and calling `init_fn` if they are provided.
    The `local_init_op` is also run after init_op and init_fn, regardless of
    whether the model was recovered successfully, but only if
    `ready_for_local_init_op` passes.

    If the model is recovered from a checkpoint it is assumed that all
    global variables have been initialized, in particular neither `init_op`
    nor `init_fn` will be executed.

    It is an error if the model cannot be recovered and no `init_op`
    or `init_fn` or `local_init_op` are passed.

    Args:
      master: `String` representation of the TensorFlow master to use.
      init_op: Optional `Operation` used to initialize the model.
      saver: A `Saver` object used to restore a model.
      checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
        dir will be used to restore.
      checkpoint_filename_with_path: Full file name path to the checkpoint file.
      wait_for_checkpoint: Whether to wait for checkpoint to become available.
      max_wait_secs: Maximum time to wait for checkpoints to become available.
      config: Optional `ConfigProto` proto used to configure the session.
      init_feed_dict: Optional dictionary that maps `Tensor` objects to feed
        values. This feed dictionary is passed to the session `run()` call when
        running the init op.
      init_fn: Optional callable used to initialize the model. Called after the
        optional `init_op` is called. The callable must accept one argument,
        the session being initialized.

    Returns:
      A `Session` object that can be used to drive the model.

    Raises:
      RuntimeError: If the model cannot be initialized or recovered.
      ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
        set.
    """
    sess, is_loaded_from_checkpoint = self._restore_checkpoint(
        master,
        saver,
        checkpoint_dir=checkpoint_dir,
        checkpoint_filename_with_path=checkpoint_filename_with_path,
        wait_for_checkpoint=wait_for_checkpoint,
        max_wait_secs=max_wait_secs,
        config=config)
    if not is_loaded_from_checkpoint:
      if init_op is None and not init_fn and self._local_init_op is None:
        raise RuntimeError("Model is not initialized and no init_op or "
                           "init_fn or local_init_op was given")
      if init_op is not None:
        sess.run(init_op, feed_dict=init_feed_dict)
      if init_fn:
        init_fn(sess)

    local_init_success, msg = self._try_run_local_init_op(sess)
    if not local_init_success:
      raise RuntimeError(
          "Init operations did not make model ready for local_init. "
          "Init op: %s, init fn: %s, error: %s" % (_maybe_name(init_op),
                                                   init_fn,
                                                   msg))

    is_ready, msg = self._model_ready(sess)
    if not is_ready:
      raise RuntimeError(
          "Init operations did not make model ready. "
          "Init op: %s, init fn: %s, local_init_op: %s, error: %s" %
          (_maybe_name(init_op), init_fn, self._local_init_op, msg))
    return sess

  def recover_session(self,
                      master,
                      saver=None,
                      checkpoint_dir=None,
                      checkpoint_filename_with_path=None,
                      wait_for_checkpoint=False,
                      max_wait_secs=7200,
                      config=None):
    """Creates a `Session`, recovering if possible.

    Creates a new session on 'master'. If the session is not initialized
    and can be recovered from a checkpoint, recover it.

    Args:
      master: `String` representation of the TensorFlow master to use.
      saver: A `Saver` object used to restore a model.
      checkpoint_dir: Path to the checkpoint files. The latest checkpoint in the
        dir will be used to restore.
      checkpoint_filename_with_path: Full file name path to the checkpoint file.
      wait_for_checkpoint: Whether to wait for checkpoint to become available.
      max_wait_secs: Maximum time to wait for checkpoints to become available.
      config: Optional `ConfigProto` proto used to configure the session.

    Returns:
      A pair (sess, initialized) where 'initialized' is `True` if
      the session could be recovered and initialized, `False` otherwise.

    Raises:
      ValueError: If both checkpoint_dir and checkpoint_filename_with_path are
        set.
    """
    sess, is_loaded_from_checkpoint = self._restore_checkpoint(
        master,
        saver,
        checkpoint_dir=checkpoint_dir,
        checkpoint_filename_with_path=checkpoint_filename_with_path,
        wait_for_checkpoint=wait_for_checkpoint,
        max_wait_secs=max_wait_secs,
        config=config)

    # Always try to run local_init_op
    local_init_success, msg = self._try_run_local_init_op(sess)

    if not is_loaded_from_checkpoint:
      # Do not need to run checks for readiness
      return sess, False

    restoring_file = checkpoint_dir or checkpoint_filename_with_path
    if not local_init_success:
      logging.info(
          "Restoring model from %s did not make model ready for local init:"
          " %s", restoring_file, msg)
      return sess, False

    is_ready, msg = self._model_ready(sess)
    if not is_ready:
      logging.info("Restoring model from %s did not make model ready: %s",
                   restoring_file, msg)
      return sess, False

    logging.info("Restored model from %s", restoring_file)
    return sess, is_loaded_from_checkpoint

  def wait_for_session(self, master, config=None, max_wait_secs=float("Inf")):
    """Creates a new `Session` and waits for model to be ready.

    Creates a new `Session` on 'master'. Waits for the model to be
    initialized or recovered from a checkpoint. It's expected that
    another thread or process will make the model ready, and that this
    is intended to be used by threads/processes that participate in a
    distributed training configuration where a different thread/process
    is responsible for initializing or recovering the model being trained.

    NB: The amount of time this method waits for the session is bounded
    by max_wait_secs. By default, this function will wait indefinitely.

    Args:
      master: `String` representation of the TensorFlow master to use.
      config: Optional ConfigProto proto used to configure the session.
      max_wait_secs: Maximum time to wait for the session to become available.

    Returns:
      A `Session`. May be None if the operation exceeds the timeout
      specified by config.operation_timeout_in_ms.

    Raises:
      tf.DeadlineExceededError: if the session is not available after
        max_wait_secs.
    """
    self._target = master

    if max_wait_secs is None:
      max_wait_secs = float("Inf")
    timer = _CountDownTimer(max_wait_secs)

    while True:
      sess = session.Session(self._target, graph=self._graph, config=config)
      not_ready_msg = None
      not_ready_local_msg = None
      local_init_success, not_ready_local_msg = self._try_run_local_init_op(
          sess)
      if local_init_success:
        # Successful if local_init_op is None, or ready_for_local_init_op passes
        is_ready, not_ready_msg = self._model_ready(sess)
        if is_ready:
          return sess
      self._safe_close(sess)

      # Do we have enough time left to try again?
      # (Renamed from remaining_ms_after_wait: the value is in seconds.)
      remaining_secs_after_wait = (
          timer.secs_remaining() - self._recovery_wait_secs)
      if remaining_secs_after_wait < 0:
        raise errors.DeadlineExceededError(
            None, None,
            "Session was not ready after waiting %d secs." % (max_wait_secs,))

      logging.info("Waiting for model to be ready. "
                   "Ready_for_local_init_op: %s, ready: %s",
                   not_ready_local_msg, not_ready_msg)
      time.sleep(self._recovery_wait_secs)

  def _safe_close(self, sess):
    """Closes a session without raising an exception.

    Just like sess.close() but ignores exceptions.

    Args:
      sess: A `Session`.
    """
    # pylint: disable=broad-except
    try:
      sess.close()
    except Exception:
      # Intentionally not logging to avoid user complaints that
      # they get cryptic errors. We really do not care that Close
      # fails.
      pass
    # pylint: enable=broad-except

  def _model_ready(self, sess):
    """Checks if the model is ready or not.

    Args:
      sess: A `Session`.

    Returns:
      A tuple (is_ready, msg), where is_ready is True if ready and False
      otherwise, and msg is `None` if the model is ready, a `String` with the
      reason why it is not ready otherwise.
    """
    return _ready(self._ready_op, sess, "Model not ready")

  def _model_ready_for_local_init(self, sess):
    """Checks if the model is ready to run local_init_op.

    Args:
      sess: A `Session`.

    Returns:
      A tuple (is_ready, msg), where is_ready is True if ready to run
      local_init_op and False otherwise, and msg is `None` if the model is
      ready to run local_init_op, a `String` with the reason why it is not ready
      otherwise.
    """
    return _ready(self._ready_for_local_init_op, sess,
                  "Model not ready for local init")

  def _try_run_local_init_op(self, sess):
    """Tries to run _local_init_op, if not None, and is ready for local init.

    Args:
      sess: A `Session`.

    Returns:
      A tuple (is_successful, msg), where is_successful is True if
      _local_init_op is None, or we ran _local_init_op, and False otherwise;
      and msg is a `String` with the reason why the model was not ready to run
      local init.
    """
    if self._local_init_op is not None:
      is_ready_for_local_init, msg = self._model_ready_for_local_init(sess)
      if is_ready_for_local_init:
        logging.info("Running local_init_op.")
        sess.run(self._local_init_op, feed_dict=self._local_init_feed_dict,
                 options=self._local_init_run_options)
        logging.info("Done running local_init_op.")
        return True, None
      else:
        return False, msg
    return True, None
def _ready(op, sess, msg):
"""Checks if the model is ready or not, as determined by op.
Args:
op: An op, either _ready_op or _ready_for_local_init_op, which defines the
readiness of the model.
sess: A `Session`.
msg: A message to log to warning if not ready
Returns:
A tuple (is_ready, msg), where is_ready is True if ready and False
otherwise, and msg is `None` if the model is ready, a `String` with the
reason why it is not ready otherwise.
"""
if op is None:
return True, None
else:
try:
ready_value = sess.run(op)
# The model is considered ready if ready_op returns an empty 1-D tensor.
# Also compare to `None` and dtype being int32 for backward
# compatibility.
if (ready_value is None or ready_value.dtype == np.int32 or
ready_value.size == 0):
return True, None
else:
# TODO(sherrym): If a custom ready_op returns other types of tensor,
# or strings other than variable names, this message could be
# confusing.
non_initialized_varnames = ", ".join(
[i.decode("utf-8") for i in ready_value])
return False, "Variables not initialized: " + non_initialized_varnames
except errors.FailedPreconditionError as e:
if "uninitialized" not in str(e):
logging.warning("%s : error [%s]", msg, str(e))
raise e
return False, str(e)
class _CountDownTimer(object):
__slots__ = ["_start_time_secs", "_duration_secs"]
def __init__(self, duration_secs):
self._start_time_secs = time.time()
self._duration_secs = duration_secs
def secs_remaining(self):
diff = self._duration_secs - (time.time() - self._start_time_secs)
return max(0, diff)
| tensorflow/tensorflow | tensorflow/python/training/session_manager.py | Python | apache-2.0 | 23,320 |
/**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.analytics.model.sabrcube;
import static com.opengamma.engine.value.ValueRequirementNames.SABR_SURFACES;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.analytics.financial.interestrate.PresentValueCurveSensitivitySABRCalculator;
import com.opengamma.analytics.financial.interestrate.PresentValueNodeSensitivityCalculator;
import com.opengamma.analytics.financial.interestrate.YieldCurveBundle;
import com.opengamma.analytics.financial.model.option.definition.SABRInterestRateCorrelationParameters;
import com.opengamma.analytics.financial.model.option.definition.SABRInterestRateDataBundle;
import com.opengamma.analytics.math.function.DoubleFunction1D;
import com.opengamma.analytics.math.surface.InterpolatedDoublesSurface;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.FunctionInputs;
import com.opengamma.engine.target.ComputationTargetType;
import com.opengamma.engine.value.SurfaceAndCubePropertyNames;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValuePropertyNames;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.financial.analytics.model.sabr.SABRDiscountingFunction;
import com.opengamma.financial.analytics.model.volatility.SmileFittingPropertyNamesAndValues;
import com.opengamma.financial.analytics.volatility.fittedresults.SABRFittedSurfaces;
import com.opengamma.financial.security.FinancialSecurityTypes;
import com.opengamma.financial.security.FinancialSecurityUtils;
import com.opengamma.util.money.Currency;
/**
 * Produces yield curve node sensitivities for CMS spread cap/floor securities priced with the
 * SABR model <em>without</em> extrapolation for strikes outside the fitted cube.
 *
 * @deprecated Use descendants of {@link SABRDiscountingFunction}
 */
@Deprecated
public class SABRCMSSpreadNoExtrapolationYCNSFunction extends SABRYCNSFunction {
  /** Node sensitivity calculator backed by the SABR present-value curve sensitivity calculator. */
  private static final PresentValueNodeSensitivityCalculator NSC = PresentValueNodeSensitivityCalculator.using(PresentValueCurveSensitivitySABRCalculator.getInstance());

  @Override
  public ComputationTargetType getTargetType() {
    // This function applies only to CMS spread cap/floor securities.
    return FinancialSecurityTypes.CAP_FLOOR_CMS_SPREAD_SECURITY;
  }

  /**
   * Builds the SABR model data bundle from the fitted parameter surfaces supplied in the
   * function inputs.
   *
   * @param target the computation target
   * @param inputs the function inputs; must contain a {@link SABRFittedSurfaces} value under
   *          {@code SABR_SURFACES}
   * @param currency the security currency
   * @param yieldCurves the yield curve bundle
   * @param desiredValue the desired value (unused here)
   * @return the SABR data bundle combining the fitted surfaces with the yield curves
   * @throws OpenGammaRuntimeException if the SABR surfaces are not present in the inputs
   */
  @Override
  protected SABRInterestRateDataBundle getModelParameters(final ComputationTarget target, final FunctionInputs inputs, final Currency currency,
      final YieldCurveBundle yieldCurves, final ValueRequirement desiredValue) {
    final Object surfacesObject = inputs.getValue(SABR_SURFACES);
    if (surfacesObject == null) {
      throw new OpenGammaRuntimeException("Could not get SABR parameter surfaces");
    }
    final SABRFittedSurfaces surfaces = (SABRFittedSurfaces) surfacesObject;
    final InterpolatedDoublesSurface alphaSurface = surfaces.getAlphaSurface();
    final InterpolatedDoublesSurface betaSurface = surfaces.getBetaSurface();
    final InterpolatedDoublesSurface nuSurface = surfaces.getNuSurface();
    final InterpolatedDoublesSurface rhoSurface = surfaces.getRhoSurface();
    final DoubleFunction1D correlationFunction = getCorrelationFunction();
    // NOTE(review): argument order here is (alpha, beta, rho, nu, correlation) -- confirm against
    // the SABRInterestRateCorrelationParameters constructor signature before changing anything.
    final SABRInterestRateCorrelationParameters modelParameters = new SABRInterestRateCorrelationParameters(alphaSurface, betaSurface, rhoSurface, nuSurface, correlationFunction);
    return new SABRInterestRateDataBundle(modelParameters, yieldCurves);
  }

  /**
   * Creates the result properties advertised before any constraints are known: currency is fixed,
   * everything else is left open ("any"), and the calculation method is pinned to
   * SABR-no-extrapolation.
   */
  @Override
  protected ValueProperties.Builder createValueProperties(final Currency currency) {
    return createValueProperties()
        .with(ValuePropertyNames.CURRENCY, currency.getCode())
        .with(ValuePropertyNames.CURVE_CURRENCY, currency.getCode())
        .withAny(ValuePropertyNames.CURVE_CALCULATION_CONFIG)
        .withAny(ValuePropertyNames.CURVE)
        .withAny(SurfaceAndCubePropertyNames.PROPERTY_CUBE_DEFINITION)
        .withAny(SurfaceAndCubePropertyNames.PROPERTY_CUBE_SPECIFICATION)
        .withAny(SurfaceAndCubePropertyNames.PROPERTY_SURFACE_DEFINITION)
        .withAny(SurfaceAndCubePropertyNames.PROPERTY_SURFACE_SPECIFICATION)
        .withAny(SmileFittingPropertyNamesAndValues.PROPERTY_FITTING_METHOD)
        .with(SmileFittingPropertyNamesAndValues.PROPERTY_VOLATILITY_MODEL, SmileFittingPropertyNamesAndValues.SABR)
        .with(ValuePropertyNames.CALCULATION_METHOD, SABRFunction.SABR_NO_EXTRAPOLATION);
  }

  /**
   * Creates the result properties for a specific requirement by copying each constraint from the
   * desired value (cube/surface names, curve configuration, fitting method) into the result.
   */
  @Override
  protected ValueProperties.Builder createValueProperties(final ComputationTarget target, final ValueRequirement desiredValue) {
    final String cubeDefinitionName = desiredValue.getConstraint(SurfaceAndCubePropertyNames.PROPERTY_CUBE_DEFINITION);
    final String cubeSpecificationName = desiredValue.getConstraint(SurfaceAndCubePropertyNames.PROPERTY_CUBE_SPECIFICATION);
    final String surfaceDefinitionName = desiredValue.getConstraint(SurfaceAndCubePropertyNames.PROPERTY_SURFACE_DEFINITION);
    final String surfaceSpecificationName = desiredValue.getConstraint(SurfaceAndCubePropertyNames.PROPERTY_SURFACE_SPECIFICATION);
    final String currency = FinancialSecurityUtils.getCurrency(target.getSecurity()).getCode();
    final String curveCalculationConfig = desiredValue.getConstraint(ValuePropertyNames.CURVE_CALCULATION_CONFIG);
    final String fittingMethod = desiredValue.getConstraint(SmileFittingPropertyNamesAndValues.PROPERTY_FITTING_METHOD);
    final String curveName = desiredValue.getConstraint(ValuePropertyNames.CURVE);
    return createValueProperties()
        .with(ValuePropertyNames.CURRENCY, currency)
        .with(ValuePropertyNames.CURVE_CURRENCY, currency)
        .with(ValuePropertyNames.CURVE_CALCULATION_CONFIG, curveCalculationConfig)
        .with(ValuePropertyNames.CURVE, curveName)
        .with(SurfaceAndCubePropertyNames.PROPERTY_CUBE_DEFINITION, cubeDefinitionName)
        .with(SurfaceAndCubePropertyNames.PROPERTY_CUBE_SPECIFICATION, cubeSpecificationName)
        .with(SurfaceAndCubePropertyNames.PROPERTY_SURFACE_DEFINITION, surfaceDefinitionName)
        .with(SurfaceAndCubePropertyNames.PROPERTY_SURFACE_SPECIFICATION, surfaceSpecificationName)
        .with(SmileFittingPropertyNamesAndValues.PROPERTY_FITTING_METHOD, fittingMethod)
        .with(SmileFittingPropertyNamesAndValues.PROPERTY_VOLATILITY_MODEL, SmileFittingPropertyNamesAndValues.SABR)
        .with(ValuePropertyNames.CALCULATION_METHOD, SABRFunction.SABR_NO_EXTRAPOLATION);
  }

  @Override
  protected PresentValueNodeSensitivityCalculator getNodeSensitivityCalculator(final ValueRequirement desiredValue) {
    // The calculator is stateless, so a single shared instance is returned for all requirements.
    return NSC;
  }

  /**
   * Returns the correlation function between the two CMS rates.
   * NOTE(review): the correlation is hard-coded to a constant 0.8 for all strikes.
   */
  private static DoubleFunction1D getCorrelationFunction() {
    return new DoubleFunction1D() {
      @Override
      public Double evaluate(final Double x) {
        return 0.8;
      }
    };
  }
}
| jeorme/OG-Platform | projects/OG-Financial/src/main/java/com/opengamma/financial/analytics/model/sabrcube/SABRCMSSpreadNoExtrapolationYCNSFunction.java | Java | apache-2.0 | 6,665 |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.databridge.agent.conf;
import org.apache.commons.pool.impl.GenericKeyedObjectPool;
import org.wso2.carbon.databridge.agent.util.DataEndpointConstants;
/**
 * Immutable configuration for a single data-bridge endpoint: receiver and authentication URLs,
 * credentials, the (plain and secured) transport pools and the publisher thread-pool sizing.
 * Only the session id is mutable; it is assigned after a successful authentication handshake.
 */
public class DataEndpointConfiguration {
    private final String receiverURL;
    private final String authURL;
    private final String username;
    private final String password;
    private final GenericKeyedObjectPool transportPool;
    private final GenericKeyedObjectPool securedTransportPool;
    private final int batchSize;
    // Cache keys of the form "<url>::<username>::<password>" used to look up pooled connections.
    private final String publisherKey;
    private final String authKey;
    // Mutable: set once the endpoint has authenticated.
    private String sessionId;
    private final int corePoolSize;
    private final int maxPoolSize;
    private final int keepAliveTimeInPool;

    /**
     * Transport protocols supported by an endpoint. {@link #toString()} returns the lower-case
     * scheme name ("tcp" / "ssl") suitable for composing endpoint URLs.
     */
    public enum Protocol {
        TCP, SSL;

        @Override
        public String toString() {
            return super.toString().toLowerCase();
        }
    }

    /**
     * Creates an endpoint configuration.
     *
     * @param receiverURL URL events are published to
     * @param authURL URL used for authentication
     * @param username endpoint username
     * @param password endpoint password
     * @param transportPool pool of plain transport connections
     * @param securedTransportPool pool of secured (SSL) transport connections
     * @param batchSize number of events sent per batch
     * @param corePoolSize core size of the publishing thread pool
     * @param maxPoolSize maximum size of the publishing thread pool
     * @param keepAliveTimeInPool keep-alive time for idle pool threads
     */
    public DataEndpointConfiguration(String receiverURL, String authURL, String username, String password,
                                     GenericKeyedObjectPool transportPool,
                                     GenericKeyedObjectPool securedTransportPool,
                                     int batchSize, int corePoolSize, int maxPoolSize, int keepAliveTimeInPool) {
        this.receiverURL = receiverURL;
        this.authURL = authURL;
        this.username = username;
        this.password = password;
        this.transportPool = transportPool;
        this.securedTransportPool = securedTransportPool;
        this.publisherKey = this.receiverURL + DataEndpointConstants.SEPARATOR + username +
                DataEndpointConstants.SEPARATOR + password;
        this.authKey = this.authURL + DataEndpointConstants.SEPARATOR + username +
                DataEndpointConstants.SEPARATOR + password;
        this.batchSize = batchSize;
        this.corePoolSize = corePoolSize;
        this.maxPoolSize = maxPoolSize;
        this.keepAliveTimeInPool = keepAliveTimeInPool;
    }

    public String getReceiverURL() {
        return receiverURL;
    }

    public String getUsername() {
        return username;
    }

    public String getAuthURL() {
        return authURL;
    }

    public String getPassword() {
        return password;
    }

    /** Returns a diagnostic description; deliberately omits the password. */
    @Override
    public String toString() {
        return "ReceiverURL: " + receiverURL + "," +
                "Authentication URL: " + authURL + "," +
                "Username: " + username;
    }

    public String getPublisherKey() {
        return publisherKey;
    }

    public String getAuthKey() {
        return authKey;
    }

    public String getSessionId() {
        return sessionId;
    }

    public void setSessionId(String sessionId) {
        this.sessionId = sessionId;
    }

    public GenericKeyedObjectPool getTransportPool() {
        return transportPool;
    }

    public GenericKeyedObjectPool getSecuredTransportPool() {
        return securedTransportPool;
    }

    public int getCorePoolSize() {
        return corePoolSize;
    }

    public int getMaxPoolSize() {
        return maxPoolSize;
    }

    public int getKeepAliveTimeInPool() {
        return keepAliveTimeInPool;
    }

    public int getBatchSize() {
        return batchSize;
    }
}
| keizer619/carbon-analytics-common | components/data-bridge/org.wso2.carbon.databridge.agent/src/main/java/org/wso2/carbon/databridge/agent/conf/DataEndpointConfiguration.java | Java | apache-2.0 | 3,842 |
// This may look like C code, but it's really -*- C++ -*-
/*
* Copyright (C) 2008 Emweb bvba, Kessel-Lo, Belgium.
*
* See the LICENSE file for terms of use.
*/
#ifndef WT_DBO_FIELD_IMPL_H_
#define WT_DBO_FIELD_IMPL_H_
#include <Wt/Dbo/Session>
#include <Wt/Dbo/Exception>
#include <Wt/Dbo/SqlStatement>
#include <Wt/Dbo/SqlTraits>
#include <Wt/Dbo/DbAction>
namespace Wt {
  namespace Dbo {

// FieldRef<V>: a lightweight reference to a mapped plain-value field, carrying
// the column name and declared size so persistence actions can bind and read it.
template <typename V>
FieldRef<V>::FieldRef(V& value, const std::string& name, int size)
  : value_(value),
    name_(name),
    size_(size)
{ }
template <typename V>
const std::string& FieldRef<V>::name() const
{
  return name_;
}
template <typename V>
int FieldRef<V>::size() const
{
  return size_;
}
// Maps the C++ value type to its SQL column type for the session's backend.
template <typename V>
std::string FieldRef<V>::sqlType(Session& session) const
{
  return sql_value_traits<V>::type(session.connection(false), size_);
}
template <typename V>
const std::type_info *FieldRef<V>::type() const
{
  return &typeid(V);
}
// Binds the referenced value into a prepared statement at the given column.
template <typename V>
void FieldRef<V>::bindValue(SqlStatement *statement, int column) const
{
  sql_value_traits<V>::bind(value_, statement, column, size_);
}
// Reads the given result column back into the referenced value.
template <typename V>
void FieldRef<V>::setValue(Session& session, SqlStatement *statement,
			   int column) const
{
  sql_value_traits<V>::read(value_, statement, column, size_);
}
// CollectionRef<C>: reference to a to-many relation (collection of ptr<C>),
// keeping the relation type, join table/field names and FK constraint flags.
template <class C>
CollectionRef<C>::CollectionRef(collection< ptr<C> >& value,
				RelationType type,
				const std::string& joinName,
				const std::string& joinId,
				int fkConstraints)
  : value_(value), joinName_(joinName), joinId_(joinId), type_(type),
    fkConstraints_(fkConstraints)
{ }
// PtrRef<C>: reference to a to-one relation (a ptr<C> foreign key field).
template <class C>
PtrRef<C>::PtrRef(ptr<C>& value, const std::string& name, int size,
		  int fkConstraints)
  : value_(value),
    name_(name),
    size_(size),
    fkConstraints_(fkConstraints)
{ }
// LoadLazyHelper: the primary template is a no-op; the partial specialization
// below (enabled only for actions that set values, e.g. loading from the
// database) replaces the ptr with a lazily-loaded proxy for the read id.
template <class C, class A, class Enable = void>
struct LoadLazyHelper
{
  static void loadLazy(ptr<C>& p, typename dbo_traits<C>::IdType id,
		       Session *session) { }
};
template <class C, class A>
struct LoadLazyHelper<C, A, typename boost::enable_if<action_sets_value<A> >::type>
{
  static void loadLazy(ptr<C>& p, typename dbo_traits<C>::IdType id,
		       Session *session) {
    if (!(id == dbo_traits<C>::invalidId())) {
      if (session)
	p = session->loadLazy<C>(id);
      else
	throw Exception("Could not load referenced Dbo::ptr, no session?");
    }
  }
};
// Visits the foreign-key id of a to-one relation: for value-setting actions an
// invalid id placeholder is used (the real id is read back), otherwise the
// current target id is written out. The FK column is named "<name>_<idField>".
template <class C>
template <class A>
void PtrRef<C>::visit(A& action, Session *session) const
{
  typename dbo_traits<C>::IdType id;
  if (action.setsValue())
    id = dbo_traits<C>::invalidId();
  else
    id = value_.id();
  // "stub" is only a fallback used when no session (and thus no mapping) is
  // available to resolve the real id field name.
  std::string idFieldName = "stub";
  int size = size_;
  if (session) {
    Impl::MappingInfo *mapping = session->getMapping<C>();
    action.actMapping(mapping);
    idFieldName = mapping->naturalIdFieldName;
    size = mapping->naturalIdFieldSize;
    if (idFieldName.empty())
      idFieldName = mapping->surrogateIdFieldName;
  }
  field(action, id, name_ + "_" + idFieldName, size);
  LoadLazyHelper<C, A>::loadLazy(value_, id, session);
}
// WeakPtrRef<C>: reference to the inverse side of a hasOne relation.
template <class C>
WeakPtrRef<C>::WeakPtrRef(weak_ptr<C>& value, const std::string& joinName)
  : value_(value),
    joinName_(joinName)
{ }
template <class C>
const std::type_info *PtrRef<C>::type() const
{
  return &typeid(typename dbo_traits<C>::IdType);
}
// Free functions used inside persist() implementations to declare fields.
template <class A, typename V>
void id(A& action, V& value, const std::string& name, int size)
{
  action.actId(value, name, size);
}
template <class A, class C>
void id(A& action, ptr<C>& value, const std::string& name,
	ForeignKeyConstraint constraint, int size)
{
  action.actId(value, name, size, constraint.value());
}
template <class A, typename V>
void field(A& action, V& value, const std::string& name, int size)
{
  action.act(FieldRef<V>(value, name, size));
}
template <class A, class C>
void field(A& action, ptr<C>& value, const std::string& name, int size)
{
  action.actPtr(PtrRef<C>(value, name, size, 0));
}
// belongsTo: declares the owning side of a to-one relation. When no explicit
// name is given, the related class's table name is used as the FK field name.
template <class A, class C>
void belongsToImpl(A& action, ptr<C>& value, const std::string& name,
		   int fkConstraints, int size)
{
  if (name.empty() && action.session())
    action.actPtr(PtrRef<C>(value, action.session()->template tableName<C>(),
			    size, fkConstraints));
  else
    action.actPtr(PtrRef<C>(value, name, size, fkConstraints));
}
template <class A, class C>
void belongsTo(A& action, ptr<C>& value, const std::string& name, int size)
{
  belongsToImpl(action, value, name, 0, size);
}
template <class A, class C>
void belongsTo(A& action, ptr<C>& value, const std::string& name,
	       ForeignKeyConstraint constraint, int size)
{
  belongsToImpl(action, value, name, constraint.value(), size);
}
template <class A, class C>
void belongsTo(A& action, ptr<C>& value,
	       ForeignKeyConstraint constraint, int size)
{
  belongsToImpl(action, value, std::string(), constraint.value(), size);
}
// hasOne: declares the inverse side of a one-to-one relation.
template <class A, class C>
void hasOne(A& action, weak_ptr<C>& value, const std::string& joinName)
{
  action.actWeakPtr(WeakPtrRef<C>(value, joinName));
}
// hasMany: declares a to-many relation; the default FK constraints are
// NOT NULL with ON DELETE CASCADE.
template <class A, class C>
void hasMany(A& action, collection< ptr<C> >& value,
	     RelationType type, const std::string& joinName)
{
  action.actCollection(CollectionRef<C>(value, type, joinName, std::string(),
					Impl::FKNotNull |
					Impl::FKOnDeleteCascade));
}
// Overload with an explicit join id: only meaningful for ManyToMany relations,
// where the join table column can be named explicitly.
template <class A, class C>
void hasMany(A& action, collection< ptr<C> >& value,
	     RelationType type, const std::string& joinName,
	     const std::string& joinId, ForeignKeyConstraint constraint)
{
  if (type != ManyToMany)
    throw Exception("hasMany() with named joinId only for a ManyToMany relation");
  action.actCollection(CollectionRef<C>(value, type, joinName, joinId,
					constraint.value()));
}
  }
}
#endif // WT_DBO_FIELD_IMPL_H_
| sanathkumarv/RestAPIWt | tools/wt-3.3.5-rc1/src/Wt/Dbo/Field_impl.h | C | apache-2.0 | 5,829 |
/*
* Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.operation.reference.doc;
import io.crate.operation.reference.doc.lucene.IntegerColumnReference;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.junit.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
/**
 * Tests {@link IntegerColumnReference}: indexes a range of integer values and verifies that the
 * column reference resolves each matched Lucene doc id to its stored int value.
 */
public class IntegerColumnReferenceTest extends DocLevelExpressionsTest {

    @Override
    protected void insertValues(IndexWriter writer) throws Exception {
        // Index 20 documents carrying the values -10..9 in the int field "i".
        for (int i = -10; i<10; i++) {
            Document doc = new Document();
            doc.add(new StringField("_id", Integer.toString(i), Field.Store.NO));
            doc.add(new IntField(fieldName().name(), i, Field.Store.NO));
            writer.addDocument(doc);
        }
    }

    @Override
    protected FieldMapper.Names fieldName() {
        return new FieldMapper.Names("i");
    }

    @Override
    protected FieldDataType fieldType() {
        return new FieldDataType("int");
    }

    @Test
    public void testFieldCacheExpression() throws Exception {
        IntegerColumnReference integerColumn = new IntegerColumnReference(fieldName().name());
        integerColumn.startCollect(ctx);
        integerColumn.setNextReader(readerContext);
        IndexSearcher searcher = new IndexSearcher(readerContext.reader());
        TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 20);
        // NOTE(review): assumes MatchAllDocsQuery returns hits in insertion (doc id) order,
        // so values come back as -10, -9, ..., 9.
        int i = -10;
        for (ScoreDoc doc : topDocs.scoreDocs) {
            integerColumn.setNextDocId(doc.doc);
            assertThat(integerColumn.value(), is(i));
            i++;
        }
    }
}
| gmrodrigues/crate | sql/src/test/java/io/crate/operation/reference/doc/IntegerColumnReferenceTest.java | Java | apache-2.0 | 3,042 |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
******************************************************************************/
package org.apache.olingo.odata2.core.batch;
import java.util.List;
import org.apache.olingo.odata2.api.batch.BatchResponsePart;
import org.apache.olingo.odata2.api.processor.ODataResponse;
/**
 * Default {@link BatchResponsePart} implementation. Holds the ODataResponses belonging to one
 * part of a batch response together with a flag marking whether the part is a change set.
 * Instances are populated through the nested {@link BatchResponsePartBuilderImpl}.
 */
public class BatchResponsePartImpl extends BatchResponsePart {
  private List<ODataResponse> partResponses;
  private boolean changeSet;

  @Override
  public List<ODataResponse> getResponses() {
    return partResponses;
  }

  @Override
  public boolean isChangeSet() {
    return changeSet;
  }

  /**
   * Builder that collects the part data and, on {@link #build()}, copies it into the enclosing
   * instance and returns that instance.
   */
  public class BatchResponsePartBuilderImpl extends BatchResponsePartBuilder {
    private List<ODataResponse> pendingResponses;
    private boolean pendingChangeSet;

    @Override
    public BatchResponsePart build() {
      BatchResponsePartImpl.this.partResponses = pendingResponses;
      BatchResponsePartImpl.this.changeSet = pendingChangeSet;
      return BatchResponsePartImpl.this;
    }

    @Override
    public BatchResponsePartBuilder responses(final List<ODataResponse> responses) {
      pendingResponses = responses;
      return this;
    }

    @Override
    public BatchResponsePartBuilder changeSet(final boolean isChangeSet) {
      pendingChangeSet = isChangeSet;
      return this;
    }
  }
}
| apache/olingo-odata2 | odata2-lib/odata-core/src/main/java/org/apache/olingo/odata2/core/batch/BatchResponsePartImpl.java | Java | apache-2.0 | 2,137 |
/// <reference path='fourslash.ts' />

// Verifies the "Disable checking for this file" quick fix in a checked JavaScript file:
// under // @ts-check, assigning a number to a string-initialized variable is an error,
// and applying the fix (index 1) must prepend // @ts-nocheck to the file.

// @allowjs: true
// @checkJs: true
// @noEmit: true

// @filename: a.js
////// @ts-check
////let x = "";
////[|x|] = 1;

// Both quick fixes are expected to be offered for the error span:
// verify.codeFixAvailable([
//   { description: ts.Diagnostics.Ignore_this_error_message.message },
//   { description: ts.Diagnostics.Disable_checking_for_this_file.message }
// ]);

verify.codeFix({
    description: ts.Diagnostics.Disable_checking_for_this_file.message,
    index: 1,
    newFileContent:
`// @ts-nocheck
let x = "";
x = 1;`,
});
| Microsoft/TypeScript | tests/cases/fourslash/codeFixDisableJsDiagnosticsInFile9.ts | TypeScript | apache-2.0 | 538 |
//-----------------------------------------------------------------------
// <copyright file="SourceSpec.cs" company="Akka.NET Project">
// Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Akka.Streams.Dsl;
using Akka.Streams.TestKit;
using Akka.Streams.TestKit.Tests;
using FluentAssertions;
using Xunit;
using Xunit.Abstractions;
namespace Akka.Streams.Tests.Dsl
{
    /// <summary>
    /// Tests for the built-in <c>Source</c> factories of Akka.NET Streams: Single, Empty,
    /// Failed, Maybe, composite/combined sources, Repeat, Unfold/UnfoldAsync and
    /// enumerator-backed sources.
    /// </summary>
    public class SourceSpec : AkkaSpec
    {
        private ActorMaterializer Materializer { get; }

        public SourceSpec(ITestOutputHelper helper) : base(helper)
        {
            Materializer = ActorMaterializer.Create(Sys);
        }

        [Fact]
        public void Single_Source_must_produce_element()
        {
            var p = Source.Single(1).RunWith(Sink.AsPublisher<int>(false), Materializer);
            var c = TestSubscriber.CreateManualProbe<int>(this);
            p.Subscribe(c);
            var sub = c.ExpectSubscription();
            sub.Request(1);
            c.ExpectNext(1);
            c.ExpectComplete();
        }

        [Fact]
        public void Single_Source_must_reject_later_subscriber()
        {
            // AsPublisher(false) creates a non-fanout publisher: only one subscriber is allowed.
            var p = Source.Single(1).RunWith(Sink.AsPublisher<int>(false), Materializer);
            var c1 = TestSubscriber.CreateManualProbe<int>(this);
            var c2 = TestSubscriber.CreateManualProbe<int>(this);
            p.Subscribe(c1);
            var sub1 = c1.ExpectSubscription();
            sub1.Request(1);
            c1.ExpectNext(1);
            c1.ExpectComplete();
            p.Subscribe(c2);
            c2.ExpectSubscriptionAndError();
        }

        [Fact]
        public void Empty_Source_must_complete_immediately()
        {
            var p = Source.Empty<int>().RunWith(Sink.AsPublisher<int>(false), Materializer);
            var c = TestSubscriber.CreateManualProbe<int>(this);
            p.Subscribe(c);
            c.ExpectSubscriptionAndComplete();
            //reject additional subscriber
            var c2 = TestSubscriber.CreateManualProbe<int>(this);
            p.Subscribe(c2);
            c2.ExpectSubscriptionAndError();
        }

        [Fact]
        public void Failed_Source_must_emit_error_immediately()
        {
            var ex = new SystemException();
            var p = Source.Failed<int>(ex).RunWith(Sink.AsPublisher<int>(false), Materializer);
            var c = TestSubscriber.CreateManualProbe<int>(this);
            p.Subscribe(c);
            c.ExpectSubscriptionAndError();
            //reject additional subscriber
            var c2 = TestSubscriber.CreateManualProbe<int>(this);
            p.Subscribe(c2);
            c2.ExpectSubscriptionAndError();
        }

        [Fact]
        public void Maybe_Source_must_complete_materialized_future_with_None_when_stream_cancels()
        {
            this.AssertAllStagesStopped(() =>
            {
                var neverSource = Source.Maybe<object>();
                var pubSink = Sink.AsPublisher<object>(false);
                var t = neverSource.ToMaterialized(pubSink, Keep.Both).Run(Materializer);
                var f = t.Item1;
                var neverPub = t.Item2;
                var c = TestSubscriber.CreateManualProbe<object>(this);
                neverPub.Subscribe(c);
                var subs = c.ExpectSubscription();
                subs.Request(1000);
                c.ExpectNoMsg(TimeSpan.FromMilliseconds(300));
                subs.Cancel();
                // Downstream cancellation must complete the materialized promise with null.
                f.Task.Wait(500).Should().BeTrue();
                f.Task.Result.Should().Be(null);
            }, Materializer);
        }

        [Fact]
        public void Maybe_Source_must_allow_external_triggering_of_empty_completion()
        {
            this.AssertAllStagesStopped(() =>
            {
                // The Where(_ => false) filter drops the single triggering element,
                // so the aggregate below must observe an empty stream.
                var neverSource = Source.Maybe<int>().Where(_ => false);
                var counterSink = Sink.Aggregate<int, int>(0, (acc, _) => acc + 1);
                var t = neverSource.ToMaterialized(counterSink, Keep.Both).Run(Materializer);
                var neverPromise = t.Item1;
                var counterFuture = t.Item2;
                //external cancellation
                neverPromise.TrySetResult(0).Should().BeTrue();
                counterFuture.Wait(500).Should().BeTrue();
                counterFuture.Result.Should().Be(0);
            }, Materializer);
        }

        [Fact]
        public void Maybe_Source_must_allow_external_triggering_of_non_empty_completion()
        {
            this.AssertAllStagesStopped(() =>
            {
                var neverSource = Source.Maybe<int>();
                var counterSink = Sink.First<int>();
                var t = neverSource.ToMaterialized(counterSink, Keep.Both).Run(Materializer);
                var neverPromise = t.Item1;
                var counterFuture = t.Item2;
                //external cancellation
                neverPromise.TrySetResult(6).Should().BeTrue();
                counterFuture.Wait(500).Should().BeTrue();
                counterFuture.Result.Should().Be(6);
            }, Materializer);
        }

        [Fact]
        public void Maybe_Source_must_allow_external_triggering_of_OnError()
        {
            this.AssertAllStagesStopped(() =>
            {
                var neverSource = Source.Maybe<int>();
                var counterSink = Sink.First<int>();
                var t = neverSource.ToMaterialized(counterSink, Keep.Both).Run(Materializer);
                var neverPromise = t.Item1;
                var counterFuture = t.Item2;
                //external cancellation
                neverPromise.SetException(new Exception("Boom"));
                counterFuture.Invoking(f => f.Wait(500)).ShouldThrow<Exception>()
                    .WithMessage("Boom");
            }, Materializer);
        }

        [Fact]
        public void Composite_Source_must_merge_from_many_inputs()
        {
            // Builds a Source from five AsSubscriber inlets merged through a single Merge stage.
            var probes = Enumerable.Range(1, 5).Select(_ => TestPublisher.CreateManualProbe<int>(this)).ToList();
            var source = Source.AsSubscriber<int>();
            var outProbe = TestSubscriber.CreateManualProbe<int>(this);
            var s =
                Source.FromGraph(GraphDsl.Create(source, source, source, source, source,
                    (a, b, c, d, e) => new[] {a, b, c, d, e},
                    (b, i0, i1, i2, i3, i4) =>
                    {
                        var m = b.Add(new Merge<int>(5));
                        b.From(i0.Outlet).To(m.In(0));
                        b.From(i1.Outlet).To(m.In(1));
                        b.From(i2.Outlet).To(m.In(2));
                        b.From(i3.Outlet).To(m.In(3));
                        b.From(i4.Outlet).To(m.In(4));
                        return new SourceShape<int>(m.Out);
                    })).To(Sink.FromSubscriber(outProbe)).Run(Materializer);
            for (var i = 0; i < 5; i++)
                probes[i].Subscribe(s[i]);
            var sub = outProbe.ExpectSubscription();
            sub.Request(10);
            for (var i = 0; i < 5; i++)
            {
                var subscription = probes[i].ExpectSubscription();
                subscription.ExpectRequest();
                subscription.SendNext(i);
                subscription.SendComplete();
            }
            // Merge emission order is unspecified, so compare as sets via equivalence.
            var gotten = new List<int>();
            for (var i = 0; i < 5; i++)
                gotten.Add(outProbe.ExpectNext());
            gotten.ShouldAllBeEquivalentTo(new[] {0, 1, 2, 3, 4});
            outProbe.ExpectComplete();
        }

        [Fact]
        public void Composite_Source_must_combine_from_many_inputs_with_simplified_API()
        {
            var probes = Enumerable.Range(1, 3).Select(_ => TestPublisher.CreateManualProbe<int>(this)).ToList();
            var source = probes.Select(Source.FromPublisher).ToList();
            var outProbe = TestSubscriber.CreateManualProbe<int>(this);
            Source.Combine(source[0], source[1], i => new Merge<int, int>(i), source[2])
                .To(Sink.FromSubscriber(outProbe))
                .Run(Materializer);
            var sub = outProbe.ExpectSubscription();
            sub.Request(3);
            for (var i = 0; i < 3; i++)
            {
                var s = probes[i].ExpectSubscription();
                s.ExpectRequest();
                s.SendNext(i);
                s.SendComplete();
            }
            var gotten = new List<int>();
            for (var i = 0; i < 3; i++)
                gotten.Add(outProbe.ExpectNext());
            gotten.ShouldAllBeEquivalentTo(new[] {0, 1, 2});
            outProbe.ExpectComplete();
        }

        [Fact]
        public void Composite_Source_must_combine_from_two_inputs_with_simplified_API()
        {
            var probes = Enumerable.Range(1, 2).Select(_ => TestPublisher.CreateManualProbe<int>(this)).ToList();
            var source = probes.Select(Source.FromPublisher).ToList();
            var outProbe = TestSubscriber.CreateManualProbe<int>(this);
            Source.Combine(source[0], source[1], i => new Merge<int, int>(i))
                .To(Sink.FromSubscriber(outProbe))
                .Run(Materializer);
            var sub = outProbe.ExpectSubscription();
            sub.Request(3);
            for (var i = 0; i < 2; i++)
            {
                var s = probes[i].ExpectSubscription();
                s.ExpectRequest();
                s.SendNext(i);
                s.SendComplete();
            }
            var gotten = new List<int>();
            for (var i = 0; i < 2; i++)
                gotten.Add(outProbe.ExpectNext());
            gotten.ShouldAllBeEquivalentTo(new[] {0, 1});
            outProbe.ExpectComplete();
        }

        [Fact]
        public void Repeat_Source_must_repeat_as_long_as_it_takes()
        {
            var f = Source.Repeat(42).Grouped(1000).RunWith(Sink.First<IEnumerable<int>>(), Materializer);
            f.Result.Should().HaveCount(1000).And.Match(x => x.All(i => i == 42));
        }

        // Fibonacci numbers up to ~10^7 in descending order, as accumulated by the
        // Unfold tests below (each element is prepended, reversing the generation order).
        private static readonly int[] Expected = {
            9227465, 5702887, 3524578, 2178309, 1346269, 832040, 514229, 317811, 196418, 121393, 75025, 46368, 28657, 17711,
            10946, 6765, 4181, 2584, 1597, 987, 610, 377, 233, 144, 89, 55, 34, 21, 13, 8, 5, 3, 2, 1, 1, 0
        };

        [Fact]
        public void Unfold_Source_must_generate_a_finite_fibonacci_sequence()
        {
            // Returning null from the unfold function terminates the stream.
            Source.Unfold(Tuple.Create(0, 1), tuple =>
            {
                var a = tuple.Item1;
                var b = tuple.Item2;
                if (a > 10000000)
                    return null;
                return Tuple.Create(Tuple.Create(b, a + b), a);
            }).RunAggregate(new LinkedList<int>(), (ints, i) =>
            {
                ints.AddFirst(i);
                return ints;
            }, Materializer).Result.Should().Equal(Expected);
        }

        [Fact]
        public void Unfold_Source_must_terminate_with_a_failure_if_there_is_an_exception_thrown()
        {
            EventFilter.Exception<SystemException>(message: "expected").ExpectOne(() =>
            {
                var task = Source.Unfold(Tuple.Create(0, 1), tuple =>
                {
                    var a = tuple.Item1;
                    var b = tuple.Item2;
                    if (a > 10000000)
                        throw new SystemException("expected");
                    return Tuple.Create(Tuple.Create(b, a + b), a);
                }).RunAggregate(new LinkedList<int>(), (ints, i) =>
                {
                    ints.AddFirst(i);
                    return ints;
                }, Materializer);
                task.Invoking(t => t.Wait(TimeSpan.FromSeconds(3)))
                    .ShouldThrow<SystemException>()
                    .WithMessage("expected");
            });
        }

        [Fact]
        public void Unfold_Source_must_generate_a_finite_fibonacci_sequence_asynchronously()
        {
            Source.UnfoldAsync(Tuple.Create(0, 1), tuple =>
            {
                var a = tuple.Item1;
                var b = tuple.Item2;
                if (a > 10000000)
                    return Task.FromResult<Tuple<Tuple<int, int>, int>>(null);
                return Task.FromResult(Tuple.Create(Tuple.Create(b, a + b), a));
            }).RunAggregate(new LinkedList<int>(), (ints, i) =>
            {
                ints.AddFirst(i);
                return ints;
            }, Materializer).Result.Should().Equal(Expected);
        }

        [Fact]
        public void Unfold_Source_must_generate_a_unboundeed_fibonacci_sequence()
        {
            // The unfold never terminates on its own; Take(36) bounds the stream.
            Source.Unfold(Tuple.Create(0, 1), tuple =>
            {
                var a = tuple.Item1;
                var b = tuple.Item2;
                return Tuple.Create(Tuple.Create(b, a + b), a);
            })
                .Take(36)
                .RunAggregate(new LinkedList<int>(), (ints, i) =>
                {
                    ints.AddFirst(i);
                    return ints;
                }, Materializer).Result.Should().Equal(Expected);
        }

        [Fact]
        public void Iterator_Source_must_properly_iterate()
        {
            var expected = new[] {false, true, false, true, false, true, false, true, false, true }.ToList();
            Source.FromEnumerator(() => expected.GetEnumerator())
                .Grouped(10)
                .RunWith(Sink.First<IEnumerable<bool>>(), Materializer)
                .Result.Should()
                .Equal(expected);
        }

        [Fact]
        public void A_Source_must_suitably_override_attribute_handling_methods()
        {
            // Compiles-and-runs check: the fluent attribute methods must return Source, not a base type.
            Source.Single(42).Async().AddAttributes(Attributes.None).Named("");
        }
    }
}
| derwasp/akka.net | src/core/Akka.Streams.Tests/Dsl/SourceSpec.cs | C# | apache-2.0 | 14,043 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.diskPerf;
import java.util.Arrays;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.LogWriter;
import org.apache.geode.internal.cache.DiskRegionHelperFactory;
import org.apache.geode.internal.cache.DiskRegionProperties;
import org.apache.geode.internal.cache.DiskRegionTestingBase;
import org.apache.geode.test.junit.categories.IntegrationTest;
/**
* Disk region Perf test for Overflow only with Sync writes. 1) Performance of get operation for
* entry in memory.
*/
/**
 * Disk region performance test for overflow-only regions with synchronous writes. Measures the
 * throughput of put operations and of get operations for entries that remain in memory.
 */
@Category(IntegrationTest.class)
public class DiskRegOverflowSyncGetInMemPerfJUnitPerformanceTest extends DiskRegionTestingBase {

  /** Size in bytes of each entry value written to the region. */
  private static final int ENTRY_SIZE = 1024;

  /** Number of put (and subsequently get) operations performed. */
  private static final int OP_COUNT = 10000;

  private LogWriter log = null;

  private DiskRegionProperties diskProps = new DiskRegionProperties();

  @Override
  protected final void preSetUp() throws Exception {
    diskProps.setDiskDirs(dirs);
  }

  @Override
  protected final void postSetUp() throws Exception {
    // A large overflow capacity keeps all test entries in memory, so gets never hit disk.
    diskProps.setOverFlowCapacity(100000);
    region = DiskRegionHelperFactory.getSyncOverFlowOnlyRegion(cache, diskProps);
    log = ds.getLogWriter();
  }

  @Override
  protected final void postTearDown() throws Exception {
    if (cache != null) {
      cache.close();
    }
    if (ds != null) {
      ds.disconnect();
    }
  }

  @Test
  public void testPopulatefor1Kbwrites() {
    final byte[] value = new byte[ENTRY_SIZE];
    Arrays.fill(value, (byte) 77);

    // Time OP_COUNT synchronous 1 KB puts.
    long startTime = System.currentTimeMillis();
    for (int i = 0; i < OP_COUNT; i++) {
      region.put("" + (i + 10000), value);
    }
    long endTime = System.currentTimeMillis();
    System.out.println(" done with putting");

    // Time gets for the same keys; all entries are still in memory (see postSetUp).
    long startTimeGet = System.currentTimeMillis();
    for (int i = 0; i < OP_COUNT; i++) {
      region.get("" + (i + 10000));
    }
    long endTimeGet = System.currentTimeMillis();
    System.out.println(" done with getting");

    region.close(); // closes disk file which will flush all buffers

    // Report put throughput; guard against a zero elapsed time on very fast runs.
    float et = endTime - startTime;
    float etSecs = et / 1000f;
    float opPerSec = etSecs == 0 ? 0 : (OP_COUNT / (et / 1000f));
    float bytesPerSec = etSecs == 0 ? 0 : ((OP_COUNT * ENTRY_SIZE) / (et / 1000f));
    String stats = "et=" + et + "ms writes/sec=" + opPerSec + " bytes/sec=" + bytesPerSec;
    log.info(stats);
    System.out.println("Stats for 1 kb writes:" + stats);

    // Report get throughput.
    float etGet = endTimeGet - startTimeGet;
    float etSecsGet = etGet / 1000f;
    float opPerSecGet = etSecsGet == 0 ? 0 : (OP_COUNT / (etGet / 1000f));
    float bytesPerSecGet = etSecsGet == 0 ? 0 : ((OP_COUNT * ENTRY_SIZE) / (etGet / 1000f));
    String statsGet = "et=" + etGet + "ms gets/sec=" + opPerSecGet + " bytes/sec=" + bytesPerSecGet;
    log.info(statsGet);
    System.out.println("Perf Stats of get which is in memory :" + statsGet);
  }
}
| smanvi-pivotal/geode | geode-core/src/test/java/org/apache/geode/internal/cache/diskPerf/DiskRegOverflowSyncGetInMemPerfJUnitPerformanceTest.java | Java | apache-2.0 | 3,891 |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: Protos.proto
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace Messages {
  /// <summary>Holder for reflection information generated from Protos.proto</summary>
  public static partial class ProtosReflection {
    #region Descriptor
    /// <summary>File descriptor for Protos.proto</summary>
    public static pbr::FileDescriptor Descriptor {
      get { return descriptor; }
    }
    private static pbr::FileDescriptor descriptor;
    static ProtosReflection() {
      // Base64-encoded serialized FileDescriptorProto for Protos.proto.
      // Regenerated by protoc; do not edit by hand.
      byte[] descriptorData = global::System.Convert.FromBase64String(
          string.Concat(
            "CgxQcm90b3MucHJvdG8SCG1lc3NhZ2VzIh0KDVJlbmFtZUNvbW1hbmQSDAoE",
            "bmFtZRgBIAEoCSIbCgtSZW5hbWVFdmVudBIMCgRuYW1lGAEgASgJIhUKBVN0",
            "YXRlEgwKBE5hbWUYASABKAlCC6oCCE1lc3NhZ2VzYgZwcm90bzM="));
      // Registers the three message types with their parsers and the CLR
      // property name ("Name") backing proto field 1 of each message.
      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
          new pbr::FileDescriptor[] { },
          new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
            new pbr::GeneratedClrTypeInfo(typeof(global::Messages.RenameCommand), global::Messages.RenameCommand.Parser, new[]{ "Name" }, null, null, null),
            new pbr::GeneratedClrTypeInfo(typeof(global::Messages.RenameEvent), global::Messages.RenameEvent.Parser, new[]{ "Name" }, null, null, null),
            new pbr::GeneratedClrTypeInfo(typeof(global::Messages.State), global::Messages.State.Parser, new[]{ "Name" }, null, null, null)
          }));
    }
    #endregion
  }
#region Messages
  /// <summary>
  /// Generated protobuf message ("RenameCommand" in Protos.proto) with a
  /// single string field "name" (field number 1).
  /// </summary>
  public sealed partial class RenameCommand : pb::IMessage<RenameCommand> {
    // Parser singleton used by reflection and deserialization entry points.
    private static readonly pb::MessageParser<RenameCommand> _parser = new pb::MessageParser<RenameCommand>(() => new RenameCommand());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<RenameCommand> Parser { get { return _parser; } }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Messages.ProtosReflection.Descriptor.MessageTypes[0]; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public RenameCommand() {
      OnConstruction();
    }
    partial void OnConstruction();
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public RenameCommand(RenameCommand other) : this() {
      name_ = other.name_;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public RenameCommand Clone() {
      return new RenameCommand(this);
    }
    /// <summary>Field number for the "name" field.</summary>
    public const int NameFieldNumber = 1;
    private string name_ = "";
    // Proto3 string field: never null, defaults to "".
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Name {
      get { return name_; }
      set {
        name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as RenameCommand);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(RenameCommand other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (Name != other.Name) return false;
      return true;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (Name.Length != 0) hash ^= Name.GetHashCode();
      return hash;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      // Tag 10 = (field 1 << 3) | wire type 2 (length-delimited).
      if (Name.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(Name);
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (Name.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
      }
      return size;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(RenameCommand other) {
      if (other == null) {
        return;
      }
      if (other.Name.Length != 0) {
        Name = other.Name;
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();  // unknown fields are dropped
            break;
          case 10: {
            Name = input.ReadString();
            break;
          }
        }
      }
    }
  }
  /// <summary>
  /// Generated protobuf message ("RenameEvent" in Protos.proto) with a
  /// single string field "name" (field number 1).
  /// </summary>
  public sealed partial class RenameEvent : pb::IMessage<RenameEvent> {
    // Parser singleton used by reflection and deserialization entry points.
    private static readonly pb::MessageParser<RenameEvent> _parser = new pb::MessageParser<RenameEvent>(() => new RenameEvent());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<RenameEvent> Parser { get { return _parser; } }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Messages.ProtosReflection.Descriptor.MessageTypes[1]; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public RenameEvent() {
      OnConstruction();
    }
    partial void OnConstruction();
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public RenameEvent(RenameEvent other) : this() {
      name_ = other.name_;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public RenameEvent Clone() {
      return new RenameEvent(this);
    }
    /// <summary>Field number for the "name" field.</summary>
    public const int NameFieldNumber = 1;
    private string name_ = "";
    // Proto3 string field: never null, defaults to "".
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Name {
      get { return name_; }
      set {
        name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as RenameEvent);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(RenameEvent other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (Name != other.Name) return false;
      return true;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (Name.Length != 0) hash ^= Name.GetHashCode();
      return hash;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      // Tag 10 = (field 1 << 3) | wire type 2 (length-delimited).
      if (Name.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(Name);
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (Name.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
      }
      return size;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(RenameEvent other) {
      if (other == null) {
        return;
      }
      if (other.Name.Length != 0) {
        Name = other.Name;
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();  // unknown fields are dropped
            break;
          case 10: {
            Name = input.ReadString();
            break;
          }
        }
      }
    }
  }
  /// <summary>
  /// Generated protobuf message ("State" in Protos.proto) with a single
  /// string field "Name" (field number 1).
  /// </summary>
  public sealed partial class State : pb::IMessage<State> {
    // Parser singleton used by reflection and deserialization entry points.
    private static readonly pb::MessageParser<State> _parser = new pb::MessageParser<State>(() => new State());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<State> Parser { get { return _parser; } }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Messages.ProtosReflection.Descriptor.MessageTypes[2]; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public State() {
      OnConstruction();
    }
    partial void OnConstruction();
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public State(State other) : this() {
      name_ = other.name_;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public State Clone() {
      return new State(this);
    }
    /// <summary>Field number for the "Name" field.</summary>
    public const int NameFieldNumber = 1;
    private string name_ = "";
    // Proto3 string field: never null, defaults to "".
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Name {
      get { return name_; }
      set {
        name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as State);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(State other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (Name != other.Name) return false;
      return true;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (Name.Length != 0) hash ^= Name.GetHashCode();
      return hash;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      // Tag 10 = (field 1 << 3) | wire type 2 (length-delimited).
      if (Name.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(Name);
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (Name.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
      }
      return size;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(State other) {
      if (other == null) {
        return;
      }
      if (other.Name.Length != 0) {
        Name = other.Name;
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();  // unknown fields are dropped
            break;
          case 10: {
            Name = input.ReadString();
            break;
          }
        }
      }
    }
  }
#endregion
}
#endregion Designer generated code
| masteryee/protoactor-dotnet | examples/Persistence/Messages/Protos.g.cs | C# | apache-2.0 | 12,536 |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.tools.ant.taskdefs.optional;
// -- Batik classes ----------------------------------------------------------
import org.apache.batik.transcoder.Transcoder;
import org.apache.batik.apps.rasterizer.SVGConverterController;
import org.apache.batik.apps.rasterizer.SVGConverterSource;
// -- Ant classes ------------------------------------------------------------
import org.apache.tools.ant.Task;
// -- Java SDK classes -------------------------------------------------------
import java.io.File;
import java.util.Map;
import java.util.List;
/**
* Implements simple controller for the <code>SVGConverter</code> operation.
*
* <p>This is almost the same as the
* {@link org.apache.batik.apps.rasterizer.DefaultSVGConverterController DefaultSVGConverterController}
* except this produces error message when the conversion fails.</p>
*
* <p>See {@link SVGConverterController} for the method documentation.</p>
*
* @see SVGConverterController SVGConverterController
* @see org.apache.batik.apps.rasterizer.DefaultSVGConverterController DefaultSVGConverterController
*
* @author <a href="mailto:ruini@iki.fi">Henri Ruini</a>
* @version $Id: RasterizerTaskSVGConverterController.java 479617 2006-11-27 13:43:51Z dvholten $
*/
public class RasterizerTaskSVGConverterController implements SVGConverterController {

    // -- Variables ----------------------------------------------------------
    /** Ant task that receives log messages; {@code null} disables logging. */
    protected Task executingTask = null;

    // -- Constructors -------------------------------------------------------
    /**
     * Don't allow public usage.
     */
    protected RasterizerTaskSVGConverterController() {
    }

    /**
     * Sets the given Ant task to receive log messages.
     *
     * @param task Ant task. The value can be <code>null</code> when log messages won't be written.
     */
    public RasterizerTaskSVGConverterController(Task task) {
        executingTask = task;
    }

    // -- Public interface ---------------------------------------------------
    /** Always proceeds with the computed conversion task. */
    public boolean proceedWithComputedTask(Transcoder transcoder,
                                           Map hints,
                                           List sources,
                                           List dest){
        return true;
    }

    /** Always proceeds with transcoding of an individual source. */
    public boolean proceedWithSourceTranscoding(SVGConverterSource source,
                                                File dest) {
        return true;
    }

    /**
     * Logs the failure through the configured Ant task (when present) and
     * tells the converter to continue with the remaining sources.
     */
    public boolean proceedOnSourceTranscodingFailure(SVGConverterSource source,
                                                     File dest,
                                                     String errorCode){
        Task task = executingTask;
        if (task != null) {
            StringBuilder message = new StringBuilder();
            message.append("Unable to rasterize image '")
                   .append(source.getName())
                   .append("' to '")
                   .append(dest.getAbsolutePath())
                   .append("': ")
                   .append(errorCode);
            task.log(message.toString());
        }
        return true;
    }

    /** Successful transcoding needs no reporting. */
    public void onSourceTranscodingSuccess(SVGConverterSource source,
                                           File dest){
    }
}
| stumoodie/PathwayEditor | libs/batik-1.7/contrib/rasterizertask/sources/org/apache/tools/ant/taskdefs/optional/RasterizerTaskSVGConverterController.java | Java | apache-2.0 | 3,878 |
// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include "paddle/fluid/operators/math/math_function.h"
#include "paddle/fluid/platform/dynload/cublas.h"
namespace paddle {
namespace operators {
namespace math {
// Type-to-cuBLAS dispatch: each specialization forwards to the cuBLAS entry
// points matching the element type (S = float, D = double, H = half).
template <typename T>
struct CUBlas;

// float: forwards variadic args verbatim to the single-precision routines and
// converts cuBLAS status codes into Paddle errors via PADDLE_ENFORCE.
template <>
struct CUBlas<float> {
  template <typename... ARGS>
  static void GEMM(ARGS... args) {
    PADDLE_ENFORCE(platform::dynload::cublasSgemm(args...));
  }

  template <typename... ARGS>
  static void AXPY(ARGS... args) {
    PADDLE_ENFORCE(platform::dynload::cublasSaxpy(args...));
  }

  template <typename... ARGS>
  static void GEMV(ARGS... args) {
    PADDLE_ENFORCE(platform::dynload::cublasSgemv(args...));
  }

  template <typename... ARGS>
  static void GEMM_BATCH(ARGS... args) {
#if CUDA_VERSION >= 8000
    // cublasSgemmStridedBatched was introduced in CUDA 8.0.
    PADDLE_ENFORCE(platform::dynload::cublasSgemmStridedBatched(args...));
#else
    PADDLE_THROW("SgemmStridedBatched is not supported on cuda <= 7.5");
#endif
  }
};
// double: forwards variadic args verbatim to the double-precision routines and
// converts cuBLAS status codes into Paddle errors via PADDLE_ENFORCE.
template <>
struct CUBlas<double> {
  template <typename... ARGS>
  static void GEMM(ARGS... args) {
    PADDLE_ENFORCE(platform::dynload::cublasDgemm(args...));
  }

  template <typename... ARGS>
  static void AXPY(ARGS... args) {
    PADDLE_ENFORCE(platform::dynload::cublasDaxpy(args...));
  }

  template <typename... ARGS>
  static void GEMV(ARGS... args) {
    PADDLE_ENFORCE(platform::dynload::cublasDgemv(args...));
  }

  template <typename... ARGS>
  static void GEMM_BATCH(ARGS... args) {
#if CUDA_VERSION >= 8000
    // cublasDgemmStridedBatched was introduced in CUDA 8.0.
    PADDLE_ENFORCE(platform::dynload::cublasDgemmStridedBatched(args...));
#else
    PADDLE_THROW("DgemmStridedBatched is not supported on cuda <= 7.5");
#endif
  }
};
// float16: unlike the float/double specializations this one spells the
// signatures out, because Paddle's platform::float16 must be reinterpreted as
// CUDA's __half before the pointers can be handed to cuBLAS.
template <>
struct CUBlas<platform::float16> {
  using float16 = platform::float16;

  // C = alpha * op(A) * op(B) + beta * C in half precision via cublasHgemm.
  static void GEMM(cublasHandle_t handle, cublasOperation_t transa,
                   cublasOperation_t transb, int m, int n, int k,
                   const float16 *alpha, const float16 *A, int lda,
                   const float16 *B, int ldb, const float16 *beta, float16 *C,
                   int ldc) {
    PADDLE_ENFORCE(
        platform::dynload::cublasHgemm(handle, transa, transb, m, n, k,
                                       reinterpret_cast<const __half *>(alpha),
                                       reinterpret_cast<const __half *>(A), lda,
                                       reinterpret_cast<const __half *>(B), ldb,
                                       reinterpret_cast<const __half *>(beta),
                                       reinterpret_cast<__half *>(C), ldc));
  }

  // Strided batched half-precision GEMM; requires CUDA >= 8.0.
  static void GEMM_BATCH(cublasHandle_t handle, cublasOperation_t transa,
                         cublasOperation_t transb, int m, int n, int k,
                         const float16 *alpha, const float16 *A, int lda,
                         long long int strideA, const float16 *B,  // NOLINT
                         int ldb, long long int strideB,           // NOLINT
                         const float16 *beta, float16 *C, int ldc,
                         long long int strideC,  // NOLINT
                         int batchCount) {
#if CUDA_VERSION >= 8000
    PADDLE_ENFORCE(platform::dynload::cublasHgemmStridedBatched(
        handle, transa, transb, m, n, k,
        reinterpret_cast<const __half *>(alpha),
        reinterpret_cast<const __half *>(A), lda, strideA,
        reinterpret_cast<const __half *>(B), ldb, strideB,
        reinterpret_cast<const __half *>(beta), reinterpret_cast<__half *>(C),
        ldc, strideC, batchCount));
#else
    PADDLE_THROW("HgemmStridedBatched is not supported on cuda <= 7.5");
#endif
  }
};
// Row-major GEMM on top of column-major cuBLAS: C = alpha*op(A)*op(B) + beta*C.
template <>
template <typename T>
void Blas<platform::CUDADeviceContext>::GEMM(CBLAS_TRANSPOSE transA,
                                             CBLAS_TRANSPOSE transB, int M,
                                             int N, int K, T alpha, const T *A,
                                             const T *B, T beta, T *C) const {
  // Note that cublas follows fortran order, so the order is different from
  // the cblas convention: the row-major product is computed as
  // C^T = op(B)^T * op(A)^T by swapping the A/B operands in the call below.
  int lda = (transA == CblasNoTrans) ? K : M;
  int ldb = (transB == CblasNoTrans) ? N : K;
  cublasOperation_t cuTransA =
      (transA == CblasNoTrans) ? CUBLAS_OP_N : CUBLAS_OP_T;
  cublasOperation_t cuTransB =
      (transB == CblasNoTrans) ? CUBLAS_OP_N : CUBLAS_OP_T;
  // ldc is N: the MxN row-major C is an NxM column-major matrix to cuBLAS.
  CUBlas<T>::GEMM(context_.cublas_handle(), cuTransB, cuTransA, N, M, K, &alpha,
                  B, ldb, A, lda, &beta, C, N);
}
// Half-precision row-major GEMM. Uses cublasGemmEx (fp16 in/out, fp32 math)
// on CUDA >= 8.0, with tensor-core algo selection on CUDA >= 9.0 / sm_70+,
// and falls back to cublasHgemm on older toolkits.
template <>
template <>
inline void Blas<platform::CUDADeviceContext>::GEMM(
    CBLAS_TRANSPOSE transA, CBLAS_TRANSPOSE transB, int M, int N, int K,
    platform::float16 alpha, const platform::float16 *A,
    const platform::float16 *B, platform::float16 beta,
    platform::float16 *C) const {
  // Note that cublas follows fortran order, so the order is different from
  // the cblas convention: compute C^T = B^T * A^T by swapping A/B below.
  int lda = (transA == CblasNoTrans) ? K : M;
  int ldb = (transB == CblasNoTrans) ? N : K;
  cublasOperation_t cuTransA =
      (transA == CblasNoTrans) ? CUBLAS_OP_N : CUBLAS_OP_T;
  cublasOperation_t cuTransB =
      (transB == CblasNoTrans) ? CUBLAS_OP_N : CUBLAS_OP_T;

  // TODO(kexinzhao): add processing code for compute capability < 53 case
  PADDLE_ENFORCE_GE(context_.GetComputeCapability(), 53,
                    "cublas fp16 gemm requires GPU compute capability >= 53");

#if CUDA_VERSION >= 8000
  float h_alpha = static_cast<float>(alpha);
  float h_beta = static_cast<float>(beta);

  cublasGemmAlgo_t algo = CUBLAS_GEMM_DFALT;
#if CUDA_VERSION >= 9000
  if (context_.GetComputeCapability() >= 70) {
    PADDLE_ENFORCE(platform::dynload::cublasSetMathMode(
        context_.cublas_handle(), CUBLAS_TENSOR_OP_MATH));
    algo = CUBLAS_GEMM_DFALT_TENSOR_OP;
  } else {
    PADDLE_ENFORCE(platform::dynload::cublasSetMathMode(
        context_.cublas_handle(), CUBLAS_DEFAULT_MATH));
  }
#endif  // CUDA_VERSION >= 9000

  // cublasHgemm does true FP16 computation which is slow for non-Volta
  // GPUs. So use cublasGemmEx instead which does pesudo FP16 computation:
  // input/output in fp16, computation in fp32, which can also be accelerated
  // using tensor cores in volta GPUs.
  PADDLE_ENFORCE(platform::dynload::cublasGemmEx(
      context_.cublas_handle(), cuTransB, cuTransA, N, M, K, &h_alpha, B,
      CUDA_R_16F, ldb, A, CUDA_R_16F, lda, &h_beta, C, CUDA_R_16F, N,
      CUDA_R_32F, algo));
#else
  // CUDA 7.5 does not support cublasGemmEx, hence we fall back to use hgemm.
  // BUGFIX: the previous fallback referenced undeclared identifiers h_A/h_B/
  // h_C (and h_alpha/h_beta, which are only declared in the CUDA >= 8.0
  // branch above), so this path could not compile. Pass the original fp16
  // pointers and scalars directly instead.
  CUBlas<platform::float16>::GEMM(context_.cublas_handle(), cuTransB, cuTransA,
                                  N, M, K, &alpha, B, ldb, A, lda, &beta, C,
                                  N);
#endif  // CUDA_VERSION >= 8000
}
// Row-major GEMM with explicit leading dimensions and bool transpose flags.
template <>
template <typename T>
void Blas<platform::CUDADeviceContext>::GEMM(bool transA, bool transB, int M,
                                             int N, int K, T alpha, const T *A,
                                             int lda, const T *B, int ldb,
                                             T beta, T *C, int ldc) const {
  // Note that cublas follows fortran order, so the order is different from
  // the cblas convention: A/B are swapped in the call below so the row-major
  // result C (MxN, leading dim ldc) comes out correctly.
  cublasOperation_t cuTransA = transA ? CUBLAS_OP_T : CUBLAS_OP_N;
  cublasOperation_t cuTransB = transB ? CUBLAS_OP_T : CUBLAS_OP_N;
  CUBlas<T>::GEMM(context_.cublas_handle(), cuTransB, cuTransA, N, M, K, &alpha,
                  B, ldb, A, lda, &beta, C, ldc);
}
// y = alpha * x + y over n contiguous elements (unit strides), via cublas?axpy.
template <>
template <typename T>
void Blas<platform::CUDADeviceContext>::AXPY(int n, T alpha, const T *x,
                                             T *y) const {
  CUBlas<T>::AXPY(context_.cublas_handle(), n, &alpha, x, 1, y, 1);
}
// Row-major matrix-vector multiply: C = alpha * op(A) * B + beta * C.
template <>
template <typename T>
void Blas<platform::CUDADeviceContext>::GEMV(bool trans_a, int M, int N,
                                             T alpha, const T *A, const T *B,
                                             T beta, T *C) const {
  // The transpose flag is inverted because A is row-major while cublas gemv
  // expects column-major: a non-transposed row-major A is CUBLAS_OP_T to it.
  cublasOperation_t cuTransA = !trans_a ? CUBLAS_OP_T : CUBLAS_OP_N;

  CUBlas<T>::GEMV(context_.cublas_handle(), cuTransA, N, M, &alpha, A, N, B, 1,
                  &beta, C, 1);
}
// Strided batched row-major GEMM over batchCount matrices; each output matrix
// is MxN, so consecutive C matrices are M*N elements apart.
template <>
template <typename T>
void Blas<platform::CUDADeviceContext>::BatchedGEMM(
    CBLAS_TRANSPOSE transA, CBLAS_TRANSPOSE transB, int M, int N, int K,
    T alpha, const T *A, const T *B, T beta, T *C, int batchCount,
    int64_t strideA, int64_t strideB) const {
  // Note that cublas follows fortran order, so the order is different from
  // the cblas convention: A/B (and their strides) are swapped in the call.
  int lda = (transA == CblasNoTrans) ? K : M;
  int ldb = (transB == CblasNoTrans) ? N : K;
  int ldc = N;
  cublasOperation_t cuTransA =
      (transA == CblasNoTrans) ? CUBLAS_OP_N : CUBLAS_OP_T;
  cublasOperation_t cuTransB =
      (transB == CblasNoTrans) ? CUBLAS_OP_N : CUBLAS_OP_T;
  const int64_t strideC = M * N;

  CUBlas<T>::GEMM_BATCH(context_.cublas_handle(), cuTransB, cuTransA, N, M, K,
                        &alpha, B, ldb, strideB, A, lda, strideA, &beta, C, ldc,
                        strideC, batchCount);
}
} // namespace math
} // namespace operators
} // namespace paddle
| pkuyym/Paddle | paddle/fluid/operators/math/blas_impl.cu.h | C | apache-2.0 | 9,628 |
#
# Author:: Bryan McLellan <btm@opscode.com>
# Copyright:: Copyright (c) 2012 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
describe Chef::Resource::IpsPackage, "initialize" do
  # Fresh resource per example, built lazily on first reference.
  let(:resource) { Chef::Resource::IpsPackage.new("crypto/gnupg") }

  it "should return a Chef::Resource::IpsPackage" do
    resource.should be_a_kind_of(Chef::Resource::IpsPackage)
  end

  it "should set the resource_name to :ips_package" do
    resource.resource_name.should eql(:ips_package)
  end

  it "should set the provider to Chef::Provider::Package::Ips" do
    resource.provider.should eql(Chef::Provider::Package::Ips)
  end

  it "should support accept_license" do
    resource.accept_license(true)
    resource.accept_license.should eql(true)
  end
end
| elgalu/chef-depth-1 | spec/unit/resource/ips_package_spec.rb | Ruby | apache-2.0 | 1,353 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package javax.mail;
/**
 * An interface implemented by objects that can supply the message context in
 * which they are contained (per the JavaMail API, typically implemented by a
 * {@code DataSource} to make context available to a {@code DataContentHandler}).
 *
 * @version $Rev$ $Date$
 */
public interface MessageAware {
    /**
     * Returns the message context describing where this object sits within
     * its containing message.
     *
     * @return the message context for this object
     */
    MessageContext getMessageContext();
}
| salyh/geronimo-specs | geronimo-javamail_1.5_spec/src/main/java/javax/mail/MessageAware.java | Java | apache-2.0 | 952 |
/**
* @file
* Point To Point Protocol Sequential API module
*
*/
/*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
*
* This file is part of the lwIP TCP/IP stack.
*
*/
#include "lwip/opt.h"
#if LWIP_PPP_API /* don't build if not configured for use in lwipopts.h */
#include "lwip/pppapi.h"
#include "lwip/priv/tcpip_priv.h"
#include "netif/ppp/pppoe.h"
#include "netif/ppp/pppol2tp.h"
#include "netif/ppp/pppos.h"
/**
* Call ppp_set_default() inside the tcpip_thread context.
*/
static err_t
pppapi_do_ppp_set_default(struct tcpip_api_call *m)
{
struct pppapi_msg_msg *msg = (struct pppapi_msg_msg *)m;
ppp_set_default(msg->ppp);
return ERR_OK;
}
/**
 * Call ppp_set_default() in a thread-safe way by running that function inside the
 * tcpip_thread context.
 *
 * @param pcb PPP control block to make the default network interface
 */
void
pppapi_set_default(ppp_pcb *pcb)
{
  struct pppapi_msg msg;
  msg.msg.ppp = pcb;
  /* Blocks until tcpip_thread has executed the do_* helper. */
  tcpip_api_call(pppapi_do_ppp_set_default, &msg.call);
}
/**
 * Call ppp_set_auth() inside the tcpip_thread context.
 */
static err_t
pppapi_do_ppp_set_auth(struct tcpip_api_call *m)
{
  /* m is the 'call' member embedded at the start of a struct pppapi_msg. */
  struct pppapi_msg *msg = (struct pppapi_msg *)m;

  ppp_set_auth(msg->msg.ppp, msg->msg.msg.setauth.authtype,
               msg->msg.msg.setauth.user, msg->msg.msg.setauth.passwd);
  return ERR_OK;
}
/**
 * Call ppp_set_auth() in a thread-safe way by running that function inside the
 * tcpip_thread context.
 *
 * @param pcb PPP control block
 * @param authtype authentication type (PPPAUTHTYPE_* value)
 * @param user user name for authentication (not copied; must stay valid)
 * @param passwd password for authentication (not copied; must stay valid)
 */
void
pppapi_set_auth(ppp_pcb *pcb, u8_t authtype, const char *user, const char *passwd)
{
  struct pppapi_msg msg;
  msg.msg.ppp = pcb;
  msg.msg.msg.setauth.authtype = authtype;
  msg.msg.msg.setauth.user = user;
  msg.msg.msg.setauth.passwd = passwd;
  tcpip_api_call(pppapi_do_ppp_set_auth, &msg.call);
}
#if PPP_NOTIFY_PHASE
/**
* Call ppp_set_notify_phase_callback() inside the tcpip_thread context.
*/
static err_t
pppapi_do_ppp_set_notify_phase_callback(struct tcpip_api_call *m)
{
struct pppapi_msg_msg *msg = (struct pppapi_msg_msg *)m;
ppp_set_notify_phase_callback(msg->ppp, msg->msg.setnotifyphasecb.notify_phase_cb);
return ERR_OK;
}
/**
 * Call ppp_set_notify_phase_callback() in a thread-safe way by running that
 * function inside the tcpip_thread context.
 *
 * @param pcb PPP control block
 * @param notify_phase_cb callback invoked on PPP phase changes
 */
void
pppapi_set_notify_phase_callback(ppp_pcb *pcb, ppp_notify_phase_cb_fn notify_phase_cb)
{
  struct pppapi_msg msg;
  /* Removed the stray "msg.function = ..." assignment that was left over from
   * the old message-dispatch API: this wrapper passes the function pointer
   * explicitly to tcpip_api_call(), like every other pppapi wrapper here. */
  msg.msg.ppp = pcb;
  msg.msg.msg.setnotifyphasecb.notify_phase_cb = notify_phase_cb;
  tcpip_api_call(pppapi_do_ppp_set_notify_phase_callback, &msg.call);
}
#endif /* PPP_NOTIFY_PHASE */
#if PPPOS_SUPPORT
/**
 * Call pppos_create() inside the tcpip_thread context.
 */
static err_t
pppapi_do_pppos_create(struct tcpip_api_call *m)
{
  /* m is the 'call' member embedded at the start of a struct pppapi_msg. */
  struct pppapi_msg *msg = (struct pppapi_msg *)(m);

  /* The created pcb (or NULL) is stored back into the message so the caller
   * in pppapi_pppos_create() can return it. */
  msg->msg.ppp = pppos_create(msg->msg.msg.serialcreate.pppif, msg->msg.msg.serialcreate.output_cb,
    msg->msg.msg.serialcreate.link_status_cb, msg->msg.msg.serialcreate.ctx_cb);
  return ERR_OK;
}
/**
 * Call pppos_create() in a thread-safe way by running that function inside the
 * tcpip_thread context.
 *
 * @param pppif netif to attach the PPP-over-serial session to
 * @param output_cb callback used to output serial data
 * @param link_status_cb callback notified of link status changes
 * @param ctx_cb context pointer passed back to the callbacks
 * @return the new PPP control block, or NULL on allocation failure
 */
ppp_pcb*
pppapi_pppos_create(struct netif *pppif, pppos_output_cb_fn output_cb,
                    ppp_link_status_cb_fn link_status_cb, void *ctx_cb)
{
  struct pppapi_msg msg;
  msg.msg.msg.serialcreate.pppif = pppif;
  msg.msg.msg.serialcreate.output_cb = output_cb;
  msg.msg.msg.serialcreate.link_status_cb = link_status_cb;
  msg.msg.msg.serialcreate.ctx_cb = ctx_cb;
  tcpip_api_call(pppapi_do_pppos_create, &msg.call);
  /* Result was stored into the message by the do_* helper. */
  return msg.msg.ppp;
}
#endif /* PPPOS_SUPPORT */
#if PPPOE_SUPPORT
/**
* Call pppoe_create() inside the tcpip_thread context.
*/
static err_t
pppapi_do_pppoe_create(struct tcpip_api_call *m)
{
struct pppapi_msg_msg *msg = (struct pppapi_msg_msg *)m;
msg->ppp = pppoe_create(msg->msg.ethernetcreate.pppif, msg->msg.ethernetcreate.ethif,
msg->msg.ethernetcreate.service_name, msg->msg.ethernetcreate.concentrator_name,
msg->msg.ethernetcreate.link_status_cb, msg->msg.ethernetcreate.ctx_cb);
return ERR_OK;
}
/**
 * Call pppoe_create() in a thread-safe way by running that function inside the
 * tcpip_thread context.
 *
 * @param pppif netif to attach the PPPoE session to
 * @param ethif Ethernet netif carrying the PPPoE traffic
 * @param service_name requested PPPoE service name (may be NULL)
 * @param concentrator_name requested access concentrator name (may be NULL)
 * @param link_status_cb callback notified of link status changes
 * @param ctx_cb context pointer passed back to the callbacks
 * @return the new PPP control block, or NULL on allocation failure
 */
ppp_pcb*
pppapi_pppoe_create(struct netif *pppif, struct netif *ethif, const char *service_name,
                    const char *concentrator_name, ppp_link_status_cb_fn link_status_cb,
                    void *ctx_cb)
{
  struct pppapi_msg msg;
  msg.msg.msg.ethernetcreate.pppif = pppif;
  msg.msg.msg.ethernetcreate.ethif = ethif;
  msg.msg.msg.ethernetcreate.service_name = service_name;
  msg.msg.msg.ethernetcreate.concentrator_name = concentrator_name;
  msg.msg.msg.ethernetcreate.link_status_cb = link_status_cb;
  msg.msg.msg.ethernetcreate.ctx_cb = ctx_cb;
  tcpip_api_call(pppapi_do_pppoe_create, &msg.call);
  /* Result was stored into the message by the do_* helper. */
  return msg.msg.ppp;
}
#endif /* PPPOE_SUPPORT */
#if PPPOL2TP_SUPPORT
/**
 * Call pppol2tp_create() inside the tcpip_thread context.
 */
static err_t
pppapi_do_pppol2tp_create(struct tcpip_api_call *m)
{
  /* NOTE(review): cast to struct pppapi_msg_msg * here, but other handlers
   * in this file cast to struct pppapi_msg * -- verify both layouts place
   * the payload at the address of the 'call' member. */
  struct pppapi_msg_msg *msg = (struct pppapi_msg_msg *)m;
  msg->ppp = pppol2tp_create(msg->msg.l2tpcreate.pppif,
    msg->msg.l2tpcreate.netif, msg->msg.l2tpcreate.ipaddr, msg->msg.l2tpcreate.port,
#if PPPOL2TP_AUTH_SUPPORT
    msg->msg.l2tpcreate.secret,
    msg->msg.l2tpcreate.secret_len,
#else /* PPPOL2TP_AUTH_SUPPORT */
    /* NOTE(review): only NULL is passed here where the enabled branch passes
     * two arguments -- presumably pppol2tp_create()'s signature drops
     * secret_len when auth support is compiled out; confirm. */
    NULL,
#endif /* PPPOL2TP_AUTH_SUPPORT */
    msg->msg.l2tpcreate.link_status_cb, msg->msg.l2tpcreate.ctx_cb);
  /* The real result of the creation travels back in msg->ppp. */
  return ERR_OK;
}
/**
 * Call pppol2tp_create() in a thread-safe way by running that function inside the
 * tcpip_thread context.
 */
ppp_pcb*
pppapi_pppol2tp_create(struct netif *pppif, struct netif *netif, ip_addr_t *ipaddr, u16_t port,
               const u8_t *secret, u8_t secret_len,
               ppp_link_status_cb_fn link_status_cb, void *ctx_cb)
{
  /* Stack allocation is safe: tcpip_api_call() blocks until completion. */
  struct pppapi_msg msg;
  msg.msg.msg.l2tpcreate.pppif = pppif;
  msg.msg.msg.l2tpcreate.netif = netif;
  msg.msg.msg.l2tpcreate.ipaddr = ipaddr;
  msg.msg.msg.l2tpcreate.port = port;
#if PPPOL2TP_AUTH_SUPPORT
  /* When auth support is disabled, secret/secret_len are accepted but
   * ignored (may trigger unused-parameter warnings). */
  msg.msg.msg.l2tpcreate.secret = secret;
  msg.msg.msg.l2tpcreate.secret_len = secret_len;
#endif /* PPPOL2TP_AUTH_SUPPORT */
  msg.msg.msg.l2tpcreate.link_status_cb = link_status_cb;
  msg.msg.msg.l2tpcreate.ctx_cb = ctx_cb;
  tcpip_api_call(pppapi_do_pppol2tp_create, &msg.call);
  /* Set by pppapi_do_pppol2tp_create() in the tcpip thread. */
  return msg.msg.ppp;
}
#endif /* PPPOL2TP_SUPPORT */
/**
 * Call ppp_connect() inside the tcpip_thread context.
 */
static err_t
pppapi_do_ppp_connect(struct tcpip_api_call *m)
{
  /* The message was posted with &msg.call, so casting back to the
   * enclosing struct pppapi_msg recovers the payload. */
  struct pppapi_msg *msg = (struct pppapi_msg *)m;
  return ppp_connect(msg->msg.ppp, msg->msg.msg.connect.holdoff);
}
/**
 * Call ppp_connect() in a thread-safe way by running that function inside the
 * tcpip_thread context.
 *
 * @param pcb the PPP control block to connect
 * @param holdoff delay (passed through to ppp_connect) before connecting
 */
err_t
pppapi_connect(ppp_pcb *pcb, u16_t holdoff)
{
  /* Stack allocation is safe: tcpip_api_call() blocks until completion. */
  struct pppapi_msg msg;
  msg.msg.ppp = pcb;
  msg.msg.msg.connect.holdoff = holdoff;
  return tcpip_api_call(pppapi_do_ppp_connect, &msg.call);
}
#if PPP_SERVER
/**
 * Call ppp_listen() inside the tcpip_thread context.
 */
static void
pppapi_do_ppp_listen(struct pppapi_msg_msg *msg)
{
  msg->err = ppp_listen(msg->ppp, msg->msg.listen.addrs);
  /* Wake up the caller blocked in TCPIP_PPPAPI(). */
  TCPIP_PPPAPI_ACK(msg);
}
/**
 * Call ppp_listen() in a thread-safe way by running that function inside the
 * tcpip_thread context.
 *
 * NOTE(review): this pair uses the older mailbox style (msg.function +
 * TCPIP_PPPAPI/TCPIP_PPPAPI_ACK) while the rest of this file uses
 * tcpip_api_call() -- consider unifying on one mechanism.
 */
err_t
pppapi_listen(ppp_pcb *pcb, struct ppp_addrs *addrs)
{
  struct pppapi_msg msg;
  msg.function = pppapi_do_ppp_listen;
  msg.msg.ppp = pcb;
  msg.msg.msg.listen.addrs = addrs;
  /* Blocks until pppapi_do_ppp_listen() has acknowledged the message. */
  TCPIP_PPPAPI(&msg);
  return msg.msg.err;
}
#endif /* PPP_SERVER */
/**
 * tcpip_thread-side handler: unpacks the marshaled arguments and performs
 * the actual ppp_close() call.
 */
static err_t
pppapi_do_ppp_close(struct tcpip_api_call *m)
{
  struct pppapi_msg *apimsg = (struct pppapi_msg *)m;
  return ppp_close(apimsg->msg.ppp, apimsg->msg.msg.close.nocarrier);
}
/**
 * Thread-safe ppp_close(): packs the arguments into a message and has the
 * tcpip_thread execute the close on our behalf, blocking until it is done.
 */
err_t
pppapi_close(ppp_pcb *pcb, u8_t nocarrier)
{
  struct pppapi_msg apimsg;
  apimsg.msg.msg.close.nocarrier = nocarrier;
  apimsg.msg.ppp = pcb;
  return tcpip_api_call(pppapi_do_ppp_close, &apimsg.call);
}
/**
 * Call ppp_free() inside the tcpip_thread context.
 */
static err_t
pppapi_do_ppp_free(struct tcpip_api_call *m)
{
  /* NOTE(review): cast to struct pppapi_msg_msg * here, but sibling
   * handlers (e.g. pppapi_do_ppp_close) cast to struct pppapi_msg * --
   * verify against the struct layout that both resolve to the same
   * payload address. */
  struct pppapi_msg_msg *msg = (struct pppapi_msg_msg *)m;
  return ppp_free(msg->ppp);
}
/**
 * Call ppp_free() in a thread-safe way by running that function inside the
 * tcpip_thread context.
 */
err_t
pppapi_free(ppp_pcb *pcb)
{
  /* Stack allocation is safe: tcpip_api_call() blocks until completion. */
  struct pppapi_msg msg;
  msg.msg.ppp = pcb;
  return tcpip_api_call(pppapi_do_ppp_free, &msg.call);
}
/**
 * tcpip_thread-side handler performing the actual ppp_ioctl() call with
 * the arguments carried in the message.
 */
static err_t
pppapi_do_ppp_ioctl(struct tcpip_api_call *m)
{
  struct pppapi_msg *apimsg = (struct pppapi_msg *)m;
  return ppp_ioctl(apimsg->msg.ppp, apimsg->msg.msg.ioctl.cmd, apimsg->msg.msg.ioctl.arg);
}
/**
 * Thread-safe ppp_ioctl(): packs the pcb, the command and its argument into
 * a message and lets the tcpip_thread run the ioctl, returning its result.
 */
err_t
pppapi_ioctl(ppp_pcb *pcb, u8_t cmd, void *arg)
{
  struct pppapi_msg apimsg;
  apimsg.msg.msg.ioctl.arg = arg;
  apimsg.msg.msg.ioctl.cmd = cmd;
  apimsg.msg.ppp = pcb;
  return tcpip_api_call(pppapi_do_ppp_ioctl, &apimsg.call);
}
#endif /* LWIP_PPP_API */
| jaracil/esp-idf | components/lwip/api/pppapi.c | C | apache-2.0 | 10,470 |
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.dmn.client.editors.expressions.types.function.supplementary.pmml;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.soup.commons.util.Sets;
import org.kie.workbench.common.dmn.api.definition.model.Definitions;
import org.kie.workbench.common.dmn.api.definition.model.Import;
import org.kie.workbench.common.dmn.api.definition.model.ImportDMN;
import org.kie.workbench.common.dmn.api.definition.model.ImportPMML;
import org.kie.workbench.common.dmn.api.editors.included.DMNImportTypes;
import org.kie.workbench.common.dmn.api.editors.included.PMMLDocumentMetadata;
import org.kie.workbench.common.dmn.api.editors.included.PMMLIncludedModel;
import org.kie.workbench.common.dmn.api.editors.included.PMMLModelMetadata;
import org.kie.workbench.common.dmn.api.editors.included.PMMLParameterMetadata;
import org.kie.workbench.common.dmn.api.property.dmn.LocationURI;
import org.kie.workbench.common.dmn.client.editors.included.imports.IncludedModelsPageStateProviderImpl;
import org.kie.workbench.common.dmn.client.graph.DMNGraphUtils;
import org.kie.workbench.common.dmn.client.service.DMNClientServicesProxy;
import org.kie.workbench.common.stunner.core.client.service.ServiceCallback;
import org.kie.workbench.common.stunner.core.diagram.Diagram;
import org.kie.workbench.common.stunner.core.diagram.Metadata;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.uberfire.backend.vfs.Path;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyListOf;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class PMMLDocumentMetadataProviderTest {
    @Mock
    private DMNGraphUtils graphUtils;
    @Mock
    private DMNClientServicesProxy clientServicesProxy;
    @Mock
    private IncludedModelsPageStateProviderImpl stateProvider;
    @Mock
    private Path dmnModelPath;
    // Captures the list of PMML includes the provider forwards to the proxy.
    @Captor
    private ArgumentCaptor<List<PMMLIncludedModel>> pmmlIncludedModelsArgumentCaptor;
    // Captures the async callback so tests can complete it synchronously.
    @Captor
    private ArgumentCaptor<ServiceCallback<List<PMMLDocumentMetadata>>> callbackArgumentCaptor;
    // Real Definitions instance so individual tests can add imports to it.
    private Definitions definitions;
    // System under test.
    private PMMLDocumentMetadataProvider provider;
    @Before
    public void setup() {
        this.definitions = new Definitions();
        this.provider = new PMMLDocumentMetadataProvider(graphUtils,
                                                         clientServicesProxy,
                                                         stateProvider);
        // Wire the mock diagram/metadata chain so the provider can resolve
        // the DMN model path and its Definitions.
        final Diagram diagram = mock(Diagram.class);
        final Metadata metadata = mock(Metadata.class);
        when(stateProvider.getDiagram()).thenReturn(Optional.of(diagram));
        when(diagram.getMetadata()).thenReturn(metadata);
        when(metadata.getPath()).thenReturn(dmnModelPath);
        when(graphUtils.getDefinitions(diagram)).thenReturn(definitions);
    }
    // The DMN model's own path must be passed through to the proxy call.
    @Test
    @SuppressWarnings("unchecked")
    public void testLoadPMMLIncludedDocumentsDMNModelPath() {
        provider.loadPMMLIncludedDocuments();
        verify(clientServicesProxy).loadPMMLDocumentsFromImports(eq(dmnModelPath),
                                                                 anyListOf(PMMLIncludedModel.class),
                                                                 any(ServiceCallback.class));
    }
    // Only PMML imports (not DMN ones) are converted to PMMLIncludedModels.
    @Test
    @SuppressWarnings("unchecked")
    public void testLoadPMMLIncludedDocumentsPMMLIncludedModels() {
        final Import dmn = new ImportDMN("dmn",
                                         new LocationURI("dmn-location"),
                                         DMNImportTypes.DMN.getDefaultNamespace());
        final Import pmml = new ImportPMML("pmml",
                                           new LocationURI("pmml-location"),
                                           DMNImportTypes.PMML.getDefaultNamespace());
        dmn.getName().setValue("dmn");
        pmml.getName().setValue("pmml");
        definitions.getImport().add(dmn);
        definitions.getImport().add(pmml);
        provider.loadPMMLIncludedDocuments();
        verify(clientServicesProxy).loadPMMLDocumentsFromImports(any(Path.class),
                                                                 pmmlIncludedModelsArgumentCaptor.capture(),
                                                                 any(ServiceCallback.class));
        final List<PMMLIncludedModel> actualIncludedModels = pmmlIncludedModelsArgumentCaptor.getValue();
        assertThat(actualIncludedModels).hasSize(1);
        final PMMLIncludedModel pmmlIncludedModel = actualIncludedModels.get(0);
        assertThat(pmmlIncludedModel.getModelName()).isEqualTo("pmml");
        assertThat(pmmlIncludedModel.getPath()).isEqualTo("pmml-location");
        assertThat(pmmlIncludedModel.getImportType()).isEqualTo(DMNImportTypes.PMML.getDefaultNamespace());
    }
    // Document names are returned in alphabetical order (a* before z*).
    @Test
    public void testGetPMMLDocumentNames() {
        final List<PMMLDocumentMetadata> pmmlDocuments = new ArrayList<>();
        pmmlDocuments.add(new PMMLDocumentMetadata("path1",
                                                   "zDocument1",
                                                   DMNImportTypes.PMML.getDefaultNamespace(),
                                                   Collections.emptyList()));
        pmmlDocuments.add(new PMMLDocumentMetadata("path2",
                                                   "aDocument2",
                                                   DMNImportTypes.PMML.getDefaultNamespace(),
                                                   Collections.emptyList()));
        final ServiceCallback<List<PMMLDocumentMetadata>> callback = loadPMMLIncludedDocuments();
        callback.onSuccess(pmmlDocuments);
        final List<String> documentNames = provider.getPMMLDocumentNames();
        assertThat(documentNames).containsSequence("aDocument2", "zDocument1");
    }
    // Helper: triggers the load and returns the captured callback so a test
    // can feed it canned PMMLDocumentMetadata via onSuccess(..).
    private ServiceCallback<List<PMMLDocumentMetadata>> loadPMMLIncludedDocuments() {
        provider.loadPMMLIncludedDocuments();
        verify(clientServicesProxy).loadPMMLDocumentsFromImports(any(Path.class),
                                                                 anyListOf(PMMLIncludedModel.class),
                                                                 callbackArgumentCaptor.capture());
        return callbackArgumentCaptor.getValue();
    }
    // Model names are returned sorted; unknown documents yield an empty list.
    @Test
    public void testGetPMMLDocumentModelNames() {
        final List<PMMLDocumentMetadata> pmmlDocuments = new ArrayList<>();
        pmmlDocuments.add(new PMMLDocumentMetadata("path",
                                                   "document",
                                                   DMNImportTypes.PMML.getDefaultNamespace(),
                                                   asList(new PMMLModelMetadata("zModel1",
                                                                                Collections.emptySet()),
                                                          new PMMLModelMetadata("aModel2",
                                                                                Collections.emptySet()))));
        final ServiceCallback<List<PMMLDocumentMetadata>> callback = loadPMMLIncludedDocuments();
        callback.onSuccess(pmmlDocuments);
        final List<String> modelNames = provider.getPMMLDocumentModels("document");
        assertThat(modelNames).containsSequence("aModel2", "zModel1");
        assertThat(provider.getPMMLDocumentModels("unknown")).isEmpty();
    }
    // Parameter names are returned sorted; unknown document/model pairs yield
    // an empty list.
    @Test
    public void testGetPMMLDocumentModelParameterNames() {
        final List<PMMLDocumentMetadata> pmmlDocuments = new ArrayList<>();
        pmmlDocuments.add(new PMMLDocumentMetadata("path",
                                                   "document",
                                                   DMNImportTypes.PMML.getDefaultNamespace(),
                                                   singletonList(new PMMLModelMetadata("model",
                                                                                       new Sets.Builder<PMMLParameterMetadata>()
                                                                                               .add(new PMMLParameterMetadata("zParameter1"))
                                                                                               .add(new PMMLParameterMetadata("aParameter2"))
                                                                                               .build()))));
        final ServiceCallback<List<PMMLDocumentMetadata>> callback = loadPMMLIncludedDocuments();
        callback.onSuccess(pmmlDocuments);
        final List<String> modelNames = provider.getPMMLDocumentModelParameterNames("document", "model");
        assertThat(modelNames).containsSequence("aParameter2", "zParameter1");
        assertThat(provider.getPMMLDocumentModelParameterNames("unknown", "unknown")).isEmpty();
    }
}
| mbiarnes/kie-wb-common | kie-wb-common-dmn/kie-wb-common-dmn-client/src/test/java/org/kie/workbench/common/dmn/client/editors/expressions/types/function/supplementary/pmml/PMMLDocumentMetadataProviderTest.java | Java | apache-2.0 | 10,034 |
<?php
/**
* ALIPAY API: alipay.eco.mycar.parking.lotbarcode.create request
*
* @author auto create
* @since 1.0, 2016-06-14 15:08:52
*/
class AlipayEcoMycarParkingLotbarcodeCreateRequest
{
	/**
	 * Material QR code for the parking lot (物料二维码).
	 **/
	private $bizContent;
	// Map of request parameters sent to the gateway.
	private $apiParas = array();
	private $terminalType;
	private $terminalInfo;
	private $prodCode;
	// Gateway API version; defaults to "1.0".
	private $apiVersion="1.0";
	// URL receiving asynchronous gateway notifications.
	private $notifyUrl;
	// URL the user is redirected back to after the request.
	private $returnUrl;
	// Whether the biz_content should be encrypted before sending.
	private $needEncrypt=false;
	// Stores the payload and mirrors it into the outgoing parameter map.
	public function setBizContent($bizContent)
	{
		$this->bizContent = $bizContent;
		$this->apiParas["biz_content"] = $bizContent;
	}
	public function getBizContent()
	{
		return $this->bizContent;
	}
	// Fixed gateway method name for this request type.
	public function getApiMethodName()
	{
		return "alipay.eco.mycar.parking.lotbarcode.create";
	}
	public function setNotifyUrl($notifyUrl)
	{
		$this->notifyUrl=$notifyUrl;
	}
	public function getNotifyUrl()
	{
		return $this->notifyUrl;
	}
	public function setReturnUrl($returnUrl)
	{
		$this->returnUrl=$returnUrl;
	}
	public function getReturnUrl()
	{
		return $this->returnUrl;
	}
	public function getApiParas()
	{
		return $this->apiParas;
	}
	public function getTerminalType()
	{
		return $this->terminalType;
	}
	public function setTerminalType($terminalType)
	{
		$this->terminalType = $terminalType;
	}
	public function getTerminalInfo()
	{
		return $this->terminalInfo;
	}
	public function setTerminalInfo($terminalInfo)
	{
		$this->terminalInfo = $terminalInfo;
	}
	public function getProdCode()
	{
		return $this->prodCode;
	}
	public function setProdCode($prodCode)
	{
		$this->prodCode = $prodCode;
	}
	public function setApiVersion($apiVersion)
	{
		$this->apiVersion=$apiVersion;
	}
	public function getApiVersion()
	{
		return $this->apiVersion;
	}
	public function setNeedEncrypt($needEncrypt)
	{
		$this->needEncrypt=$needEncrypt;
	}
	public function getNeedEncrypt()
	{
		return $this->needEncrypt;
	}
}
| houdunwang/hdphp | vendor/houdunwang/alipay/src/org/aop/request/AlipayEcoMycarParkingLotbarcodeCreateRequest.php | PHP | apache-2.0 | 1,918 |
package com.github.dockerjava.core.command;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.builder.ReflectionToStringBuilder;
import org.apache.commons.lang.builder.ToStringStyle;
import com.github.dockerjava.api.command.ListImagesCmd;
import com.github.dockerjava.api.model.Image;
import com.github.dockerjava.core.util.FiltersBuilder;
/**
 * List images
 *
 * Collects the list-images options/filters and delegates execution to the
 * configured {@link ListImagesCmd.Exec} via {@link AbstrDockerCmd}.
 */
public class ListImagesCmdImpl extends AbstrDockerCmd<ListImagesCmd, List<Image>> implements ListImagesCmd {
    // Optional image-name filter; null means no name filtering.
    private String imageNameFilter;
    // Whether to include intermediate images; defaults to false.
    private Boolean showAll = false;
    // Accumulates the "filters" query parameter (dangling, labels, ...).
    private FiltersBuilder filters = new FiltersBuilder();
    public ListImagesCmdImpl(ListImagesCmd.Exec exec) {
        super(exec);
    }
    @Override
    public Map<String, List<String>> getFilters() {
        return filters.build();
    }
    @Override
    public Boolean hasShowAllEnabled() {
        return showAll;
    }
    // NOTE(review): unlike the other with* methods below, this one does not
    // null-check its argument -- confirm whether null is a legal value here.
    @Override
    public ListImagesCmd withShowAll(Boolean showAll) {
        this.showAll = showAll;
        return this;
    }
    @Override
    public ListImagesCmd withDanglingFilter(Boolean dangling) {
        checkNotNull(dangling, "dangling have not been specified");
        filters.withFilter("dangling", dangling.toString());
        return this;
    }
    @Override
    public ListImagesCmd withLabelFilter(String... labels) {
        checkNotNull(labels, "labels have not been specified");
        filters.withLabels(labels);
        return this;
    }
    @Override
    public ListImagesCmd withLabelFilter(Map<String, String> labels) {
        checkNotNull(labels, "labels have not been specified");
        filters.withLabels(labels);
        return this;
    }
    @Override
    public ListImagesCmd withImageNameFilter(String imageNameFilter) {
        checkNotNull(imageNameFilter, "image name filter not specified");
        this.imageNameFilter = imageNameFilter;
        return this;
    }
    @Override
    public String getImageNameFilter() {
        return this.imageNameFilter;
    }
    @Override
    public String toString() {
        return ReflectionToStringBuilder.toString(this, ToStringStyle.SHORT_PREFIX_STYLE);
    }
}
| ollie314/docker-java | src/main/java/com/github/dockerjava/core/command/ListImagesCmdImpl.java | Java | apache-2.0 | 2,238 |
/*
* Copyright (C) 2013 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef SharedWorkerRepositoryClientImpl_h
#define SharedWorkerRepositoryClientImpl_h
#include "core/workers/SharedWorkerRepositoryClient.h"
#include "wtf/Noncopyable.h"
#include "wtf/PassOwnPtr.h"
#include "wtf/PassRefPtr.h"
namespace blink {
class WebSharedWorkerRepositoryClient;
// Adapter implementing core's SharedWorkerRepositoryClient by delegating to
// the embedder-supplied WebSharedWorkerRepositoryClient.
class SharedWorkerRepositoryClientImpl final : public SharedWorkerRepositoryClient {
    WTF_MAKE_NONCOPYABLE(SharedWorkerRepositoryClientImpl);
public:
    static PassOwnPtr<SharedWorkerRepositoryClientImpl> create(WebSharedWorkerRepositoryClient* client)
    {
        return adoptPtr(new SharedWorkerRepositoryClientImpl(client));
    }
    ~SharedWorkerRepositoryClientImpl() override { }
    // Connects the given SharedWorker over the supplied message port channel.
    void connect(PassRefPtrWillBeRawPtr<SharedWorker>, PassOwnPtr<WebMessagePortChannel>, const KURL&, const String& name, ExceptionState&) override;
    void documentDetached(Document*) override;
private:
    explicit SharedWorkerRepositoryClientImpl(WebSharedWorkerRepositoryClient*);
    // Raw pointer: not owned here -- presumably owned by the embedder and
    // outlives this object; confirm.
    WebSharedWorkerRepositoryClient* m_client;
};
#endif // SharedWorkerRepositoryClientImpl_h
| weolar/miniblink49 | third_party/WebKit/Source/web/SharedWorkerRepositoryClientImpl.h | C | apache-2.0 | 2,653 |
// Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/**
* Number.NEGATIVE_INFINITY is -Infinity
*
* @path ch15/15.7/15.7.3/15.7.3.5/S15.7.3.5_A1.js
* @description Checking sign and finiteness of Number.NEGATIVE_INFINITY
*/
// CHECK#1: Number.NEGATIVE_INFINITY must be non-finite and negative.
var negInf = Number.NEGATIVE_INFINITY;
if (isFinite(negInf) !== false) {
  $ERROR('#1: Number.NEGATIVE_INFINITY === Not-a-Finite');
} else if ((negInf < 0) !== true) {
  $ERROR('#1: Number.NEGATIVE_INFINITY === -Infinity');
}
| hippich/typescript | tests/Fidelity/test262/suite/ch15/15.7/15.7.3/15.7.3.5/S15.7.3.5_A1.js | JavaScript | apache-2.0 | 555 |
/*
* Copyright (c) 1996, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package java.rmi.server;
/**
 * An obsolete subclass of {@link ExportException}.
 *
 * <p>Kept only so that existing code and serialized forms referring to this
 * type keep working; no new code should use it.
 *
 * @author Ann Wollrath
 * @since JDK1.1
 * @deprecated This class is obsolete. Use {@link ExportException} instead.
 */
@Deprecated
public class SocketSecurityException extends ExportException {
    /* indicate compatibility with JDK 1.1.x version of class */
    private static final long serialVersionUID = -7622072999407781979L;
    /**
     * Constructs an <code>SocketSecurityException</code> with the specified
     * detail message.
     *
     * @param s the detail message.
     * @since JDK1.1
     */
    public SocketSecurityException(String s) {
        super(s);
    }
    /**
     * Constructs an <code>SocketSecurityException</code> with the specified
     * detail message and nested exception.
     *
     * @param s the detail message.
     * @param ex the nested exception
     * @since JDK1.1
     */
    public SocketSecurityException(String s, Exception ex) {
        super(s, ex);
    }
}
| shun634501730/java_source_cn | src_en/java/rmi/server/SocketSecurityException.java | Java | apache-2.0 | 1,223 |
# Fur material
# Using the fur material
The fur material needs a high number of triangular facets in a mesh to work well.
The number of facets needed also depends on the size of the mesh.
Examples that seem to work for the ground and the sphere are:
```
var ground = BABYLON.Mesh.CreateGround("ground", 8, 8, 200, scene);
var sphere = BABYLON.Mesh.CreateSphere("sphere", 500, 8, scene);
```
The fur material is created using
```
var furMaterial = new BABYLON.FurMaterial("fur_material", scene);
ground.material = furMaterial;
```
# Customize the fur material
You can customise three properties of the fur material:
```
furMaterial.furLength = 3; // Represents the maximum length of the fur, which is then adjusted randomly. Default value is 1.
furMaterial.furAngle = Math.PI/6; // Represents the angle the fur lies on the mesh from 0 to Math.PI/2. The default angle of 0 gives fur sticking straight up and PI/2 lies along the mesh.
furMaterial.furColor = new BABYLON.Color3(0.44, 0.21, 0.02); // Sets the fur color; the value shown here is the default used when furColor is not set.
```
# Using textures
##heightTexture
A greyscale image can be used to set the fur length.
A speckled greyscale image can produce fur-like results.
Any greyscale image will affect the fur length, producing a height-map type effect.
```
furMaterial.heightTexture = new BABYLON.Texture("speckles.jpg", scene); // Set the fur length with a texture.
```
##diffuseTexture
A texture can also be used to paint the mesh.
The leopard fur texture used in the test is by Martin Wegmann from [Wikimedia Commons](https://commons.wikimedia.org/wiki/File:Leopard_fur.JPG)
under the [license](https://creativecommons.org/licenses/by-sa/3.0/deed.en)
```
furMaterial.diffuseTexture = new BABYLON.Texture("leopard_fur.JPG", scene); // Texture used to paint/colour the fur.
```
# Using the High Level mode
Fur materials have always been subjects of a lot of theories and conferences with multiple implementations thanks to multiple technologies.
Here, with WebGL, we decided to choose one of these implementations, not hard to use and pretty smart (with performances) with simple models
First, activate the high level (activated by default):
```
furMaterial.highLevelFur = true;
```
That's all. Now, the most difficult part should be to configure the shells and the fur texture to create the fur effect.
Indeed, the same mesh has to be drawn several times with an offset (computed in the effect) to create the illusion of fur.
Hopefully, there is a function that creates and returns the shells:
```
// Generate a fur texture (internally used), working like a noise texture, that will be shared between all the shells
var furTexture = BABYLON.FurMaterial.GenerateTexture("furTexture", scene);
furMaterial.furTexture = furTexture;
myMesh.material = furMaterial;
var quality = 30; // Average quality
// Create shells
var shells = BABYLON.FurMaterial.FurifyMesh(myMesh, quality);
```
It is now working!
The function "BABYLON.FurMaterial.FurifyMesh" returns an array of "BABYLON.Mesh" that you can dispose later.
The first element is the mesh you used as the source mesh (myMesh here):
```
for (var i=0; i < shells.length; i++) {
shells[i].material.dispose();
shells[i].dispose();
}
```
You can customize the high level fur rendering thanks to some properties:
```
allFurMaterials.furSpacing = 2; // Computes the space between shells. In others words, works as the fur height
```
```
allFurMaterials.furDensity = 20; // Computes the fur density. More the density is high, more you'll have to zoom on the model
```
```
allFurMaterials.furSpeed = 100; // Divides the animation of fur in time according to the gravity
```
```
// Compute the gravity followed by the fur
allFurMaterials.furGravity = new BABYLON.Vector3(0, -1, 0);
```
# Meshes where the number of facets is not user controlled on creation.
Unlike the ground mesh where you can supply the number of subdivisions or the sphere mesh where you can supply the number of segments the majority of meshes are created using a minimum number of facets.
To apply the fur material to these the number of facets per face of the mesh needs to be increased.
The function increasedFacets will do this:
When n is the number of points per side added to each side of a facet the number of facets is increased by the square of (n + 1).
```
function increasedFacets(mesh, pps) { //pps points per side
var gaps = pps+1;
var n = gaps + 1;
var fvs =[];
for(var i=0; i<n; i++) {
fvs[i] = [];
}
var A,B;
var d ={x:0,y:0,z:0};
var u ={x:0,y:0};
var indices = [];
var vertexIndex = [];
var side = [];
var uvs = mesh.getVerticesData(BABYLON.VertexBuffer.UVKind);
var meshIndices = mesh.getIndices();
var positions = mesh.getVerticesData(BABYLON.VertexBuffer.PositionKind);
var normals =[];
for(var i = 0; i<meshIndices.length; i+=3) {
vertexIndex[0] = meshIndices[i];
vertexIndex[1] = meshIndices[i + 1];
vertexIndex[2] = meshIndices[i + 2];
for(var j = 0; j<3; j++) {
A = vertexIndex[j];
B = vertexIndex[(j+1)%3];
if(side[A] === undefined && side[B] === undefined) {
side[A] = [];
side[B] = [];
}
else {
if(side[A] === undefined) {
side[A] = [];
}
if(side[B] === undefined) {
side[B] = [];
}
}
if(side[A][B] === undefined && side[B][A] === undefined) {
side[A][B] = [];
d.x = (positions[3 * B] - positions[3 * A])/gaps;
d.y = (positions[3 * B + 1] - positions[3 * A + 1])/gaps;
d.z = (positions[3 * B + 2] - positions[3 * A + 2])/gaps;
u.x = (uvs[2*B] - uvs[2*A])/gaps;
u.y = (uvs[2*B + 1] - uvs[2*A + 1])/gaps;
side[A][B].push(A);
for(var k=1; k<gaps; k++) {
side[A][B].push(positions.length/3);
positions.push(positions[3 * A] + k*d.x, positions[3 * A + 1] + k*d.y, positions[3 * A + 2] + k*d.z);
uvs.push(uvs[2*A] + k*u.x, uvs[2*A + 1] + k*u.y);
}
side[A][B].push(B);
side[B][A]=[];
l = side[A][B].length;
for(var a=0; a<l; a++) {
side[B][A][a] = side[A][B][l-1-a];
}
}
else {
if(side[A][B] === undefined) {
side[A][B]=[];
l = side[B][A].length;
for(var a=0; a<l; a++) {
side[A][B][a] = side[B][A][l-1-a];
}
}
if(side[B][A] === undefined) {
side[B][A]=[];
l = side[A][B].length;
for(var a=0; a<l; a++) {
side[B][A][a] = side[A][B][l-1-a];
}
}
}
}
fvs[0][0] = meshIndices[i];
fvs[1][0] = side[meshIndices[i]][meshIndices[i + 1]][1];
fvs[1][1] = side[meshIndices[i]][meshIndices[i + 2]][1];
for(var k = 2; k<gaps; k++) {
fvs[k][0] = side[meshIndices[i]][meshIndices[i + 1]][k];
fvs[k][k] = side[meshIndices[i]][meshIndices[i + 2]][k];
d.x = (positions[3 * fvs[k][k]] - positions[3 * fvs[k][0]])/k;
d.y = (positions[3 * fvs[k][k] + 1] - positions[3 * fvs[k][0] + 1])/k;
d.z = (positions[3 * fvs[k][k] + 2] - positions[3 * fvs[k][0] + 2])/k;
u.x = (uvs[2*fvs[k][k]] - uvs[2*fvs[k][0]])/k;
u.y = (uvs[2*fvs[k][k] + 1] - uvs[2*fvs[k][0] + 1])/k;
for(var j = 1; j<k; j++) {
fvs[k][j] = positions.length/3;
positions.push(positions[3 * fvs[k][0]] + j*d.x, positions[3 * fvs[k][0] + 1] + j*d.y, positions[3 * fvs[k][0] + 2] + j*d.z);
uvs.push(uvs[2*fvs[k][0]] + j*u.x, uvs[2*fvs[k][0] + 1] + j*u.y);
}
}
fvs[gaps] = side[meshIndices[i + 1]][meshIndices[i + 2]];
indices.push(fvs[0][0],fvs[1][0],fvs[1][1]);
for(var k = 1; k<gaps; k++) {
for(var j = 0; j<k; j++) {
indices.push(fvs[k][j],fvs[k+1][j],fvs[k+1][j+1]);
indices.push(fvs[k][j],fvs[k+1][j+1],fvs[k][j+1]);
}
indices.push(fvs[k][j],fvs[k+1][j],fvs[k+1][j+1]);
}
}
var vertexData = new BABYLON.VertexData();
vertexData.positions = positions;
vertexData.indices = indices;
vertexData.uvs = uvs;
BABYLON.VertexData.ComputeNormals(positions, indices, normals);
vertexData.normals = normals;
mesh.dispose();
var newmesh = new BABYLON.Mesh("newmesh", scene);
vertexData.applyToMesh(newmesh);
return newmesh;
}
```
For sharp edged meshes such as a box the shader can separate the faces since the faces meeting at the corners have there own vertices and normals at these vertices.
These meshes are flat shaded. If this separation of the edges is a problem then the function convertToSmoothShadedMesh() can be used.
However this can then produce some artefacts at the edges.
```
function convertToSmoothShadedMesh(mesh) {
var meshIndices = mesh.getIndices();
var meshPositions = mesh.getVerticesData(BABYLON.VertexBuffer.PositionKind);
var mesh_uvs = mesh.getVerticesData(BABYLON.VertexBuffer.UVKind);
var setPositions = [];
var indices = [];
var positions = [];
var uvs = [];
var normals = [];
var p;
var indexMap = [];
for(var i=0; i<meshPositions.length; i+=3) {
var temp =[];
temp.push(i/3, meshPositions[i], meshPositions[i + 1], meshPositions[i + 2], mesh_uvs[2*i/3], mesh_uvs[2*i/3 + 1]);
setPositions.push(temp);
}
var i=0;
while(setPositions.length>0) {
p = setPositions.shift();
positions.push(p[1],p[2],p[3]);
uvs.push(p[4],p[5]);
indexMap[p[0]] = i;
var j = 0;
while(j<setPositions.length) {
if (Math.abs(p[1] - setPositions[j][1])<Math.pow(0.1, 10) && Math.abs(p[2] - setPositions[j][2])<Math.pow(0.1, 10) && Math.abs(p[3] - setPositions[j][3])<Math.pow(0.1, 10) ) {
indexMap[setPositions[j][0]] = i;
setPositions.splice(j,1);
}
else {
j++;
}
}
i++;
}
for(var i=0; i<meshIndices.length; i++) {
indices.push(indexMap[meshIndices[i]]);
}
var vertexData = new BABYLON.VertexData();
vertexData.positions = positions;
vertexData.indices = indices;
vertexData.uvs = uvs;
BABYLON.VertexData.ComputeNormals(positions, indices, normals);
vertexData.normals = normals;
vertexData.applyToMesh(mesh);
return mesh;
}
```
| Hersir88/Babylon.js | materialsLibrary/src/fur/readme.md | Markdown | apache-2.0 | 9,880 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.registry.client;
import org.apache.nifi.registry.flow.VersionedFlowSnapshot;
import org.apache.nifi.registry.flow.VersionedFlowSnapshotMetadata;
import java.io.IOException;
import java.util.List;
/**
 * Client for interacting with flow snapshots — the individual versions of a
 * versioned flow stored in a NiFi Registry instance.
 */
public interface FlowSnapshotClient {
    /**
     * Creates a new snapshot/version for the given flow.
     *
     * The snapshot object must have the version populated, and the call will fail
     * if the submitted version is not the next one-up version (i.e. latest + 1).
     *
     * @param snapshot the new snapshot
     * @return the created snapshot
     * @throws NiFiRegistryException if an error is encountered other than IOException
     * @throws IOException if an I/O error is encountered
     */
    VersionedFlowSnapshot create(VersionedFlowSnapshot snapshot) throws NiFiRegistryException, IOException;
    /**
     * Gets the snapshot for the given bucket, flow, and version.
     *
     * @param bucketId the id of the bucket containing the flow
     * @param flowId the flow id
     * @param version the version
     * @return the snapshot with the given version of the given flow in the given bucket
     * @throws NiFiRegistryException if an error is encountered other than IOException
     * @throws IOException if an I/O error is encountered
     */
    VersionedFlowSnapshot get(String bucketId, String flowId, int version) throws NiFiRegistryException, IOException;
    /**
     * Gets the snapshot for the given flow and version, without requiring the bucket id.
     *
     * @param flowId the flow id
     * @param version the version
     * @return the snapshot with the given version of the given flow
     * @throws NiFiRegistryException if an error is encountered other than IOException
     * @throws IOException if an I/O error is encountered
     */
    VersionedFlowSnapshot get(String flowId, int version) throws NiFiRegistryException, IOException;
    /**
     * Gets the latest (highest-version) snapshot for the given flow.
     *
     * @param bucketId the id of the bucket containing the flow
     * @param flowId the flow id
     * @return the snapshot with the latest version for the given flow
     * @throws NiFiRegistryException if an error is encountered other than IOException
     * @throws IOException if an I/O error is encountered
     */
    VersionedFlowSnapshot getLatest(String bucketId, String flowId) throws NiFiRegistryException, IOException;
    /**
     * Gets the latest (highest-version) snapshot for the given flow, without requiring the bucket id.
     *
     * @param flowId the flow id
     * @return the snapshot with the latest version for the given flow
     * @throws NiFiRegistryException if an error is encountered other than IOException
     * @throws IOException if an I/O error is encountered
     */
    VersionedFlowSnapshot getLatest(String flowId) throws NiFiRegistryException, IOException;
    /**
     * Gets the latest snapshot metadata (no flow contents) for the given flow.
     *
     * @param bucketId the id of the bucket containing the flow
     * @param flowId the flow id
     * @return the snapshot metadata for the latest version of the given flow
     * @throws NiFiRegistryException if an error is encountered other than IOException
     * @throws IOException if an I/O error is encountered
     */
    VersionedFlowSnapshotMetadata getLatestMetadata(String bucketId, String flowId) throws NiFiRegistryException, IOException;
    /**
     * Gets the latest snapshot metadata (no flow contents) for the given flow,
     * without requiring the bucket id.
     *
     * @param flowId the flow id
     * @return the snapshot metadata for the latest version of the given flow
     * @throws NiFiRegistryException if an error is encountered other than IOException
     * @throws IOException if an I/O error is encountered
     */
    VersionedFlowSnapshotMetadata getLatestMetadata(String flowId) throws NiFiRegistryException, IOException;
    /**
     * Gets a list of the metadata for all snapshots of a given flow.
     *
     * The contents of each snapshot are not part of the response.
     *
     * @param bucketId the id of the bucket containing the flow
     * @param flowId the flow id
     * @return the list of snapshot metadata
     * @throws NiFiRegistryException if an error is encountered other than IOException
     * @throws IOException if an I/O error is encountered
     */
    List<VersionedFlowSnapshotMetadata> getSnapshotMetadata(String bucketId, String flowId) throws NiFiRegistryException, IOException;
    /**
     * Gets a list of the metadata for all snapshots of a given flow,
     * without requiring the bucket id.
     *
     * The contents of each snapshot are not part of the response.
     *
     * @param flowId the flow id
     * @return the list of snapshot metadata
     * @throws NiFiRegistryException if an error is encountered other than IOException
     * @throws IOException if an I/O error is encountered
     */
    List<VersionedFlowSnapshotMetadata> getSnapshotMetadata(String flowId) throws NiFiRegistryException, IOException;
}
| MikeThomsen/nifi | nifi-registry/nifi-registry-core/nifi-registry-client/src/main/java/org/apache/nifi/registry/client/FlowSnapshotClient.java | Java | apache-2.0 | 5,613 |
/*******************************************************************************
* Copyright (c) Intel Corporation
* Copyright (c) 2017
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package org.osc.core.broker.service.tasks.conformance.openstack;
import java.io.File;
import java.util.Set;
import javax.persistence.EntityManager;
import org.osc.core.broker.job.lock.LockObjectReference;
import org.osc.core.broker.model.entities.appliance.ApplianceSoftwareVersion;
import org.osc.core.broker.model.entities.appliance.VirtualSystem;
import org.osc.core.broker.model.entities.virtualization.openstack.OsImageReference;
import org.osc.core.broker.rest.client.openstack.openstack4j.Endpoint;
import org.osc.core.broker.rest.client.openstack.openstack4j.Openstack4jGlance;
import org.osc.core.broker.service.appliance.UploadConfig;
import org.osc.core.broker.service.persistence.OSCEntityManager;
import org.osc.core.broker.service.tasks.TransactionalTask;
import org.slf4j.LoggerFactory;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.ConfigurationPolicy;
import org.slf4j.Logger;
/**
 * Transactional task that uploads the appliance software version image backing a
 * {@link VirtualSystem} to the Glance image store of a single OpenStack region
 * and records the resulting Glance image id on the virtual system.
 */
@Component(service = UploadImageToGlanceTask.class,
configurationPid = "org.osc.core.broker.upload",
configurationPolicy = ConfigurationPolicy.REQUIRE)
public class UploadImageToGlanceTask extends TransactionalTask {

    private final Logger log = LoggerFactory.getLogger(UploadImageToGlanceTask.class);

    private String region;
    private VirtualSystem vs;
    private String glanceImageName;
    private ApplianceSoftwareVersion applianceSoftwareVersion;
    private Endpoint osEndPoint;
    private String uploadPath;

    /**
     * Captures the configured upload directory when the OSGi component is activated.
     *
     * @param config upload configuration supplied by the configuration admin
     */
    @Activate
    void activate(UploadConfig config) {
        this.uploadPath = config.upload_path();
    }

    /**
     * Factory method that builds a fully populated task instance; this component
     * instance acts as a prototype carrying the activated configuration and
     * injected collaborators.
     *
     * @param vs the virtual system the uploaded image belongs to
     * @param region the OpenStack region to upload into
     * @param glanceImageName the name to give the image in Glance
     * @param applianceSoftwareVersion the software version providing the image file and properties
     * @param osEndPoint the OpenStack endpoint/credentials to use
     * @return a new task ready to be scheduled
     */
    public UploadImageToGlanceTask create(VirtualSystem vs, String region, String glanceImageName, ApplianceSoftwareVersion applianceSoftwareVersion, Endpoint osEndPoint) {
        UploadImageToGlanceTask task = new UploadImageToGlanceTask();
        task.vs = vs;
        task.region = region;
        task.applianceSoftwareVersion = applianceSoftwareVersion;
        task.osEndPoint = osEndPoint;
        task.glanceImageName = glanceImageName;
        task.name = task.getName();
        task.uploadPath = this.uploadPath;
        task.dbConnectionManager = this.dbConnectionManager;
        task.txBroadcastUtil = this.txBroadcastUtil;
        return task;
    }

    @Override
    public void executeTransaction(EntityManager em) throws Exception {
        OSCEntityManager<VirtualSystem> emgr = new OSCEntityManager<>(VirtualSystem.class, em, this.txBroadcastUtil);
        // Re-load the entity within the scope of the current transaction.
        this.vs = emgr.findByPrimaryKey(this.vs.getId());

        // Fixed: the original message concatenated a stray "+" into the text
        // (" to region + "); use SLF4J parameterized logging instead.
        this.log.info("Uploading image {} to region {}", this.glanceImageName, this.region);

        File imageFile = new File(this.uploadPath + this.applianceSoftwareVersion.getImageUrl());
        // try-with-resources guarantees the Glance client is closed even if the upload fails.
        try (Openstack4jGlance glance = new Openstack4jGlance(this.osEndPoint)) {
            String imageId = glance.uploadImage(this.region, this.glanceImageName, imageFile, this.applianceSoftwareVersion.getImageProperties());
            this.vs.addOsImageReference(new OsImageReference(this.vs, this.region, imageId));
        }

        OSCEntityManager.update(em, this.vs, this.txBroadcastUtil);
    }

    @Override
    public String getName() {
        return String.format("Uploading image '%s' to region '%s'", this.glanceImageName, this.region);
    }

    @Override
    public Set<LockObjectReference> getObjects() {
        return LockObjectReference.getObjectReferences(this.vs);
    }
}
| emanoelxavier/osc-core | osc-server/src/main/java/org/osc/core/broker/service/tasks/conformance/openstack/UploadImageToGlanceTask.java | Java | apache-2.0 | 4,272 |
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.driver.extensions;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import org.junit.Test;
import com.google.common.testing.EqualsTester;
/**
 * Unit tests for {@link NiciraTunGpeNp}.
 */
public class NiciraTunGpeNpTest {

    private static final byte FIRST_NP = (byte) 1;
    private static final byte SECOND_NP = (byte) 2;

    /**
     * Verifies the equals()/hashCode() contract using Guava's EqualsTester:
     * instances built from the same value are equal, different values are not.
     */
    @Test
    public void testEquals() {
        NiciraTunGpeNp first = new NiciraTunGpeNp(FIRST_NP);
        NiciraTunGpeNp firstCopy = new NiciraTunGpeNp(FIRST_NP);
        NiciraTunGpeNp second = new NiciraTunGpeNp(SECOND_NP);
        new EqualsTester()
                .addEqualityGroup(first, firstCopy)
                .addEqualityGroup(second)
                .testEquals();
    }

    /**
     * Verifies construction and the tunGpeNp() accessor round-trip.
     */
    @Test
    public void testConstruction() {
        NiciraTunGpeNp instance = new NiciraTunGpeNp(FIRST_NP);
        assertThat(instance, is(notNullValue()));
        assertThat(instance.tunGpeNp(), is(FIRST_NP));
    }
}
| gkatsikas/onos | drivers/default/src/test/java/org/onosproject/driver/extensions/NiciraTunGpeNpTest.java | Java | apache-2.0 | 1,762 |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.navigation;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.daemon.impl.PsiElementListNavigator;
import com.intellij.codeInsight.generation.actions.PresentableCodeInsightActionHandler;
import com.intellij.codeInsight.navigation.actions.GotoSuperAction;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.ide.util.MethodCellRenderer;
import com.intellij.ide.util.PsiNavigationSupport;
import com.intellij.idea.ActionsBundle;
import com.intellij.openapi.actionSystem.Presentation;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.Navigatable;
import com.intellij.psi.*;
import com.intellij.psi.impl.FindSuperElementsHelper;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NotNull;
/**
 * "Go to Super" code-insight handler for Java: from the caret position it
 * resolves the super elements (super methods of a method, super classes/interfaces
 * of a class, or the functional-interface method for a lambda/method reference)
 * and navigates to them — directly for a single target, via a chooser popup
 * for multiple targets.
 */
public class JavaGotoSuperHandler implements PresentableCodeInsightActionHandler {
  @Override
  public void invoke(@NotNull final Project project, @NotNull final Editor editor, @NotNull final PsiFile file) {
    // Record feature usage for productivity-guide statistics.
    FeatureUsageTracker.getInstance().triggerFeatureUsed(GotoSuperAction.FEATURE_ID);
    int offset = editor.getCaretModel().getOffset();
    PsiElement[] superElements = findSuperElements(file, offset);
    // Nothing to navigate to (e.g. caret is not inside a method or class).
    if (superElements.length == 0) return;
    if (superElements.length == 1) {
      // Single target: jump straight to it, no chooser needed.
      PsiElement superElement = superElements[0].getNavigationElement();
      final PsiFile containingFile = superElement.getContainingFile();
      if (containingFile == null) return;
      final VirtualFile virtualFile = containingFile.getVirtualFile();
      if (virtualFile == null) return;
      Navigatable descriptor =
        PsiNavigationSupport.getInstance().createNavigatable(project, virtualFile, superElement.getTextOffset());
      descriptor.navigate(true);
    }
    else if (superElements[0] instanceof PsiMethod) {
      // Multiple super methods: show a list popup. Method names are displayed
      // only when the signatures differ, to keep the list compact.
      boolean showMethodNames = !PsiUtil.allMethodsHaveSameSignature((PsiMethod[])superElements);
      PsiElementListNavigator.openTargets(editor, (PsiMethod[])superElements,
                                          CodeInsightBundle.message("goto.super.method.chooser.title"),
                                          CodeInsightBundle
                                            .message("goto.super.method.findUsages.title", ((PsiMethod)superElements[0]).getName()),
                                          new MethodCellRenderer(showMethodNames));
    }
    else {
      // Multiple super classes/interfaces: show a generic PSI element popup.
      NavigationUtil.getPsiElementPopup(superElements, CodeInsightBundle.message("goto.super.class.chooser.title"))
        .showInBestPositionFor(editor);
    }
  }
  /**
   * Collects the super elements for the element at {@code offset}.
   * For a functional expression this is the functional-interface method plus its
   * super methods; for a method or class it delegates to
   * {@link FindSuperElementsHelper}. Returns an empty array when nothing applies.
   */
  @NotNull
  private PsiElement[] findSuperElements(@NotNull PsiFile file, int offset) {
    PsiElement element = getElement(file, offset);
    if (element == null) return PsiElement.EMPTY_ARRAY;
    final PsiElement psiElement = PsiTreeUtil.getParentOfType(element, PsiFunctionalExpression.class, PsiMember.class);
    if (psiElement instanceof PsiFunctionalExpression) {
      final PsiMethod interfaceMethod = LambdaUtil.getFunctionalInterfaceMethod(psiElement);
      if (interfaceMethod != null) {
        // The interface method itself comes first, followed by its supers.
        return ArrayUtil.prepend(interfaceMethod, interfaceMethod.findSuperMethods(false));
      }
    }
    final PsiNameIdentifierOwner parent = PsiTreeUtil.getNonStrictParentOfType(element, PsiMethod.class, PsiClass.class);
    if (parent == null) {
      return PsiElement.EMPTY_ARRAY;
    }
    return FindSuperElementsHelper.findSuperElements(parent);
  }
  // Extension point: subclasses may substitute a different leaf-element lookup.
  protected PsiElement getElement(@NotNull PsiFile file, int offset) {
    return file.findElementAt(offset);
  }
  @Override
  public boolean startInWriteAction() {
    // Pure navigation — no document modification, so no write action needed.
    return false;
  }
  @Override
  public void update(@NotNull Editor editor, @NotNull PsiFile file, Presentation presentation) {
    // Pick the action text/description ("Goto Super Class" vs "Goto Super Method")
    // based on what kind of element the caret is in.
    final PsiElement element = getElement(file, editor.getCaretModel().getOffset());
    final PsiElement containingElement = PsiTreeUtil.getParentOfType(element, PsiFunctionalExpression.class, PsiMember.class);
    if (containingElement instanceof PsiClass) {
      presentation.setText(ActionsBundle.actionText("GotoSuperClass"));
      presentation.setDescription(ActionsBundle.actionDescription("GotoSuperClass"));
    }
    else {
      presentation.setText(ActionsBundle.actionText("GotoSuperMethod"));
      presentation.setDescription(ActionsBundle.actionDescription("GotoSuperMethod"));
    }
  }
}
| goodwinnk/intellij-community | java/java-impl/src/com/intellij/codeInsight/navigation/JavaGotoSuperHandler.java | Java | apache-2.0 | 5,139 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.4.2_11) on Mon Jul 12 21:36:30 CEST 2010 -->
<TITLE>
Uses of Class org.apache.fop.pdf.PDFTTFStream (Apache FOP 1.0 API)
</TITLE>
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
parent.document.title="Uses of Class org.apache.fop.pdf.PDFTTFStream (Apache FOP 1.0 API)";
}
</SCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=3 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/fop/pdf/PDFTTFStream.html" title="class in org.apache.fop.pdf"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
fop 1.0</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html" target="_top"><B>FRAMES</B></A>
<A HREF="PDFTTFStream.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.fop.pdf.PDFTTFStream</B></H2>
</CENTER>
No usage of org.apache.fop.pdf.PDFTTFStream
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=3 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/fop/pdf/PDFTTFStream.html" title="class in org.apache.fop.pdf"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
fop 1.0</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html" target="_top"><B>FRAMES</B></A>
<A HREF="PDFTTFStream.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright 1999-2010 The Apache Software Foundation. All Rights Reserved.
</BODY>
</HTML>
| lewismc/yax | lib/fop-1.0/javadocs/org/apache/fop/pdf/class-use/PDFTTFStream.html | HTML | apache-2.0 | 5,730 |
Base class for queue implementations.
A queue is a TensorFlow data structure that stores tensors across
multiple steps, and exposes operations that enqueue and dequeue
tensors.
Each queue element is a tuple of one or more tensors, where each
tuple component has a static dtype, and may have a static shape. The
queue implementations support versions of enqueue and dequeue that
handle single elements, versions that support enqueuing and
dequeuing a batch of elements at once.
See [`tf.FIFOQueue`](#FIFOQueue) and
[`tf.RandomShuffleQueue`](#RandomShuffleQueue) for concrete
implementations of this class, and instructions on how to create
them.
- - -
#### `tf.QueueBase.enqueue(vals, name=None)` {#QueueBase.enqueue}
Enqueues one element to this queue.
If the queue is full when this operation executes, it will block
until the element has been enqueued.
At runtime, this operation may raise an error if the queue is
[closed](#QueueBase.close) before or during its execution. If the
queue is closed before this operation runs,
`tf.errors.AbortedError` will be raised. If this operation is
blocked, and either (i) the queue is closed by a close operation
with `cancel_pending_enqueues=True`, or (ii) the session is
[closed](../../api_docs/python/client.md#Session.close),
`tf.errors.CancelledError` will be raised.
##### Args:
* <b>`vals`</b>: A tensor, a list or tuple of tensors, or a dictionary containing
the values to enqueue.
* <b>`name`</b>: A name for the operation (optional).
##### Returns:
The operation that enqueues a new tuple of tensors to the queue.
- - -
#### `tf.QueueBase.enqueue_many(vals, name=None)` {#QueueBase.enqueue_many}
Enqueues zero or more elements to this queue.
This operation slices each component tensor along the 0th dimension to
make multiple queue elements. All of the tensors in `vals` must have the
same size in the 0th dimension.
If the queue is full when this operation executes, it will block
until all of the elements have been enqueued.
At runtime, this operation may raise an error if the queue is
[closed](#QueueBase.close) before or during its execution. If the
queue is closed before this operation runs,
`tf.errors.AbortedError` will be raised. If this operation is
blocked, and either (i) the queue is closed by a close operation
with `cancel_pending_enqueues=True`, or (ii) the session is
[closed](../../api_docs/python/client.md#Session.close),
`tf.errors.CancelledError` will be raised.
##### Args:
* <b>`vals`</b>: A tensor, a list or tuple of tensors, or a dictionary
from which the queue elements are taken.
* <b>`name`</b>: A name for the operation (optional).
##### Returns:
The operation that enqueues a batch of tuples of tensors to the queue.
- - -
#### `tf.QueueBase.dequeue(name=None)` {#QueueBase.dequeue}
Dequeues one element from this queue.
If the queue is empty when this operation executes, it will block
until there is an element to dequeue.
At runtime, this operation may raise an error if the queue is
[closed](#QueueBase.close) before or during its execution. If the
queue is closed, the queue is empty, and there are no pending
enqueue operations that can fulfil this request,
`tf.errors.OutOfRangeError` will be raised. If the session is
[closed](../../api_docs/python/client.md#Session.close),
`tf.errors.CancelledError` will be raised.
##### Args:
* <b>`name`</b>: A name for the operation (optional).
##### Returns:
The tuple of tensors that was dequeued.
- - -
#### `tf.QueueBase.dequeue_many(n, name=None)` {#QueueBase.dequeue_many}
Dequeues and concatenates `n` elements from this queue.
This operation concatenates queue-element component tensors along
the 0th dimension to make a single component tensor. All of the
components in the dequeued tuple will have size `n` in the 0th dimension.
If the queue is closed and there are fewer than `n` elements left, then an
`OutOfRange` exception is raised.
At runtime, this operation may raise an error if the queue is
[closed](#QueueBase.close) before or during its execution. If the
queue is closed, the queue contains fewer than `n` elements, and
there are no pending enqueue operations that can fulfil this
request, `tf.errors.OutOfRangeError` will be raised. If the
session is [closed](../../api_docs/python/client.md#Session.close),
`tf.errors.CancelledError` will be raised.
##### Args:
* <b>`n`</b>: A scalar `Tensor` containing the number of elements to dequeue.
* <b>`name`</b>: A name for the operation (optional).
##### Returns:
The tuple of concatenated tensors that was dequeued.
- - -
#### `tf.QueueBase.size(name=None)` {#QueueBase.size}
Compute the number of elements in this queue.
##### Args:
* <b>`name`</b>: A name for the operation (optional).
##### Returns:
A scalar tensor containing the number of elements in this queue.
- - -
#### `tf.QueueBase.close(cancel_pending_enqueues=False, name=None)` {#QueueBase.close}
Closes this queue.
This operation signals that no more elements will be enqueued in
the given queue. Subsequent `enqueue` and `enqueue_many`
operations will fail. Subsequent `dequeue` and `dequeue_many`
operations will continue to succeed if sufficient elements remain
in the queue. Subsequent `dequeue` and `dequeue_many` operations
that would block will fail immediately.
If `cancel_pending_enqueues` is `True`, all pending requests will also
be cancelled.
##### Args:
* <b>`cancel_pending_enqueues`</b>: (Optional.) A boolean, defaulting to
`False` (described above).
* <b>`name`</b>: A name for the operation (optional).
##### Returns:
The operation that closes the queue.
#### Other Methods
- - -
#### `tf.QueueBase.__init__(dtypes, shapes, names, queue_ref)` {#QueueBase.__init__}
Constructs a queue object from a queue reference.
The two optional lists, `shapes` and `names`, must be of the same length
as `dtypes` if provided. The values at a given index `i` indicate the
shape and name to use for the corresponding queue component in `dtypes`.
##### Args:
* <b>`dtypes`</b>: A list of types. The length of dtypes must equal the number
of tensors in each element.
* <b>`shapes`</b>: Constraints on the shapes of tensors in an element:
A list of shape tuples or None. This list is the same length
    as dtypes. If the shape of any tensor in the element is constrained,
    all must be; shapes can be None if the shapes should not be constrained.
* <b>`names`</b>: Optional list of names. If provided, the `enqueue()` and
`dequeue()` methods will use dictionaries with these names as keys.
Must be None or a list or tuple of the same length as `dtypes`.
* <b>`queue_ref`</b>: The queue reference, i.e. the output of the queue op.
##### Raises:
* <b>`ValueError`</b>: If one of the arguments is invalid.
- - -
#### `tf.QueueBase.dequeue_up_to(n, name=None)` {#QueueBase.dequeue_up_to}
Dequeues and concatenates `n` elements from this queue.
**Note** This operation is not supported by all queues. If a queue does not
support DequeueUpTo, then a `tf.errors.UnimplementedError` is raised.
This operation concatenates queue-element component tensors along
the 0th dimension to make a single component tensor. If the queue
has not been closed, all of the components in the dequeued tuple
will have size `n` in the 0th dimension.
If the queue is closed and there are more than `0` but fewer than
`n` elements remaining, then instead of raising a
`tf.errors.OutOfRangeError` like [`dequeue_many`](#QueueBase.dequeue_many),
the remaining elements are returned immediately. If the queue is
closed and there are `0` elements left in the queue, then a
`tf.errors.OutOfRangeError` is raised just like in `dequeue_many`.
Otherwise the behavior is identical to `dequeue_many`.
##### Args:
* <b>`n`</b>: A scalar `Tensor` containing the number of elements to dequeue.
* <b>`name`</b>: A name for the operation (optional).
##### Returns:
The tuple of concatenated tensors that was dequeued.
- - -
#### `tf.QueueBase.dtypes` {#QueueBase.dtypes}
The list of dtypes for each component of a queue element.
- - -
#### `tf.QueueBase.from_list(index, queues)` {#QueueBase.from_list}
Create a queue using the queue reference from `queues[index]`.
##### Args:
* <b>`index`</b>: An integer scalar tensor that determines the input that gets
selected.
* <b>`queues`</b>: A list of `QueueBase` objects.
##### Returns:
A `QueueBase` object.
##### Raises:
* <b>`TypeError`</b>: When `queues` is not a list of `QueueBase` objects,
or when the data types of `queues` are not all the same.
- - -
#### `tf.QueueBase.name` {#QueueBase.name}
The name of the underlying queue.
- - -
#### `tf.QueueBase.names` {#QueueBase.names}
The list of names for each component of a queue element.
- - -
#### `tf.QueueBase.queue_ref` {#QueueBase.queue_ref}
The underlying queue reference.
| dhalleine/tensorflow | tensorflow/g3doc/api_docs/python/functions_and_classes/shard6/tf.QueueBase.md | Markdown | apache-2.0 | 8,913 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for RNN cell wrapper v2 implementation."""
from absl.testing import parameterized
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.keras import combinations
from tensorflow.python.keras import layers
from tensorflow.python.keras.layers import rnn_cell_wrapper_v2
from tensorflow.python.keras.layers.legacy_rnn import rnn_cell_impl
from tensorflow.python.keras.legacy_tf_layers import base as legacy_base_layer
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import test
@combinations.generate(combinations.combine(mode=["graph", "eager"]))
class RNNCellWrapperTest(test.TestCase, parameterized.TestCase):
  """Tests for the v2 (Keras-compatible) RNN cell wrappers.

  Runs in both graph and eager modes via the `combinations` decorator.
  """

  def testResidualWrapper(self):
    """ResidualWrapper output equals input + cell output; state is unchanged."""
    wrapper_type = rnn_cell_wrapper_v2.ResidualWrapper
    x = ops.convert_to_tensor_v2_with_dispatch(
        np.array([[1., 1., 1.]]), dtype="float32")
    m = ops.convert_to_tensor_v2_with_dispatch(
        np.array([[0.1, 0.1, 0.1]]), dtype="float32")
    base_cell = rnn_cell_impl.GRUCell(
        3, kernel_initializer=init_ops.constant_initializer(0.5),
        bias_initializer=init_ops.constant_initializer(0.5))
    g, m_new = base_cell(x, m)
    wrapper_object = wrapper_type(base_cell)
    # The wrapped cell must be tracked as a checkpoint dependency named "cell".
    children = wrapper_object._trackable_children()
    wrapper_object.get_config()  # Should not throw an error
    self.assertIn("cell", children)
    self.assertIs(children["cell"], base_cell)
    g_res, m_new_res = wrapper_object(x, m)
    self.evaluate([variables_lib.global_variables_initializer()])
    res = self.evaluate([g, g_res, m_new, m_new_res])
    # Residual connections
    self.assertAllClose(res[1], res[0] + [1., 1., 1.])
    # States are left untouched
    self.assertAllClose(res[2], res[3])

  def testResidualWrapperWithSlice(self):
    """A custom residual_fn may slice the input before adding it to the output."""
    wrapper_type = rnn_cell_wrapper_v2.ResidualWrapper
    x = ops.convert_to_tensor_v2_with_dispatch(
        np.array([[1., 1., 1., 1., 1.]]), dtype="float32")
    m = ops.convert_to_tensor_v2_with_dispatch(
        np.array([[0.1, 0.1, 0.1]]), dtype="float32")
    base_cell = rnn_cell_impl.GRUCell(
        3, kernel_initializer=init_ops.constant_initializer(0.5),
        bias_initializer=init_ops.constant_initializer(0.5))
    g, m_new = base_cell(x, m)

    def residual_with_slice_fn(inp, out):
      # Keep only the first 3 of the 5 input features so shapes match the output.
      inp_sliced = array_ops.slice(inp, [0, 0], [-1, 3])
      return inp_sliced + out

    g_res, m_new_res = wrapper_type(
        base_cell, residual_with_slice_fn)(x, m)
    self.evaluate([variables_lib.global_variables_initializer()])
    res_g, res_g_res, res_m_new, res_m_new_res = self.evaluate(
        [g, g_res, m_new, m_new_res])
    # Residual connections
    self.assertAllClose(res_g_res, res_g + [1., 1., 1.])
    # States are left untouched
    self.assertAllClose(res_m_new, res_m_new_res)

  def testDeviceWrapper(self):
    """DeviceWrapper pins the wrapped cell's computation to the given device."""
    wrapper_type = rnn_cell_wrapper_v2.DeviceWrapper
    x = array_ops.zeros([1, 3])
    m = array_ops.zeros([1, 3])
    cell = rnn_cell_impl.GRUCell(3)
    wrapped_cell = wrapper_type(cell, "/cpu:0")
    # The wrapped cell must be tracked as a checkpoint dependency named "cell".
    children = wrapped_cell._trackable_children()
    wrapped_cell.get_config()  # Should not throw an error
    self.assertIn("cell", children)
    self.assertIs(children["cell"], cell)
    outputs, _ = wrapped_cell(x, m)
    self.assertIn("cpu:0", outputs.device.lower())

  @parameterized.parameters(
      [[rnn_cell_impl.DropoutWrapper, rnn_cell_wrapper_v2.DropoutWrapper],
       [rnn_cell_impl.ResidualWrapper, rnn_cell_wrapper_v2.ResidualWrapper]])
  def testWrapperKerasStyle(self, wrapper, wrapper_v2):
    """Tests if wrapper cell is instantiated in keras style scope."""
    wrapped_cell_v2 = wrapper_v2(rnn_cell_impl.BasicRNNCell(1))
    self.assertIsNone(getattr(wrapped_cell_v2, "_keras_style", None))
    wrapped_cell = wrapper(rnn_cell_impl.BasicRNNCell(1))
    self.assertFalse(wrapped_cell._keras_style)

  @parameterized.parameters(
      [rnn_cell_wrapper_v2.DropoutWrapper, rnn_cell_wrapper_v2.ResidualWrapper])
  def testWrapperWeights(self, wrapper):
    """Tests that wrapper weights contain wrapped cells weights."""
    base_cell = layers.SimpleRNNCell(1, name="basic_rnn_cell")
    rnn_cell = wrapper(base_cell)
    rnn_layer = layers.RNN(rnn_cell)
    inputs = ops.convert_to_tensor_v2_with_dispatch([[[1]]],
                                                    dtype=dtypes.float32)
    rnn_layer(inputs)
    # Weight names should be scoped under the wrapper's snake_case name.
    wrapper_name = generic_utils.to_snake_case(wrapper.__name__)
    expected_weights = ["rnn/" + wrapper_name + "/" + var for var in
                        ("kernel:0", "recurrent_kernel:0", "bias:0")]
    self.assertLen(rnn_cell.weights, 3)
    self.assertCountEqual([v.name for v in rnn_cell.weights], expected_weights)
    self.assertCountEqual([v.name for v in rnn_cell.trainable_variables],
                          expected_weights)
    self.assertCountEqual([v.name for v in rnn_cell.non_trainable_variables],
                          [])
    self.assertCountEqual([v.name for v in rnn_cell.cell.weights],
                          expected_weights)

  @parameterized.parameters(
      [rnn_cell_wrapper_v2.DropoutWrapper, rnn_cell_wrapper_v2.ResidualWrapper])
  def testWrapperV2Caller(self, wrapper):
    """Tests that wrapper V2 is using the LayerRNNCell's caller."""
    with legacy_base_layer.keras_style_scope():
      base_cell = rnn_cell_impl.MultiRNNCell(
          [rnn_cell_impl.BasicRNNCell(1) for _ in range(2)])
    rnn_cell = wrapper(base_cell)
    inputs = ops.convert_to_tensor_v2_with_dispatch([[1]], dtype=dtypes.float32)
    state = ops.convert_to_tensor_v2_with_dispatch([[1]], dtype=dtypes.float32)
    _ = rnn_cell(inputs, [state, state])
    weights = base_cell._cells[0].weights
    self.assertLen(weights, expected_len=2)
    # Variables created through the v2 caller get the "_wrapper" name scope.
    self.assertTrue(all("_wrapper" in v.name for v in weights))

  @parameterized.parameters(
      [rnn_cell_wrapper_v2.DropoutWrapper, rnn_cell_wrapper_v2.ResidualWrapper])
  def testWrapperV2Build(self, wrapper):
    """Building the wrapper must also build the wrapped cell."""
    cell = rnn_cell_impl.LSTMCell(10)
    wrapper = wrapper(cell)
    wrapper.build((1,))
    self.assertTrue(cell.built)

  def testDeviceWrapperSerialization(self):
    """DeviceWrapper round-trips through get_config/from_config."""
    wrapper_cls = rnn_cell_wrapper_v2.DeviceWrapper
    cell = layers.LSTMCell(10)
    wrapper = wrapper_cls(cell, "/cpu:0")
    config = wrapper.get_config()
    reconstructed_wrapper = wrapper_cls.from_config(config)
    self.assertDictEqual(config, reconstructed_wrapper.get_config())
    self.assertIsInstance(reconstructed_wrapper, wrapper_cls)

  def testResidualWrapperSerialization(self):
    """ResidualWrapper round-trips, including lambda and function residual_fn."""
    wrapper_cls = rnn_cell_wrapper_v2.ResidualWrapper
    cell = layers.LSTMCell(10)
    wrapper = wrapper_cls(cell)
    config = wrapper.get_config()
    reconstructed_wrapper = wrapper_cls.from_config(config)
    self.assertDictEqual(config, reconstructed_wrapper.get_config())
    self.assertIsInstance(reconstructed_wrapper, wrapper_cls)
    wrapper = wrapper_cls(cell, residual_fn=lambda i, o: i + i + o)
    config = wrapper.get_config()
    reconstructed_wrapper = wrapper_cls.from_config(config)
    # Assert the reconstructed function will perform the math correctly.
    self.assertEqual(reconstructed_wrapper._residual_fn(1, 2), 4)

    def residual_fn(inputs, outputs):
      return inputs * 3 + outputs

    wrapper = wrapper_cls(cell, residual_fn=residual_fn)
    config = wrapper.get_config()
    reconstructed_wrapper = wrapper_cls.from_config(config)
    # Assert the reconstructed function will perform the math correctly.
    self.assertEqual(reconstructed_wrapper._residual_fn(1, 2), 5)

  def testDropoutWrapperSerialization(self):
    """DropoutWrapper round-trips, including the dropout_state_filter_visitor."""
    wrapper_cls = rnn_cell_wrapper_v2.DropoutWrapper
    cell = layers.GRUCell(10)
    wrapper = wrapper_cls(cell)
    config = wrapper.get_config()
    reconstructed_wrapper = wrapper_cls.from_config(config)
    self.assertDictEqual(config, reconstructed_wrapper.get_config())
    self.assertIsInstance(reconstructed_wrapper, wrapper_cls)
    wrapper = wrapper_cls(cell, dropout_state_filter_visitor=lambda s: True)
    config = wrapper.get_config()
    reconstructed_wrapper = wrapper_cls.from_config(config)
    self.assertTrue(reconstructed_wrapper._dropout_state_filter(None))

    def dropout_state_filter_visitor(unused_state):
      return False

    wrapper = wrapper_cls(
        cell, dropout_state_filter_visitor=dropout_state_filter_visitor)
    config = wrapper.get_config()
    reconstructed_wrapper = wrapper_cls.from_config(config)
    self.assertFalse(reconstructed_wrapper._dropout_state_filter(None))

  def testDropoutWrapperWithKerasLSTMCell(self):
    """DropoutWrapper must reject Keras LSTM cells (v1 and v2)."""
    wrapper_cls = rnn_cell_wrapper_v2.DropoutWrapper
    cell = layers.LSTMCell(10)
    with self.assertRaisesRegex(ValueError, "does not work with "):
      wrapper_cls(cell)
    cell = layers.LSTMCellV2(10)
    with self.assertRaisesRegex(ValueError, "does not work with "):
      wrapper_cls(cell)
# Standard TensorFlow test entry point.
if __name__ == "__main__":
  test.main()
| tensorflow/tensorflow | tensorflow/python/keras/layers/rnn_cell_wrapper_v2_test.py | Python | apache-2.0 | 9,770 |
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.gmf.runtime.common.core.command.CommandResult;
import org.eclipse.gmf.runtime.common.core.command.ICommand;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import org.eclipse.gmf.runtime.emf.type.core.commands.EditElementCommand;
import org.eclipse.gmf.runtime.emf.type.core.requests.ConfigureRequest;
import org.eclipse.gmf.runtime.emf.type.core.requests.CreateElementRequest;
import org.eclipse.gmf.runtime.notation.View;
import org.wso2.developerstudio.eclipse.gmf.esb.CloneTargetContainer;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbFactory;
import org.wso2.developerstudio.eclipse.gmf.esb.MediatorFlow;
/**
* @generated
*/
public class MediatorFlow11CreateCommand extends EditElementCommand {

	/**
	 * Builds the create command for a mediator flow owned by a clone target
	 * container.
	 *
	 * @param req the element-creation request this command executes
	 */
	public MediatorFlow11CreateCommand(CreateElementRequest req) {
		super(req.getLabel(), null, req);
	}

	/**
	 * Resolves the model element that will own the new mediator flow. When the
	 * request carries a notation {@link View}, it is unwrapped to its semantic
	 * element.
	 */
	protected EObject getElementToEdit() {
		EObject owner = ((CreateElementRequest) getRequest()).getContainer();
		return (owner instanceof View) ? ((View) owner).getElement() : owner;
	}

	/**
	 * A clone target container holds at most one mediator flow, so creation is
	 * only possible while that slot is still empty.
	 */
	public boolean canExecute() {
		CloneTargetContainer owner = (CloneTargetContainer) getElementToEdit();
		return owner.getMediatorFlow() == null;
	}

	/**
	 * Creates the mediator flow, attaches it to its container, configures it,
	 * and reports it back through the request.
	 */
	protected CommandResult doExecuteWithResult(IProgressMonitor monitor, IAdaptable info)
			throws ExecutionException {
		MediatorFlow flow = EsbFactory.eINSTANCE.createMediatorFlow();
		CloneTargetContainer owner = (CloneTargetContainer) getElementToEdit();
		owner.setMediatorFlow(flow);
		doConfigure(flow, monitor, info);
		((CreateElementRequest) getRequest()).setNewElement(flow);
		return CommandResult.newOKCommandResult(flow);
	}

	/**
	 * Runs the element type's configure command (if any) against the freshly
	 * created mediator flow.
	 */
	protected void doConfigure(MediatorFlow flow, IProgressMonitor monitor, IAdaptable info)
			throws ExecutionException {
		CreateElementRequest request = (CreateElementRequest) getRequest();
		IElementType elementType = request.getElementType();
		ConfigureRequest configureRequest = new ConfigureRequest(getEditingDomain(), flow,
				elementType);
		configureRequest.setClientContext(request.getClientContext());
		configureRequest.addParameters(request.getParameters());
		ICommand configureCommand = elementType.getEditCommand(configureRequest);
		if (configureCommand != null && configureCommand.canExecute()) {
			configureCommand.execute(monitor, info);
		}
	}
}
| chanakaudaya/developer-studio | esb/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/edit/commands/MediatorFlow11CreateCommand.java | Java | apache-2.0 | 2,841 |
package brooklyn.location.jclouds.pool;
import java.util.Map;
import org.jclouds.compute.domain.NodeMetadata;
import org.jclouds.compute.domain.Processor;
import org.jclouds.domain.Location;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.base.Throwables;
/**
 * Static factory methods for {@link Predicate}s over jclouds {@link NodeMetadata},
 * used to select machines from a pool.
 */
public class MachinePoolPredicates {

    private static final Logger log = LoggerFactory.getLogger(MachinePoolPredicates.class);

    /** Returns a predicate matching nodes that are NOT members of the given set. */
    public static Predicate<NodeMetadata> except(final MachineSet removedItems) {
        return new Predicate<NodeMetadata>() {
            @Override
            public boolean apply(NodeMetadata input) {
                return !removedItems.contains(input);
            }
        };
    }

    /** Returns a predicate matching nodes rejected by the given predicate. */
    public static Predicate<NodeMetadata> except(final Predicate<NodeMetadata> predicateToExclude) {
        return Predicates.not(predicateToExclude);
    }

    /**
     * Returns a predicate matching nodes that satisfy
     * {@link #matches(ReusableMachineTemplate, NodeMetadata)} for the given template.
     */
    public static Predicate<NodeMetadata> matching(final ReusableMachineTemplate template) {
        return new Predicate<NodeMetadata>() {
            @Override
            public boolean apply(NodeMetadata input) {
                return matches(template, input);
            }
        };
    }

    /** Returns a predicate matching nodes that carry the given tag. */
    public static Predicate<NodeMetadata> withTag(final String tag) {
        return new Predicate<NodeMetadata>() {
            @Override
            public boolean apply(NodeMetadata input) {
                return input.getTags().contains(tag);
            }
        };
    }

    /**
     * Returns the logical AND of the given predicates.
     * <p>
     * Safe for varargs of a generic type: the array is only read, never stored
     * or exposed, so no heap pollution can occur.
     */
    @SafeVarargs
    public static Predicate<NodeMetadata> compose(final Predicate<NodeMetadata> ...predicates) {
        return Predicates.and(predicates);
    }

    /** True iff the node matches the criteria specified in this template.
     * <p>
     * NB: This only checks some of the most common fields,
     * plus a hashcode (in strict mode).
     * In strict mode you're practically guaranteed to match only machines created by this template.
     * (Add a tag(uid) and you _will_ be guaranteed, strict mode or not.)
     * <p>
     * Outside strict mode, some things (OS and hypervisor) can fall through the gaps.
     * But if that is a problem we can easily add them in.
     * <p>
     * (Caveat: If explicit Hardware, Image, and/or Template were specified in the template,
     * then the hash code probably will not detect it.)
     **/
    public static boolean matches(ReusableMachineTemplate template, NodeMetadata m) {
        try {
            // tags and user metadata
            if (! m.getTags().containsAll( template.getTags(false) )) return false;
            if (! isSubMapOf(template.getUserMetadata(false), m.getUserMetadata())) return false;

            // common hardware parameters
            if (template.getMinRam()!=null && m.getHardware().getRam() < template.getMinRam()) return false;
            if (template.getMinCores()!=null) {
                double numCores = 0;
                for (Processor p: m.getHardware().getProcessors()) numCores += p.getCores();
                // small epsilon guards against floating-point rounding in core counts
                if (numCores+0.001 < template.getMinCores()) return false;
            }
            if (template.getIs64bit()!=null) {
                if (m.getOperatingSystem().is64Bit() != template.getIs64bit()) return false;
            }
            if (template.getOsFamily()!=null) {
                if (m.getOperatingSystem() == null ||
                        !template.getOsFamily().equals(m.getOperatingSystem().getFamily())) return false;
            }
            if (template.getOsNameMatchesRegex()!=null) {
                if (m.getOperatingSystem() == null || m.getOperatingSystem().getName()==null ||
                        !m.getOperatingSystem().getName().matches(template.getOsNameMatchesRegex())) return false;
            }
            if (template.getLocationId()!=null) {
                if (!isLocationContainedIn(m.getLocation(), template.getLocationId())) return false;
            }

            // TODO other TemplateBuilder fields and TemplateOptions

            return true;
        } catch (Exception e) {
            log.warn("Error (rethrowing) trying to match "+m+" against "+template+": "+e, e);
            throw Throwables.propagate(e);
        }
    }

    /** True iff {@code locationId} names {@code location} or any of its ancestors. */
    private static boolean isLocationContainedIn(Location location, String locationId) {
        if (location==null) return false;
        if (locationId.equals(location.getId())) return true;
        return isLocationContainedIn(location.getParent(), locationId);
    }

    /**
     * True iff every entry of {@code sub} is present in {@code bigger} with an
     * equal value (null values must be present-and-null in {@code bigger}).
     */
    public static boolean isSubMapOf(Map<String, String> sub, Map<String, String> bigger) {
        for (Map.Entry<String, String> e: sub.entrySet()) {
            if (e.getValue()==null) {
                if (!bigger.containsKey(e.getKey())) return false;
                if (bigger.get(e.getKey())!=null) return false;
            } else {
                if (!e.getValue().equals(bigger.get(e.getKey()))) return false;
            }
        }
        return true;
    }
}
| bmwshop/brooklyn | locations/jclouds/src/main/java/brooklyn/location/jclouds/pool/MachinePoolPredicates.java | Java | apache-2.0 | 5,001 |
# encoding: utf-8
module RuboCop
  module Cop
    module Lint
      # This cop checks for uses of the deprecated class method usages.
      class DeprecatedClassMethods < Cop
        include AST::Sexp

        MSG = '`%s` is deprecated in favor of `%s`.'

        # [receiver class, deprecated method, replacement method]
        DEPRECATED_METHODS = [
          [:File, :exists?, :exist?],
          [:Dir, :exists?, :exist?]
        ]

        def on_send(node)
          each_deprecation(node) do |data|
            add_offense(node, :selector,
                        format(MSG,
                               deprecated_method(data),
                               replacement_method(data)))
          end
        end

        def autocorrect(node)
          lambda do |corrector|
            each_deprecation(node) do |data|
              corrector.replace(node.loc.selector, data[2].to_s)
            end
          end
        end

        private

        # Yields each DEPRECATED_METHODS entry whose receiver class and method
        # name match the given send node. Shared by detection and autocorrect.
        def each_deprecation(node)
          receiver, method_name, *_args = *node

          DEPRECATED_METHODS.each do |data|
            next unless class_nodes(data).include?(receiver)
            next unless method_name == data[1]
            yield data
          end
        end

        # Both `File.exists?` and `::File.exists?` forms must be recognized.
        def class_nodes(data)
          [s(:const, nil, data[0]),
           s(:const, s(:cbase), data[0])]
        end

        def deprecated_method(data)
          format('%s.%s', data[0], data[1])
        end

        def replacement_method(data)
          format('%s.%s', data[0], data[2])
        end
      end
    end
  end
end
| cpbuckingham/cpbuckingham.github.io | vendor/bundle/gems/rubocop-0.32.1/lib/rubocop/cop/lint/deprecated_class_methods.rb | Ruby | apache-2.0 | 1,583 |
/* $Id$ */
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.documentum.fc.client;
/** Stub interface to allow the connector to build fully.
*/
/**
 * No-op stub of Documentum's DfIdentityException; it only needs to exist so
 * code referencing this exception type can compile.
 */
public class DfIdentityException extends DfServiceException
{
}
| kishorejangid/manifoldcf | connectors/documentum/build-stub/src/main/java/com/documentum/fc/client/DfIdentityException.java | Java | apache-2.0 | 954 |
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* X11 Video Redirection
*
* Copyright 2010-2011 Vic Lee
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/ipc.h>
#include <sys/shm.h>
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <X11/Xatom.h>
#include <X11/extensions/XShm.h>
#include <winpr/crt.h>
#include <freerdp/utils/event.h>
#include <freerdp/client/tsmf.h>
#include "xf_tsmf.h"
#ifdef WITH_XV
#include <X11/extensions/Xv.h>
#include <X11/extensions/Xvlib.h>
/* Per-connection XVideo state, stored in xfContext->xv_context. */
typedef struct xf_xv_context xfXvContext;

struct xf_xv_context
{
	long xv_port;           /* selected XVideo port id (0 = none usable) */
	Atom xv_colorkey_atom;  /* XV_COLORKEY attribute atom, None if unsupported */
	int xv_image_size;      /* byte size of the current shm image buffer (0 = none) */
	int xv_shmid;           /* System V shared memory segment id backing the image */
	char* xv_shmaddr;       /* mapped address of that segment */
	UINT32* xv_pixfmts;     /* zero-terminated list of FourCC formats the port accepts */
};

/* DEBUG_XV compiles to nothing unless WITH_DEBUG_XV is defined. */
#ifdef WITH_DEBUG_XV
#define DEBUG_XV(fmt, ...) DEBUG_CLASS(XV, fmt, ## __VA_ARGS__)
#else
#define DEBUG_XV(fmt, ...) DEBUG_NULL(fmt, ## __VA_ARGS__)
#endif
/* Probe the X server for XVideo + MIT-SHM support and record the selected
 * port, its colorkey attribute and its supported pixel formats in a fresh
 * xfXvContext hung off xfc->xv_context. An xv_port of 0 means "pick the
 * last adaptor advertised by the server". On any probe failure the context
 * is left with xv_port unusable and video frames are simply not rendered. */
void xf_tsmf_init(xfContext* xfc, long xv_port)
{
	int ret;
	unsigned int i;
	unsigned int version;
	unsigned int release;
	unsigned int event_base;
	unsigned int error_base;
	unsigned int request_base;
	unsigned int num_adaptors;
	xfXvContext* xv;
	XvAdaptorInfo* ai;
	XvAttribute* attr;
	XvImageFormatValues* fo;
	xv = (xfXvContext*) malloc(sizeof(xfXvContext));
	ZeroMemory(xv, sizeof(xfXvContext));
	xfc->xv_context = xv;
	xv->xv_colorkey_atom = None;
	xv->xv_image_size = 0;
	xv->xv_port = xv_port;
	/* Frames are pushed through shared memory; without MIT-SHM we bail out. */
	if (!XShmQueryExtension(xfc->display))
	{
		DEBUG_XV("no shmem available.");
		return;
	}
	ret = XvQueryExtension(xfc->display, &version, &release, &request_base, &event_base, &error_base);
	if (ret != Success)
	{
		DEBUG_XV("XvQueryExtension failed %d.", ret);
		return;
	}
	DEBUG_XV("version %u release %u", version, release);
	ret = XvQueryAdaptors(xfc->display, DefaultRootWindow(xfc->display),
		&num_adaptors, &ai);
	if (ret != Success)
	{
		DEBUG_XV("XvQueryAdaptors failed %d.", ret);
		return;
	}
	/* When no port was requested, default to the last adaptor's base port. */
	for (i = 0; i < num_adaptors; i++)
	{
		DEBUG_XV("adapter port %ld-%ld (%s)", ai[i].base_id,
			ai[i].base_id + ai[i].num_ports - 1, ai[i].name);
		if (xv->xv_port == 0 && i == num_adaptors - 1)
			xv->xv_port = ai[i].base_id;
	}
	if (num_adaptors > 0)
		XvFreeAdaptorInfo(ai);
	if (xv->xv_port == 0)
	{
		DEBUG_XV("no adapter selected, video frames will not be processed.");
		return;
	}
	DEBUG_XV("selected %ld", xv->xv_port);
	/* If the port supports a colorkey, remember its atom and program a value
	 * (min + 1) so the video area can be keyed out when painting. */
	attr = XvQueryPortAttributes(xfc->display, xv->xv_port, &ret);
	for (i = 0; i < (unsigned int)ret; i++)
	{
		if (strcmp(attr[i].name, "XV_COLORKEY") == 0)
		{
			xv->xv_colorkey_atom = XInternAtom(xfc->display, "XV_COLORKEY", FALSE);
			XvSetPortAttribute(xfc->display, xv->xv_port, xv->xv_colorkey_atom, attr[i].min_value + 1);
			break;
		}
	}
	XFree(attr);
#ifdef WITH_DEBUG_XV
	fprintf(stderr, "xf_tsmf_init: pixel format ");
#endif
	/* Cache the port's image formats as a zero-terminated FourCC list. */
	fo = XvListImageFormats(xfc->display, xv->xv_port, &ret);
	if (ret > 0)
	{
		xv->xv_pixfmts = (UINT32*) malloc((ret + 1) * sizeof(UINT32));
		ZeroMemory(xv->xv_pixfmts, (ret + 1) * sizeof(UINT32));
		for (i = 0; i < ret; i++)
		{
			xv->xv_pixfmts[i] = fo[i].id;
#ifdef WITH_DEBUG_XV
			fprintf(stderr, "%c%c%c%c ", ((char*)(xv->xv_pixfmts + i))[0], ((char*)(xv->xv_pixfmts + i))[1],
				((char*)(xv->xv_pixfmts + i))[2], ((char*)(xv->xv_pixfmts + i))[3]);
#endif
		}
		xv->xv_pixfmts[i] = 0;
	}
	XFree(fo);
#ifdef WITH_DEBUG_XV
	fprintf(stderr, "\n");
#endif
}
/* Release all XVideo resources owned by this connection: the shared memory
 * segment (if one was ever attached), the cached pixel format list, and the
 * context structure itself. Safe to call when no context was allocated. */
void xf_tsmf_uninit(xfContext* xfc)
{
	xfXvContext* context = (xfXvContext*) xfc->xv_context;

	if (!context)
		return;

	/* Detach and mark the System V shm segment for removal, if present. */
	if (context->xv_image_size > 0)
	{
		shmdt(context->xv_shmaddr);
		shmctl(context->xv_shmid, IPC_RMID, NULL);
	}

	if (context->xv_pixfmts)
	{
		free(context->xv_pixfmts);
		context->xv_pixfmts = NULL;
	}

	free(context);
	xfc->xv_context = NULL;
}
/* Return TRUE when the given FourCC pixel format appears in the cached,
 * zero-terminated list of formats supported by the selected XVideo port. */
static BOOL xf_tsmf_is_format_supported(xfXvContext* xv, UINT32 pixfmt)
{
	int index;

	if (xv->xv_pixfmts == NULL)
		return FALSE;

	for (index = 0; xv->xv_pixfmts[index] != 0; index++)
	{
		if (xv->xv_pixfmts[index] == pixfmt)
			return TRUE;
	}

	return FALSE;
}
/* Render one decoded video frame through XVideo. The frame data is copied
 * into a (lazily re-allocated) shared memory XvImage, converting between
 * I420 and YV12 if only the other variant is supported by the port, then
 * pushed to the window with XvShmPutImage clipped/keyed to the visible
 * rectangles reported by the server. */
static void xf_process_tsmf_video_frame_event(xfContext* xfc, RDP_VIDEO_FRAME_EVENT* vevent)
{
	int i;
	BYTE* data1;
	BYTE* data2;
	UINT32 pixfmt;
	UINT32 xvpixfmt;
	BOOL converti420yv12 = FALSE;
	XvImage * image;
	int colorkey = 0;
	XShmSegmentInfo shminfo;
	xfXvContext* xv = (xfXvContext*) xfc->xv_context;
	if (xv->xv_port == 0)
		return;
	/* In case the player is minimized */
	if (vevent->x < -2048 || vevent->y < -2048 || vevent->num_visible_rects <= 0)
		return;
	/* Either paint the colorkey into the visible rects (keyed overlay), or
	 * fall back to clipping the GC to those rects. */
	if (xv->xv_colorkey_atom != None)
	{
		XvGetPortAttribute(xfc->display, xv->xv_port, xv->xv_colorkey_atom, &colorkey);
		XSetFunction(xfc->display, xfc->gc, GXcopy);
		XSetFillStyle(xfc->display, xfc->gc, FillSolid);
		XSetForeground(xfc->display, xfc->gc, colorkey);
		for (i = 0; i < vevent->num_visible_rects; i++)
		{
			XFillRectangle(xfc->display, xfc->window->handle, xfc->gc,
				vevent->x + vevent->visible_rects[i].x,
				vevent->y + vevent->visible_rects[i].y,
				vevent->visible_rects[i].width,
				vevent->visible_rects[i].height);
		}
	}
	else
	{
		XSetClipRectangles(xfc->display, xfc->gc, vevent->x, vevent->y,
			(XRectangle*) vevent->visible_rects, vevent->num_visible_rects, YXBanded);
	}
	/* Choose the upload format: use the frame's own format if the port takes
	 * it, otherwise fall back to the sibling planar format (I420 <-> YV12,
	 * which differ only in U/V plane order) and remember to swap planes. */
	pixfmt = vevent->frame_pixfmt;
	if (xf_tsmf_is_format_supported(xv, pixfmt))
	{
		xvpixfmt = pixfmt;
	}
	else if (pixfmt == RDP_PIXFMT_I420 && xf_tsmf_is_format_supported(xv, RDP_PIXFMT_YV12))
	{
		xvpixfmt = RDP_PIXFMT_YV12;
		converti420yv12 = TRUE;
	}
	else if (pixfmt == RDP_PIXFMT_YV12 && xf_tsmf_is_format_supported(xv, RDP_PIXFMT_I420))
	{
		xvpixfmt = RDP_PIXFMT_I420;
		converti420yv12 = TRUE;
	}
	else
	{
		DEBUG_XV("pixel format 0x%X not supported by hardware.", pixfmt);
		return;
	}
	image = XvShmCreateImage(xfc->display, xv->xv_port,
		xvpixfmt, 0, vevent->frame_width, vevent->frame_height, &shminfo);
	/* Re-allocate the shared memory segment only when the image size changed. */
	if (xv->xv_image_size != image->data_size)
	{
		if (xv->xv_image_size > 0)
		{
			shmdt(xv->xv_shmaddr);
			shmctl(xv->xv_shmid, IPC_RMID, NULL);
		}
		xv->xv_image_size = image->data_size;
		xv->xv_shmid = shmget(IPC_PRIVATE, image->data_size, IPC_CREAT | 0777);
		xv->xv_shmaddr = shmat(xv->xv_shmid, 0, 0);
	}
	shminfo.shmid = xv->xv_shmid;
	shminfo.shmaddr = image->data = xv->xv_shmaddr;
	shminfo.readOnly = FALSE;
	if (!XShmAttach(xfc->display, &shminfo))
	{
		XFree(image);
		DEBUG_XV("XShmAttach failed.");
		return;
	}
	/* The video driver may align each line to a different size
	   and we need to convert our original image data. */
	switch (pixfmt)
	{
		case RDP_PIXFMT_I420:
		case RDP_PIXFMT_YV12:
			/* Y */
			if (image->pitches[0] == vevent->frame_width)
			{
				memcpy(image->data + image->offsets[0],
					vevent->frame_data,
					vevent->frame_width * vevent->frame_height);
			}
			else
			{
				/* copy row by row to honor the driver's line pitch */
				for (i = 0; i < vevent->frame_height; i++)
				{
					memcpy(image->data + image->offsets[0] + i * image->pitches[0],
						vevent->frame_data + i * vevent->frame_width,
						vevent->frame_width);
				}
			}
			/* UV */
			/* Conversion between I420 and YV12 is to simply swap U and V */
			if (converti420yv12 == FALSE)
			{
				data1 = vevent->frame_data + vevent->frame_width * vevent->frame_height;
				data2 = vevent->frame_data + vevent->frame_width * vevent->frame_height +
					vevent->frame_width * vevent->frame_height / 4;
			}
			else
			{
				data2 = vevent->frame_data + vevent->frame_width * vevent->frame_height;
				data1 = vevent->frame_data + vevent->frame_width * vevent->frame_height +
					vevent->frame_width * vevent->frame_height / 4;
				image->id = pixfmt == RDP_PIXFMT_I420 ? RDP_PIXFMT_YV12 : RDP_PIXFMT_I420;
			}
			if (image->pitches[1] * 2 == vevent->frame_width)
			{
				memcpy(image->data + image->offsets[1],
					data1,
					vevent->frame_width * vevent->frame_height / 4);
				memcpy(image->data + image->offsets[2],
					data2,
					vevent->frame_width * vevent->frame_height / 4);
			}
			else
			{
				/* chroma planes are half width/height; copy row by row */
				for (i = 0; i < vevent->frame_height / 2; i++)
				{
					memcpy(image->data + image->offsets[1] + i * image->pitches[1],
						data1 + i * vevent->frame_width / 2,
						vevent->frame_width / 2);
					memcpy(image->data + image->offsets[2] + i * image->pitches[2],
						data2 + i * vevent->frame_width / 2,
						vevent->frame_width / 2);
				}
			}
			break;
		default:
			/* packed formats: single bulk copy, clamped to the smaller size */
			memcpy(image->data, vevent->frame_data, image->data_size <= vevent->frame_size ?
				image->data_size : vevent->frame_size);
			break;
	}
	XvShmPutImage(xfc->display, xv->xv_port, xfc->window->handle, xfc->gc, image,
		0, 0, image->width, image->height,
		vevent->x, vevent->y, vevent->width, vevent->height, FALSE);
	if (xv->xv_colorkey_atom == None)
		XSetClipMask(xfc->display, xfc->gc, None);
	XSync(xfc->display, FALSE);
	XShmDetach(xfc->display, &shminfo);
	XFree(image);
}
/* Repaint the requested region of the window from the off-screen primary
 * pixmap (used when the video overlay area must be restored). */
static void xf_process_tsmf_redraw_event(xfContext* xfc, RDP_REDRAW_EVENT* revent)
{
	XSetFunction(xfc->display, xfc->gc, GXcopy);
	XSetFillStyle(xfc->display, xfc->gc, FillSolid);
	XCopyArea(xfc->display, xfc->primary, xfc->window->handle, xfc->gc,
		revent->x, revent->y, revent->width, revent->height, revent->x, revent->y);
}
/* Dispatch a TSMF channel message to the matching handler; messages of any
 * other type are silently ignored. */
void xf_process_tsmf_event(xfContext* xfc, wMessage* event)
{
	switch (GetMessageType(event->id))
	{
		case TsmfChannel_VideoFrame:
			/* a decoded frame is ready to be rendered */
			xf_process_tsmf_video_frame_event(xfc, (RDP_VIDEO_FRAME_EVENT*) event);
			break;

		case TsmfChannel_Redraw:
			/* the video region must be repainted from the primary surface */
			xf_process_tsmf_redraw_event(xfc, (RDP_REDRAW_EVENT*) event);
			break;

		default:
			break;
	}
}
#else /* WITH_XV */
/* XVideo support was not compiled in: provide no-op stubs so callers can
 * invoke the TSMF hooks unconditionally. */
void xf_tsmf_init(xfContext* xfc, long xv_port)
{
}
void xf_tsmf_uninit(xfContext* xfc)
{
}
void xf_process_tsmf_event(xfContext* xfc, wMessage* event)
{
}
#endif /* WITH_XV */
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.server;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import io.airlift.configuration.Config;
import io.airlift.resolver.ArtifactResolver;
import javax.validation.constraints.NotNull;
import java.util.List;
/**
 * Configuration for the development plugin loader: the plugin bundles to load
 * and the Maven repositories used to resolve them.
 */
public class DevelopmentLoaderConfig
{
    /** Comma splitter shared by all String-valued setters. */
    private static final Splitter SPLITTER = Splitter.on(',').omitEmptyStrings().trimResults();

    private List<String> plugins = ImmutableList.of();
    private String mavenLocalRepository = ArtifactResolver.USER_LOCAL_REPO;
    private List<String> mavenRemoteRepository = ImmutableList.of(ArtifactResolver.MAVEN_CENTRAL_URI);

    public List<String> getPlugins()
    {
        return plugins;
    }

    public DevelopmentLoaderConfig setPlugins(List<String> plugins)
    {
        this.plugins = ImmutableList.copyOf(plugins);
        return this;
    }

    @Config("plugin.bundles")
    public DevelopmentLoaderConfig setPlugins(String plugins)
    {
        this.plugins = SPLITTER.splitToList(plugins);
        return this;
    }

    @NotNull
    public String getMavenLocalRepository()
    {
        return mavenLocalRepository;
    }

    @Config("maven.repo.local")
    public DevelopmentLoaderConfig setMavenLocalRepository(String mavenLocalRepository)
    {
        this.mavenLocalRepository = mavenLocalRepository;
        return this;
    }

    @NotNull
    public List<String> getMavenRemoteRepository()
    {
        return mavenRemoteRepository;
    }

    public DevelopmentLoaderConfig setMavenRemoteRepository(List<String> mavenRemoteRepository)
    {
        this.mavenRemoteRepository = mavenRemoteRepository;
        return this;
    }

    @Config("maven.repo.remote")
    public DevelopmentLoaderConfig setMavenRemoteRepository(String mavenRemoteRepository)
    {
        // Reuse the shared SPLITTER (identical configuration) instead of
        // rebuilding an equivalent Splitter inline, matching setPlugins(String).
        this.mavenRemoteRepository = ImmutableList.copyOf(SPLITTER.split(mavenRemoteRepository));
        return this;
    }
}
| electrum/presto | testing/trino-server-dev/src/main/java/io/trino/server/DevelopmentLoaderConfig.java | Java | apache-2.0 | 2,534 |
/**
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.portlets.statistics;
import java.util.Collections;
import java.util.List;
import org.jasig.portal.events.aggr.portletlayout.PortletLayoutAggregation;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.portlet.ModelAndView;
import org.springframework.web.portlet.bind.annotation.RenderMapping;
import org.springframework.web.portlet.bind.annotation.ResourceMapping;
import com.google.visualization.datasource.base.TypeMismatchException;
import com.google.visualization.datasource.datatable.value.NumberValue;
import com.google.visualization.datasource.datatable.value.Value;
/**
* @author Chris Waymire <cwaymire@unicon.net>
*/
@Controller
@RequestMapping(value="VIEW")
public class PortletMoveStatisticsController extends BasePortletLayoutStatisticsController<PortletMoveReportForm> {
private static final String DATA_TABLE_RESOURCE_ID = "portletMoveData";
private static final String REPORT_NAME = "portletMove.totals";
@RenderMapping(value="MAXIMIZED", params="report=" + REPORT_NAME)
public String getLoginView() throws TypeMismatchException {
return super.getLoginView();
}
@ResourceMapping(DATA_TABLE_RESOURCE_ID)
public ModelAndView renderPortletAddAggregationReport(PortletMoveReportForm form) throws TypeMismatchException {
return super.renderPortletAddAggregationReport(form);
}
@Override
public String getReportName() {
return REPORT_NAME;
}
@Override
public String getReportDataResourceId() {
return DATA_TABLE_RESOURCE_ID;
}
@Override
protected List<Value> createRowValues(PortletLayoutAggregation aggr, PortletMoveReportForm form) {
int count = aggr != null ? aggr.getMoveCount() : 0;
return Collections.<Value>singletonList(new NumberValue(count));
}
} | pspaude/uPortal | uportal-war/src/main/java/org/jasig/portal/portlets/statistics/PortletMoveStatisticsController.java | Java | apache-2.0 | 2,682 |
//
// CAEmitterBehavior+TFEasyCoder.h
// TFEasyCoder
//
// Created by ztf on 16/10/26.
// Copyright © 2016年 ztf. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import "TFEasyCoderConst.h"
// Configuration block invoked with the CAEmitterBehavior instance being built.
typedef void(^CAEmitterBehaviorEasyCoderBlock) (CAEmitterBehavior * ins);
// Category adding block-based construction plus chainable `set_*` property
// setters (each returns a block that assigns the value and yields the
// receiver, enabling fluent configuration).
@interface CAEmitterBehavior (TFEasyCoder)
// Create a new instance and configure it inside the block.
+( CAEmitterBehavior *)easyCoder:(CAEmitterBehaviorEasyCoderBlock)block;
// Configure an existing instance inside the block.
-(CAEmitterBehavior *)easyCoder:(CAEmitterBehaviorEasyCoderBlock)block;
// Chainable setters for CAEmitterBehavior's own properties.
-(CAEmitterBehavior *(^)(NSString * name))set_name;
-(CAEmitterBehavior *(^)(BOOL enabled))set_enabled;
//superclass pros NSObject
// Chainable setters for inherited UIAccessibility/NSObject properties.
-(CAEmitterBehavior *(^)(NSArray * accessibilityElements))set_accessibilityElements;
-(CAEmitterBehavior *(^)(NSArray * accessibilityCustomActions))set_accessibilityCustomActions;
-(CAEmitterBehavior *(^)(BOOL isAccessibilityElement))set_isAccessibilityElement;
-(CAEmitterBehavior *(^)(NSString * accessibilityLabel))set_accessibilityLabel;
-(CAEmitterBehavior *(^)(NSString * accessibilityHint))set_accessibilityHint;
-(CAEmitterBehavior *(^)(NSString * accessibilityValue))set_accessibilityValue;
-(CAEmitterBehavior *(^)(unsigned long long accessibilityTraits))set_accessibilityTraits;
-(CAEmitterBehavior *(^)(UIBezierPath * accessibilityPath))set_accessibilityPath;
-(CAEmitterBehavior *(^)(CGPoint accessibilityActivationPoint))set_accessibilityActivationPoint;
-(CAEmitterBehavior *(^)(NSString * accessibilityLanguage))set_accessibilityLanguage;
-(CAEmitterBehavior *(^)(BOOL accessibilityElementsHidden))set_accessibilityElementsHidden;
-(CAEmitterBehavior *(^)(BOOL accessibilityViewIsModal))set_accessibilityViewIsModal;
-(CAEmitterBehavior *(^)(BOOL shouldGroupAccessibilityChildren))set_shouldGroupAccessibilityChildren;
-(CAEmitterBehavior *(^)(long long accessibilityNavigationStyle))set_accessibilityNavigationStyle;
// Generic KVC setter: assigns `value` for `key` and returns the receiver.
-(CAEmitterBehavior *(^)(id value,NSString *key))set_ValueKey;
@end
<!DOCTYPE html>
<html>
<head>
<title>sidebar-v2 example</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
<link href="http://maxcdn.bootstrapcdn.com/font-awesome/4.1.0/css/font-awesome.min.css" rel="stylesheet">
<link rel="stylesheet" href="http://cdn.leafletjs.com/leaflet-0.7.2/leaflet.css" />
<!--[if lte IE 8]><link rel="stylesheet" href="http://cdn.leafletjs.com/leaflet-0.7.2/leaflet.ie.css" /><![endif]-->
<link rel="stylesheet" href="../css/leaflet-sidebar.css" />
<style>
body {
padding: 0;
margin: 0;
}
html, body, #map {
height: 100%;
font: 10pt "Helvetica Neue", Arial, Helvetica, sans-serif;
}
.lorem {
font-style: italic;
color: #AAA;
}
</style>
</head>
<body>
<div id="sidebar" class="sidebar collapsed">
<!-- Nav tabs -->
<div class="sidebar-tabs">
<ul role="tablist">
<li><a href="#home" role="tab"><i class="fa fa-bars"></i></a></li>
<li><a href="#profile" role="tab"><i class="fa fa-user"></i></a></li>
<li class="disabled"><a href="#messages" role="tab"><i class="fa fa-envelope"></i></a></li>
</ul>
<ul role="tablist">
<li><a href="#settings" role="tab"><i class="fa fa-gear"></i></a></li>
</ul>
</div>
<!-- Tab panes -->
<div class="sidebar-content">
<div class="sidebar-pane" id="home">
<h1 class="sidebar-header">
sidebar-v2
<div class="sidebar-close"><i class="fa fa-caret-left"></i></div>
</h1>
<p>A responsive sidebar for mapping libraries like <a href="http://leafletjs.com/">Leaflet</a> or <a href="http://openlayers.org/">OpenLayers</a>.</p>
<p class="lorem">Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.</p>
<p class="lorem">Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.</p>
<p class="lorem">Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.</p>
<p class="lorem">Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.</p>
</div>
<div class="sidebar-pane" id="profile">
<h1 class="sidebar-header">Profile<div class="sidebar-close"><i class="fa fa-caret-left"></i></div></h1>
</div>
<div class="sidebar-pane" id="messages">
<h1 class="sidebar-header">Messages<div class="sidebar-close"><i class="fa fa-caret-left"></i></div></h1>
</div>
<div class="sidebar-pane" id="settings">
<h1 class="sidebar-header">Settings<div class="sidebar-close"><i class="fa fa-caret-left"></i></div></h1>
</div>
</div>
</div>
<div id="map" class="sidebar-map"></div>
<a href="https://github.com/Turbo87/sidebar-v2/"><img style="position: fixed; top: 0; right: 0; border: 0;" src="https://s3.amazonaws.com/github/ribbons/forkme_right_darkblue_121621.png" alt="Fork me on GitHub"></a>
<script src="http://cdn.leafletjs.com/leaflet-0.7.2/leaflet.js"></script>
<script src="../js/leaflet-sidebar.js"></script>
    <script>
        // Create the Leaflet map in the #map container and centre it
        // near Wuppertal, Germany at zoom level 9.
        var map = L.map('map');
        map.setView([51.2, 7], 9);
        // Standard OpenStreetMap raster tile layer with the required attribution.
        L.tileLayer('http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
            maxZoom: 18,
            attribution: 'Map data © OpenStreetMap contributors'
        }).addTo(map);
        // Demo marker at the map centre.
        var marker = L.marker([51.2, 7]).addTo(map);
        // Attach the sidebar-v2 control (defined in ../js/leaflet-sidebar.js)
        // to the #sidebar element declared above.
        var sidebar = L.control.sidebar('sidebar').addTo(map);
    </script>
</body>
</html>
| sethmbaker/fewsn-web | wsgi/static/leaflet-sidebar-v2/examples/index.html | HTML | apache-2.0 | 5,925 |
#!/usr/bin/env python
# example checkbutton.py
import pygtk
pygtk.require('2.0')
import gtk
class CheckButton:
    """Demo window with two check buttons and a Quit button (PyGTK 2, Python 2)."""
    # Our callback.
    # The data passed to this method is printed to stdout
    def callback(self, widget, data=None):
        """Log which check button was toggled and its new ON/OFF state."""
        # get_active() is False/True; used as index into ("OFF", "ON").
        print "%s was toggled %s" % (data, ("OFF", "ON")[widget.get_active()])
    # This callback quits the program
    def delete_event(self, widget, event, data=None):
        """Window-close handler: stop the GTK main loop.

        Returning False lets GTK proceed with destroying the window.
        """
        gtk.main_quit()
        return False
    def __init__(self):
        """Build the window, pack the widgets, and show everything."""
        # Create a new window
        self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        # Set the window title
        self.window.set_title("Check Button")
        # Set a handler for delete_event that immediately
        # exits GTK.
        self.window.connect("delete_event", self.delete_event)
        # Sets the border width of the window.
        self.window.set_border_width(20)
        # Create a vertical box (homogeneous, 2px spacing)
        vbox = gtk.VBox(True, 2)
        # Put the vbox in the main window
        self.window.add(vbox)
        # Create first button
        button = gtk.CheckButton("check button 1")
        # When the button is toggled, we call the "callback" method
        # with a pointer to "button" as its argument
        button.connect("toggled", self.callback, "check button 1")
        # Insert button 1
        vbox.pack_start(button, True, True, 2)
        button.show()
        # Create second button
        button = gtk.CheckButton("check button 2")
        # When the button is toggled, we call the "callback" method
        # with a pointer to "button 2" as its argument
        button.connect("toggled", self.callback, "check button 2")
        # Insert button 2
        vbox.pack_start(button, True, True, 2)
        button.show()
        # Create "Quit" button
        button = gtk.Button("Quit")
        # When the button is clicked, we call the mainquit function
        # and the program exits
        button.connect("clicked", lambda wid: gtk.main_quit())
        # Insert the quit button
        vbox.pack_start(button, True, True, 2)
        button.show()
        vbox.show()
        self.window.show()
def main():
    """Enter the GTK main loop; returns 0 once the loop has exited."""
    gtk.main()
    return 0
if __name__ == "__main__":
    # Build the demo window, then block in the GTK event loop.
    CheckButton()
    main()
| spaceone/pyjs | pygtkweb/demos/checkbutton.py | Python | apache-2.0 | 2,231 |
/*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.entitySystem.prefab.internal;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.terasology.assets.AssetType;
import org.terasology.assets.ResourceUrn;
import org.terasology.entitySystem.Component;
import org.terasology.entitySystem.prefab.Prefab;
import org.terasology.entitySystem.prefab.PrefabData;
import java.util.List;
import java.util.Map;
/**
 * Plain in-memory {@link Prefab} implementation backed by an immutable component map.
 * Each prefab registers itself with its {@link PojoPrefab} parent on reload so the
 * parent can enumerate its children.
 *
 * @author Immortius
 */
public class PojoPrefab extends Prefab {

    private Prefab parent;
    // Immutable snapshot of the prefab's components, rebuilt on each reload.
    private Map<Class<? extends Component>, Component> componentMap;
    private List<Prefab> children = Lists.newArrayList();
    private boolean persisted;
    private boolean alwaysRelevant = true;

    /**
     * @param urn       the asset identifier for this prefab
     * @param assetType the owning asset type
     * @param data      the prefab contents; applied immediately via {@code reload}
     */
    public PojoPrefab(ResourceUrn urn, AssetType<?, PrefabData> assetType, PrefabData data) {
        super(urn, assetType);
        reload(data);
    }

    @Override
    public Prefab getParent() {
        return parent;
    }

    /**
     * @return an immutable snapshot of this prefab's direct children
     */
    @Override
    public List<Prefab> getChildren() {
        return ImmutableList.copyOf(children);
    }

    @Override
    public boolean isPersisted() {
        return persisted;
    }

    @Override
    public boolean isAlwaysRelevant() {
        return alwaysRelevant;
    }

    @Override
    public boolean exists() {
        return true;
    }

    @Override
    public boolean hasComponent(Class<? extends Component> component) {
        return componentMap.containsKey(component);
    }

    @Override
    public <T extends Component> T getComponent(Class<T> componentClass) {
        // Returns null when the component is absent; cast is safe because the
        // map is keyed by the component's own class.
        return componentClass.cast(componentMap.get(componentClass));
    }

    @Override
    public Iterable<Component> iterateComponents() {
        return ImmutableList.copyOf(componentMap.values());
    }

    @Override
    protected void doDispose() {
        // Nothing to release: all state is plain heap data.
    }

    @Override
    protected void doReload(PrefabData data) {
        this.componentMap = ImmutableMap.copyOf(data.getComponents());
        this.persisted = data.isPersisted();
        this.alwaysRelevant = data.isAlwaysRelevant();
        this.parent = data.getParent();
        // instanceof is null-safe, so an explicit null check is unnecessary.
        if (parent instanceof PojoPrefab) {
            PojoPrefab pojoParent = (PojoPrefab) parent;
            // Guard against registering this prefab as a child a second time
            // when the asset is reloaded with the same parent.
            if (!pojoParent.children.contains(this)) {
                pojoParent.children.add(this);
            }
        }
    }
}
| immortius/Terasology | engine/src/main/java/org/terasology/entitySystem/prefab/internal/PojoPrefab.java | Java | apache-2.0 | 2,855 |
# `redis:5.0.14`
## Docker Metadata
- Image ID: `sha256:73c4ad8da261b4929be877b8a775885aee54394c5f112158074221ba5ee2de32`
- Created: `2022-03-03T12:16:24.251208012Z`
- Virtual Size: ~ 109.62 MB
(total size of all layers on-disk)
- Arch: `linux`/`amd64`
- Entrypoint: `["docker-entrypoint.sh"]`
- Command: `["redis-server"]`
- Environment:
- `PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin`
- `GOSU_VERSION=1.14`
- `REDIS_VERSION=5.0.14`
- `REDIS_DOWNLOAD_URL=http://download.redis.io/releases/redis-5.0.14.tar.gz`
- `REDIS_DOWNLOAD_SHA=3ea5024766d983249e80d4aa9457c897a9f079957d0fb1f35682df233f997f32`
## `dpkg` (`.deb`-based packages)
### `dpkg` source package: `acl=2.2.53-10`
Binary Packages:
- `libacl1:amd64=2.2.53-10`
Licenses: (parsed from: `/usr/share/doc/libacl1/copyright`)
- `GPL-2`
- `GPL-2+`
- `LGPL-2+`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris acl=2.2.53-10
'http://deb.debian.org/debian/pool/main/a/acl/acl_2.2.53-10.dsc' acl_2.2.53-10.dsc 2468 SHA256:09204a89156b17a3603b2ce34b3c7b1a9fd7345086c787962188d95347918c59
'http://deb.debian.org/debian/pool/main/a/acl/acl_2.2.53.orig.tar.gz' acl_2.2.53.orig.tar.gz 524300 SHA256:06be9865c6f418d851ff4494e12406568353b891ffe1f596b34693c387af26c7
'http://deb.debian.org/debian/pool/main/a/acl/acl_2.2.53.orig.tar.gz.asc' acl_2.2.53.orig.tar.gz.asc 833 SHA256:06849bece0b56a6a7269173abe101cff223bb9346d74027a3cd5ff80914abf4b
'http://deb.debian.org/debian/pool/main/a/acl/acl_2.2.53-10.debian.tar.xz' acl_2.2.53-10.debian.tar.xz 25536 SHA256:6b83a626aa383334b64666181642c7c13e44a6fe65486d0aaa34bd8de6d58b20
```
Other potentially useful URLs:
- https://sources.debian.net/src/acl/2.2.53-10/ (for browsing the source)
- https://sources.debian.net/src/acl/2.2.53-10/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/acl/2.2.53-10/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `adduser=3.118`
Binary Packages:
- `adduser=3.118`
Licenses: (parsed from: `/usr/share/doc/adduser/copyright`)
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris adduser=3.118
'http://deb.debian.org/debian/pool/main/a/adduser/adduser_3.118.dsc' adduser_3.118.dsc 1670 SHA256:fc79bc37fcf5e5700546c78a80670bb7b34836d012595b343fe2304cac82917d
'http://deb.debian.org/debian/pool/main/a/adduser/adduser_3.118.tar.xz' adduser_3.118.tar.xz 212280 SHA256:3e9eea661c9aac6b2c791bfcc1de3a9c6a422d45c8f3d38ed417737ed3166ffc
```
Other potentially useful URLs:
- https://sources.debian.net/src/adduser/3.118/ (for browsing the source)
- https://sources.debian.net/src/adduser/3.118/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/adduser/3.118/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `apt=2.2.4`
Binary Packages:
- `apt=2.2.4`
- `libapt-pkg6.0:amd64=2.2.4`
Licenses: (parsed from: `/usr/share/doc/apt/copyright`, `/usr/share/doc/libapt-pkg6.0/copyright`)
- `GPL-2`
- `GPLv2+`
Source:
```console
$ apt-get source -qq --print-uris apt=2.2.4
'http://deb.debian.org/debian/pool/main/a/apt/apt_2.2.4.dsc' apt_2.2.4.dsc 2780 SHA256:750079533300bc3a4f3e10a9c8dbffaa0781b92e3616a12d7e18ab1378ca4466
'http://deb.debian.org/debian/pool/main/a/apt/apt_2.2.4.tar.xz' apt_2.2.4.tar.xz 2197424 SHA256:6eecd04a4979bd2040b22a14571c15d342c4e1802b2023acb5aa19649b1f64ea
```
Other potentially useful URLs:
- https://sources.debian.net/src/apt/2.2.4/ (for browsing the source)
- https://sources.debian.net/src/apt/2.2.4/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/apt/2.2.4/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `attr=1:2.4.48-6`
Binary Packages:
- `libattr1:amd64=1:2.4.48-6`
Licenses: (parsed from: `/usr/share/doc/libattr1/copyright`)
- `GPL-2`
- `GPL-2+`
- `LGPL-2+`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris attr=1:2.4.48-6
'http://deb.debian.org/debian/pool/main/a/attr/attr_2.4.48-6.dsc' attr_2.4.48-6.dsc 2433 SHA256:d55d1ba40517146e9a43f9ed1c5dbd82cfe079cd1fdb852323717a953515cfa4
'http://deb.debian.org/debian/pool/main/a/attr/attr_2.4.48.orig.tar.gz' attr_2.4.48.orig.tar.gz 467840 SHA256:5ead72b358ec709ed00bbf7a9eaef1654baad937c001c044fe8b74c57f5324e7
'http://deb.debian.org/debian/pool/main/a/attr/attr_2.4.48.orig.tar.gz.asc' attr_2.4.48.orig.tar.gz.asc 833 SHA256:5d23c2c83cc13d170f1c209f48d0efa1fc46d16487b790e9996c5206dcfe0395
'http://deb.debian.org/debian/pool/main/a/attr/attr_2.4.48-6.debian.tar.xz' attr_2.4.48-6.debian.tar.xz 27260 SHA256:77f7e03cc8dd039abc3e4a7353f816a3b07fbd0b22d7784f635c5edf7d20b6df
```
Other potentially useful URLs:
- https://sources.debian.net/src/attr/1:2.4.48-6/ (for browsing the source)
- https://sources.debian.net/src/attr/1:2.4.48-6/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/attr/1:2.4.48-6/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `audit=1:3.0-2`
Binary Packages:
- `libaudit-common=1:3.0-2`
- `libaudit1:amd64=1:3.0-2`
Licenses: (parsed from: `/usr/share/doc/libaudit-common/copyright`, `/usr/share/doc/libaudit1/copyright`)
- `GPL-1`
- `GPL-2`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris audit=1:3.0-2
'http://deb.debian.org/debian/pool/main/a/audit/audit_3.0-2.dsc' audit_3.0-2.dsc 2397 SHA256:3cb83cc7649bb854c76f9cb6744b34091e667e433a91a57323938fdf3f353227
'http://deb.debian.org/debian/pool/main/a/audit/audit_3.0.orig.tar.gz' audit_3.0.orig.tar.gz 1109442 SHA256:bd31826823b912b6fe271d2d979ed879e9fc393cab1e2f7c4e1af258231765b8
'http://deb.debian.org/debian/pool/main/a/audit/audit_3.0-2.debian.tar.xz' audit_3.0-2.debian.tar.xz 18640 SHA256:10193fa9823eb66dfb1220fb109b8b8e01f3f720c5a1630e9015d92aa7a8ce3a
```
Other potentially useful URLs:
- https://sources.debian.net/src/audit/1:3.0-2/ (for browsing the source)
- https://sources.debian.net/src/audit/1:3.0-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/audit/1:3.0-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `base-files=11.1+deb11u2`
Binary Packages:
- `base-files=11.1+deb11u2`
Licenses: (parsed from: `/usr/share/doc/base-files/copyright`)
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris base-files=11.1+deb11u2
'http://deb.debian.org/debian/pool/main/b/base-files/base-files_11.1%2bdeb11u2.dsc' base-files_11.1+deb11u2.dsc 1110 SHA256:5468729fe6f6971890303bacd8d8b6b32c12cfa1f8b00be6d06254e3e318246d
'http://deb.debian.org/debian/pool/main/b/base-files/base-files_11.1%2bdeb11u2.tar.xz' base-files_11.1+deb11u2.tar.xz 65556 SHA256:a0338008933fcc8260c91e526dab8f0ae38811cc5a0b8f64057101b2f27aeb32
```
Other potentially useful URLs:
- https://sources.debian.net/src/base-files/11.1+deb11u2/ (for browsing the source)
- https://sources.debian.net/src/base-files/11.1+deb11u2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/base-files/11.1+deb11u2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `base-passwd=3.5.51`
Binary Packages:
- `base-passwd=3.5.51`
Licenses: (parsed from: `/usr/share/doc/base-passwd/copyright`)
- `GPL-2`
- `public-domain`
Source:
```console
$ apt-get source -qq --print-uris base-passwd=3.5.51
'http://deb.debian.org/debian/pool/main/b/base-passwd/base-passwd_3.5.51.dsc' base-passwd_3.5.51.dsc 1757 SHA256:5752e4c2e3b9b4d45502f6aa5ce8dfd0136ea60f1b4fbd4524385e4bbd6a1571
'http://deb.debian.org/debian/pool/main/b/base-passwd/base-passwd_3.5.51.tar.xz' base-passwd_3.5.51.tar.xz 53980 SHA256:66c75ce1877759148dbdd2704b138c4a02adab89d7d7591b6ab184f8f614efba
```
Other potentially useful URLs:
- https://sources.debian.net/src/base-passwd/3.5.51/ (for browsing the source)
- https://sources.debian.net/src/base-passwd/3.5.51/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/base-passwd/3.5.51/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `bash=5.1-2`
Binary Packages:
- `bash=5.1-2+b3`
Licenses: (parsed from: `/usr/share/doc/bash/copyright`)
- `GPL-3`
Source:
```console
$ apt-get source -qq --print-uris bash=5.1-2
'http://deb.debian.org/debian/pool/main/b/bash/bash_5.1-2.dsc' bash_5.1-2.dsc 2296 SHA256:1129f1397ec8e673bb8fc6acf53b371b9ed4132a7076e59bc0bf0f8e8d134e32
'http://deb.debian.org/debian/pool/main/b/bash/bash_5.1.orig.tar.xz' bash_5.1.orig.tar.xz 5802740 SHA256:d5eeee4f953c09826409d572e2e8996a2140d67eb8f382ce1f3a9d23883ad696
'http://deb.debian.org/debian/pool/main/b/bash/bash_5.1-2.debian.tar.xz' bash_5.1-2.debian.tar.xz 90660 SHA256:b41f4a62e613ccffbef6032eee4d671bf82cdb00472c452fcb0c510a1503710c
```
Other potentially useful URLs:
- https://sources.debian.net/src/bash/5.1-2/ (for browsing the source)
- https://sources.debian.net/src/bash/5.1-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/bash/5.1-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `bzip2=1.0.8-4`
Binary Packages:
- `libbz2-1.0:amd64=1.0.8-4`
Licenses: (parsed from: `/usr/share/doc/libbz2-1.0/copyright`)
- `BSD-variant`
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris bzip2=1.0.8-4
'http://deb.debian.org/debian/pool/main/b/bzip2/bzip2_1.0.8-4.dsc' bzip2_1.0.8-4.dsc 1603 SHA256:662c5e656a87db884fdc070239f5112cba1e616f20ff260de602876f70415c7b
'http://deb.debian.org/debian/pool/main/b/bzip2/bzip2_1.0.8.orig.tar.gz' bzip2_1.0.8.orig.tar.gz 810029 SHA256:ab5a03176ee106d3f0fa90e381da478ddae405918153cca248e682cd0c4a2269
'http://deb.debian.org/debian/pool/main/b/bzip2/bzip2_1.0.8-4.debian.tar.bz2' bzip2_1.0.8-4.debian.tar.bz2 26515 SHA256:3f3b26d83120260c7b2e69a5c89649bb818a79955b960fb34a5fae106f008a5d
```
Other potentially useful URLs:
- https://sources.debian.net/src/bzip2/1.0.8-4/ (for browsing the source)
- https://sources.debian.net/src/bzip2/1.0.8-4/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/bzip2/1.0.8-4/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `cdebconf=0.260`
Binary Packages:
- `libdebconfclient0:amd64=0.260`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris cdebconf=0.260
'http://deb.debian.org/debian/pool/main/c/cdebconf/cdebconf_0.260.dsc' cdebconf_0.260.dsc 2750 SHA256:0c0a3d76e19685f998e3b85834200255268f36b09eedfa9157fe0213958b7ea5
'http://deb.debian.org/debian/pool/main/c/cdebconf/cdebconf_0.260.tar.xz' cdebconf_0.260.tar.xz 279824 SHA256:ac8a9d7449c76eeaa8ed4ef0bbbf4c16b1b816b9905690c732dea2f341ac079b
```
Other potentially useful URLs:
- https://sources.debian.net/src/cdebconf/0.260/ (for browsing the source)
- https://sources.debian.net/src/cdebconf/0.260/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/cdebconf/0.260/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `coreutils=8.32-4`
Binary Packages:
- `coreutils=8.32-4+b1`
Licenses: (parsed from: `/usr/share/doc/coreutils/copyright`)
- `GPL-3`
Source:
```console
$ apt-get source -qq --print-uris coreutils=8.32-4
'http://deb.debian.org/debian/pool/main/c/coreutils/coreutils_8.32-4.dsc' coreutils_8.32-4.dsc 2096 SHA256:ea8cafd14b693ec2d8b6e33ee8564c1fa5f102e65574252b0d524aaee04ba7e9
'http://deb.debian.org/debian/pool/main/c/coreutils/coreutils_8.32.orig.tar.xz' coreutils_8.32.orig.tar.xz 5547836 SHA256:4458d8de7849df44ccab15e16b1548b285224dbba5f08fac070c1c0e0bcc4cfa
'http://deb.debian.org/debian/pool/main/c/coreutils/coreutils_8.32.orig.tar.xz.asc' coreutils_8.32.orig.tar.xz.asc 833 SHA256:71b944375b322ba77c9c56b687b48df885c676d4fd7c465b3706713a9b62ce0a
'http://deb.debian.org/debian/pool/main/c/coreutils/coreutils_8.32-4.debian.tar.xz' coreutils_8.32-4.debian.tar.xz 33028 SHA256:2d5337067b675e0b3fa7c88df164e7738ed4715a39e88e1e82dc9185e4e1b951
```
Other potentially useful URLs:
- https://sources.debian.net/src/coreutils/8.32-4/ (for browsing the source)
- https://sources.debian.net/src/coreutils/8.32-4/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/coreutils/8.32-4/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `dash=0.5.11+git20200708+dd9ef66-5`
Binary Packages:
- `dash=0.5.11+git20200708+dd9ef66-5`
Licenses: (parsed from: `/usr/share/doc/dash/copyright`)
- `BSD-3-Clause`
- `BSD-3-clause`
- `Expat`
- `FSFUL`
- `FSFULLR`
- `GPL-2`
- `GPL-2+`
- `public-domain`
Source:
```console
$ apt-get source -qq --print-uris dash=0.5.11+git20200708+dd9ef66-5
'http://deb.debian.org/debian/pool/main/d/dash/dash_0.5.11%2bgit20200708%2bdd9ef66-5.dsc' dash_0.5.11+git20200708+dd9ef66-5.dsc 1906 SHA256:b0568c34647dc2aa0b8e2656c5e7449d9a1feb4b89d6857f507173b1f9a42ee7
'http://deb.debian.org/debian/pool/main/d/dash/dash_0.5.11%2bgit20200708%2bdd9ef66.orig.tar.gz' dash_0.5.11+git20200708+dd9ef66.orig.tar.gz 167776 SHA256:ab70b1f165bfedadd1282da546f1c917f1b7ccb2c5c2f898310a963e2ab5520c
'http://deb.debian.org/debian/pool/main/d/dash/dash_0.5.11%2bgit20200708%2bdd9ef66-5.debian.tar.xz' dash_0.5.11+git20200708+dd9ef66-5.debian.tar.xz 43120 SHA256:5da6039e043c953ff91a31c767ed703699870682ff356a1642f4798ce04a2926
```
Other potentially useful URLs:
- https://sources.debian.net/src/dash/0.5.11+git20200708+dd9ef66-5/ (for browsing the source)
- https://sources.debian.net/src/dash/0.5.11+git20200708+dd9ef66-5/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/dash/0.5.11+git20200708+dd9ef66-5/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `db5.3=5.3.28+dfsg1-0.8`
Binary Packages:
- `libdb5.3:amd64=5.3.28+dfsg1-0.8`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris db5.3=5.3.28+dfsg1-0.8
'http://deb.debian.org/debian/pool/main/d/db5.3/db5.3_5.3.28%2bdfsg1-0.8.dsc' db5.3_5.3.28+dfsg1-0.8.dsc 3113 SHA256:5189bebd157e3b51c075804d1affebc87cdbfb782808c621e131660719c24374
'http://deb.debian.org/debian/pool/main/d/db5.3/db5.3_5.3.28%2bdfsg1.orig.tar.xz' db5.3_5.3.28+dfsg1.orig.tar.xz 19723860 SHA256:b19bf3dd8ce74b95a7b215be9a7c8489e8e8f18da60d64d6340a06e75f497749
'http://deb.debian.org/debian/pool/main/d/db5.3/db5.3_5.3.28%2bdfsg1-0.8.debian.tar.xz' db5.3_5.3.28+dfsg1-0.8.debian.tar.xz 30748 SHA256:073c0c87283bf5e606f3ce6d1814315b40b9685c943601ae3fd81e2da4e612d4
```
Other potentially useful URLs:
- https://sources.debian.net/src/db5.3/5.3.28+dfsg1-0.8/ (for browsing the source)
- https://sources.debian.net/src/db5.3/5.3.28+dfsg1-0.8/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/db5.3/5.3.28+dfsg1-0.8/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `debconf=1.5.77`
Binary Packages:
- `debconf=1.5.77`
Licenses: (parsed from: `/usr/share/doc/debconf/copyright`)
- `BSD-2-clause`
Source:
```console
$ apt-get source -qq --print-uris debconf=1.5.77
'http://deb.debian.org/debian/pool/main/d/debconf/debconf_1.5.77.dsc' debconf_1.5.77.dsc 2082 SHA256:2797e40ac2122a0ca6c1aa27bd63203e9da4342bb60e614efb848452a5696e41
'http://deb.debian.org/debian/pool/main/d/debconf/debconf_1.5.77.tar.xz' debconf_1.5.77.tar.xz 571412 SHA256:03482934c645140dd4cb8cae4970f81f576995b757ac7b89192067e72aa1d067
```
Other potentially useful URLs:
- https://sources.debian.net/src/debconf/1.5.77/ (for browsing the source)
- https://sources.debian.net/src/debconf/1.5.77/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/debconf/1.5.77/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `debian-archive-keyring=2021.1.1`
Binary Packages:
- `debian-archive-keyring=2021.1.1`
Licenses: (parsed from: `/usr/share/doc/debian-archive-keyring/copyright`)
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris debian-archive-keyring=2021.1.1
'http://deb.debian.org/debian/pool/main/d/debian-archive-keyring/debian-archive-keyring_2021.1.1.dsc' debian-archive-keyring_2021.1.1.dsc 1854 SHA256:a17a062b6dabe2d1092ee362412b8f2c9d4a44c7bd18ef2bbb45340c2ee4c512
'http://deb.debian.org/debian/pool/main/d/debian-archive-keyring/debian-archive-keyring_2021.1.1.tar.xz' debian-archive-keyring_2021.1.1.tar.xz 151340 SHA256:5fe6011f7caf516b19b8f2c545bd215f4b6f8022b161d1ce5262ac2c51c4dbcf
```
Other potentially useful URLs:
- https://sources.debian.net/src/debian-archive-keyring/2021.1.1/ (for browsing the source)
- https://sources.debian.net/src/debian-archive-keyring/2021.1.1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/debian-archive-keyring/2021.1.1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `debianutils=4.11.2`
Binary Packages:
- `debianutils=4.11.2`
Licenses: (parsed from: `/usr/share/doc/debianutils/copyright`)
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris debianutils=4.11.2
'http://deb.debian.org/debian/pool/main/d/debianutils/debianutils_4.11.2.dsc' debianutils_4.11.2.dsc 1644 SHA256:b11164a7aa3ca07ae1d758d15d707928defb64f2c35bf96f2e4fd983ee17b310
'http://deb.debian.org/debian/pool/main/d/debianutils/debianutils_4.11.2.tar.xz' debianutils_4.11.2.tar.xz 158132 SHA256:3b680e81709b740387335fac8f8806d71611dcf60874e1a792e862e48a1650de
```
Other potentially useful URLs:
- https://sources.debian.net/src/debianutils/4.11.2/ (for browsing the source)
- https://sources.debian.net/src/debianutils/4.11.2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/debianutils/4.11.2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `diffutils=1:3.7-5`
Binary Packages:
- `diffutils=1:3.7-5`
Licenses: (parsed from: `/usr/share/doc/diffutils/copyright`)
- `GFDL`
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris diffutils=1:3.7-5
'http://deb.debian.org/debian/pool/main/d/diffutils/diffutils_3.7-5.dsc' diffutils_3.7-5.dsc 1714 SHA256:5476ed004e300f291b5f0a356074a8ba8944a8b34514bb0fe95d274455adbf5d
'http://deb.debian.org/debian/pool/main/d/diffutils/diffutils_3.7.orig.tar.xz' diffutils_3.7.orig.tar.xz 1448828 SHA256:b3a7a6221c3dc916085f0d205abf6b8e1ba443d4dd965118da364a1dc1cb3a26
'http://deb.debian.org/debian/pool/main/d/diffutils/diffutils_3.7.orig.tar.xz.asc' diffutils_3.7.orig.tar.xz.asc 833 SHA256:c89b9d60a1d67cf8b2dd108a8b918e4cce34cba6c9e1f67e2ca482c52c0258a7
'http://deb.debian.org/debian/pool/main/d/diffutils/diffutils_3.7-5.debian.tar.xz' diffutils_3.7-5.debian.tar.xz 89004 SHA256:c90fd39d677702226b89d7559c124d7eb0b88195c381853ca1e5c8ca08e90a3a
```
Other potentially useful URLs:
- https://sources.debian.net/src/diffutils/1:3.7-5/ (for browsing the source)
- https://sources.debian.net/src/diffutils/1:3.7-5/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/diffutils/1:3.7-5/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `dpkg=1.20.9`
Binary Packages:
- `dpkg=1.20.9`
Licenses: (parsed from: `/usr/share/doc/dpkg/copyright`)
- `BSD-2-clause`
- `GPL-2`
- `GPL-2+`
- `public-domain-md5`
- `public-domain-s-s-d`
Source:
```console
$ apt-get source -qq --print-uris dpkg=1.20.9
'http://deb.debian.org/debian/pool/main/d/dpkg/dpkg_1.20.9.dsc' dpkg_1.20.9.dsc 2120 SHA256:87f21320f3165d1c57dae2314b7fd1849b49da9416fee3fb57c4b1e4192b4285
'http://deb.debian.org/debian/pool/main/d/dpkg/dpkg_1.20.9.tar.xz' dpkg_1.20.9.tar.xz 4954428 SHA256:5ce242830f213b5620f08e6c4183adb1ef4dc9da28d31988a27c87c71fe534ce
```
Other potentially useful URLs:
- https://sources.debian.net/src/dpkg/1.20.9/ (for browsing the source)
- https://sources.debian.net/src/dpkg/1.20.9/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/dpkg/1.20.9/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `e2fsprogs=1.46.2-2`
Binary Packages:
- `e2fsprogs=1.46.2-2`
- `libcom-err2:amd64=1.46.2-2`
- `libext2fs2:amd64=1.46.2-2`
- `libss2:amd64=1.46.2-2`
- `logsave=1.46.2-2`
Licenses: (parsed from: `/usr/share/doc/e2fsprogs/copyright`, `/usr/share/doc/libcom-err2/copyright`, `/usr/share/doc/libext2fs2/copyright`, `/usr/share/doc/libss2/copyright`, `/usr/share/doc/logsave/copyright`)
- `GPL-2`
- `LGPL-2`
Source:
```console
$ apt-get source -qq --print-uris e2fsprogs=1.46.2-2
'http://deb.debian.org/debian/pool/main/e/e2fsprogs/e2fsprogs_1.46.2-2.dsc' e2fsprogs_1.46.2-2.dsc 2842 SHA256:5b25910da7b90e40881d2cf63ebb4ae49642a8730f6e2a9c953e365dddccb73c
'http://deb.debian.org/debian/pool/main/e/e2fsprogs/e2fsprogs_1.46.2.orig.tar.gz' e2fsprogs_1.46.2.orig.tar.gz 9496954 SHA256:f79f26b4f65bdc059fca12e1ec6a3040c3ce1a503fb70eb915bee71903815cd5
'http://deb.debian.org/debian/pool/main/e/e2fsprogs/e2fsprogs_1.46.2.orig.tar.gz.asc' e2fsprogs_1.46.2.orig.tar.gz.asc 488 SHA256:948552550f23a9e0223cecb51b5b85258c9d94895a20bce1180fce770628a55f
'http://deb.debian.org/debian/pool/main/e/e2fsprogs/e2fsprogs_1.46.2-2.debian.tar.xz' e2fsprogs_1.46.2-2.debian.tar.xz 92624 SHA256:dc67d61815c524922e7461040d732bd245cf0196f7cc8a91ea7911a87b38f737
```
Other potentially useful URLs:
- https://sources.debian.net/src/e2fsprogs/1.46.2-2/ (for browsing the source)
- https://sources.debian.net/src/e2fsprogs/1.46.2-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/e2fsprogs/1.46.2-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `findutils=4.8.0-1`
Binary Packages:
- `findutils=4.8.0-1`
Licenses: (parsed from: `/usr/share/doc/findutils/copyright`)
- `GFDL-1.3`
- `GPL-3`
Source:
```console
$ apt-get source -qq --print-uris findutils=4.8.0-1
'http://deb.debian.org/debian/pool/main/f/findutils/findutils_4.8.0-1.dsc' findutils_4.8.0-1.dsc 2302 SHA256:47f342ec5146f4138f5004dbefe5838656057b502dfe225884b9f56840e29a3b
'http://deb.debian.org/debian/pool/main/f/findutils/findutils_4.8.0.orig.tar.xz' findutils_4.8.0.orig.tar.xz 1983096 SHA256:57127b7e97d91282c6ace556378d5455a9509898297e46e10443016ea1387164
'http://deb.debian.org/debian/pool/main/f/findutils/findutils_4.8.0.orig.tar.xz.asc' findutils_4.8.0.orig.tar.xz.asc 488 SHA256:dc0d5251026532d2b115e447eea70a934d3df6a0efcaf225c9d585eeedeefe62
'http://deb.debian.org/debian/pool/main/f/findutils/findutils_4.8.0-1.debian.tar.xz' findutils_4.8.0-1.debian.tar.xz 27296 SHA256:c99753f13f9e79653f79a398d1aafb15294c8f7953ad86948c7bf4cb0032bb43
```
Other potentially useful URLs:
- https://sources.debian.net/src/findutils/4.8.0-1/ (for browsing the source)
- https://sources.debian.net/src/findutils/4.8.0-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/findutils/4.8.0-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `gcc-10=10.2.1-6`
Binary Packages:
- `gcc-10-base:amd64=10.2.1-6`
- `libgcc-s1:amd64=10.2.1-6`
- `libstdc++6:amd64=10.2.1-6`
Licenses: (parsed from: `/usr/share/doc/gcc-10-base/copyright`, `/usr/share/doc/libgcc-s1/copyright`, `/usr/share/doc/libstdc++6/copyright`)
- `Artistic`
- `GFDL-1.2`
- `GPL`
- `GPL-2`
- `GPL-3`
- `LGPL`
Source:
```console
$ apt-get source -qq --print-uris gcc-10=10.2.1-6
'http://deb.debian.org/debian/pool/main/g/gcc-10/gcc-10_10.2.1-6.dsc' gcc-10_10.2.1-6.dsc 27632 SHA256:24024c1e225ca968f37ce39047ff5f1058219976db9e88a807173c2f07fa6029
'http://deb.debian.org/debian/pool/main/g/gcc-10/gcc-10_10.2.1.orig.tar.xz' gcc-10_10.2.1.orig.tar.xz 84547844 SHA256:ea3c05faa381486e6b859c047dc14977418bf1ccda4567064e016493fd6fffec
'http://deb.debian.org/debian/pool/main/g/gcc-10/gcc-10_10.2.1-6.debian.tar.xz' gcc-10_10.2.1-6.debian.tar.xz 2366560 SHA256:a95d6b9da2be83f9751850b002021281411ff1003d9feb77298b131da47820b3
```
Other potentially useful URLs:
- https://sources.debian.net/src/gcc-10/10.2.1-6/ (for browsing the source)
- https://sources.debian.net/src/gcc-10/10.2.1-6/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/gcc-10/10.2.1-6/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `gcc-9=9.3.0-22`
Binary Packages:
- `gcc-9-base:amd64=9.3.0-22`
Licenses: (parsed from: `/usr/share/doc/gcc-9-base/copyright`)
- `Artistic`
- `GFDL-1.2`
- `GPL`
- `GPL-2`
- `GPL-3`
- `LGPL`
- `LGPL-2.1+`
Source:
```console
$ apt-get source -qq --print-uris gcc-9=9.3.0-22
'http://deb.debian.org/debian/pool/main/g/gcc-9/gcc-9_9.3.0-22.dsc' gcc-9_9.3.0-22.dsc 21926 SHA256:14a0ea03cee0eb5450cc630a3bdf47da157062b3e7622ac45f6ae14a321eae96
'http://deb.debian.org/debian/pool/main/g/gcc-9/gcc-9_9.3.0.orig.tar.gz' gcc-9_9.3.0.orig.tar.gz 88686943 SHA256:824044ffa96eb337bb1c1d4cf6a82691d0290d6f42e1d13362eea855458de060
'http://deb.debian.org/debian/pool/main/g/gcc-9/gcc-9_9.3.0-22.debian.tar.xz' gcc-9_9.3.0-22.debian.tar.xz 904252 SHA256:68d55260456847880c71831b69c19cb81e9d1abf09274ab77ab6c081e177d94d
```
Other potentially useful URLs:
- https://sources.debian.net/src/gcc-9/9.3.0-22/ (for browsing the source)
- https://sources.debian.net/src/gcc-9/9.3.0-22/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/gcc-9/9.3.0-22/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `glibc=2.31-13+deb11u2`
Binary Packages:
- `libc-bin=2.31-13+deb11u2`
- `libc6:amd64=2.31-13+deb11u2`
Licenses: (parsed from: `/usr/share/doc/libc-bin/copyright`, `/usr/share/doc/libc6/copyright`)
- `GPL-2`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris glibc=2.31-13+deb11u2
'http://deb.debian.org/debian/pool/main/g/glibc/glibc_2.31-13%2bdeb11u2.dsc' glibc_2.31-13+deb11u2.dsc 8347 SHA256:ba641c41004c07c52e0f936785c395d2654af6c4dd3c400f7a929c51a26afc5e
'http://deb.debian.org/debian/pool/main/g/glibc/glibc_2.31.orig.tar.xz' glibc_2.31.orig.tar.xz 17254692 SHA256:3dc7704b6166839c37d7047626fd199f3d4c09aca0d90e48c51c31c967dce34e
'http://deb.debian.org/debian/pool/main/g/glibc/glibc_2.31-13%2bdeb11u2.debian.tar.xz' glibc_2.31-13+deb11u2.debian.tar.xz 911924 SHA256:d79509e8f8f91544bcf3dba5f4a17fc9c7d0692bcde8d9a9a32b431e80ce0a95
```
Other potentially useful URLs:
- https://sources.debian.net/src/glibc/2.31-13+deb11u2/ (for browsing the source)
- https://sources.debian.net/src/glibc/2.31-13+deb11u2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/glibc/2.31-13+deb11u2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `gmp=2:6.2.1+dfsg-1+deb11u1`
Binary Packages:
- `libgmp10:amd64=2:6.2.1+dfsg-1+deb11u1`
Licenses: (parsed from: `/usr/share/doc/libgmp10/copyright`)
- `GPL`
- `GPL-2`
- `GPL-3`
- `LGPL-3`
Source:
```console
$ apt-get source -qq --print-uris gmp=2:6.2.1+dfsg-1+deb11u1
'http://deb.debian.org/debian/pool/main/g/gmp/gmp_6.2.1%2bdfsg-1%2bdeb11u1.dsc' gmp_6.2.1+dfsg-1+deb11u1.dsc 2181 SHA256:4c09eb0a1c333fc5e67184a18f050af0f46f7f0fdeb533557bebd89df07c137b
'http://deb.debian.org/debian/pool/main/g/gmp/gmp_6.2.1%2bdfsg.orig.tar.xz' gmp_6.2.1+dfsg.orig.tar.xz 1853476 SHA256:c6ba08e3f079260ab90ff44ab8801eae134cd62cd78f4aa56317c0e70daa40cb
'http://deb.debian.org/debian/pool/main/g/gmp/gmp_6.2.1%2bdfsg-1%2bdeb11u1.debian.tar.xz' gmp_6.2.1+dfsg-1+deb11u1.debian.tar.xz 21920 SHA256:3cde187d542f5c095c6db8b76ec5252353e0413b492c57eb2e67ed3c43f40172
```
Other potentially useful URLs:
- https://sources.debian.net/src/gmp/2:6.2.1+dfsg-1+deb11u1/ (for browsing the source)
- https://sources.debian.net/src/gmp/2:6.2.1+dfsg-1+deb11u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/gmp/2:6.2.1+dfsg-1+deb11u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `gnupg2=2.2.27-2`
Binary Packages:
- `gpgv=2.2.27-2`
Licenses: (parsed from: `/usr/share/doc/gpgv/copyright`)
- `BSD-3-clause`
- `CC0-1.0`
- `Expat`
- `GPL-3`
- `GPL-3+`
- `LGPL-2.1`
- `LGPL-2.1+`
- `LGPL-3`
- `LGPL-3+`
- `RFC-Reference`
- `TinySCHEME`
- `permissive`
Source:
```console
$ apt-get source -qq --print-uris gnupg2=2.2.27-2
'http://deb.debian.org/debian/pool/main/g/gnupg2/gnupg2_2.2.27-2.dsc' gnupg2_2.2.27-2.dsc 3644 SHA256:f8a99fd0976958c5656925ea576b26667ac075d0cb145981a9043f0c89a06911
'http://deb.debian.org/debian/pool/main/g/gnupg2/gnupg2_2.2.27.orig.tar.bz2' gnupg2_2.2.27.orig.tar.bz2 7191555 SHA256:34e60009014ea16402069136e0a5f63d9b65f90096244975db5cea74b3d02399
'http://deb.debian.org/debian/pool/main/g/gnupg2/gnupg2_2.2.27-2.debian.tar.xz' gnupg2_2.2.27-2.debian.tar.xz 62720 SHA256:6c67a7acbcab01116a640894751cdf54438caa265fae656d580d42010582591c
```
Other potentially useful URLs:
- https://sources.debian.net/src/gnupg2/2.2.27-2/ (for browsing the source)
- https://sources.debian.net/src/gnupg2/2.2.27-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/gnupg2/2.2.27-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `gnutls28=3.7.1-5`
Binary Packages:
- `libgnutls30:amd64=3.7.1-5`
Licenses: (parsed from: `/usr/share/doc/libgnutls30/copyright`)
- `Apache-2.0`
- `BSD-3-Clause`
- `CC0 license`
- `Expat`
- `GFDL-1.3`
- `GPL`
- `GPL-3`
- `GPLv3+`
- `LGPL`
- `LGPL-3`
- `LGPLv2.1+`
- `LGPLv3+_or_GPLv2+`
- `The main library is licensed under GNU Lesser`
Source:
```console
$ apt-get source -qq --print-uris gnutls28=3.7.1-5
'http://deb.debian.org/debian/pool/main/g/gnutls28/gnutls28_3.7.1-5.dsc' gnutls28_3.7.1-5.dsc 3487 SHA256:1cdc0abae8cc1b4a86ff4be7e549a5b6e82297645e7f61962e03294987e2ab9f
'http://deb.debian.org/debian/pool/main/g/gnutls28/gnutls28_3.7.1.orig.tar.xz' gnutls28_3.7.1.orig.tar.xz 6038388 SHA256:3777d7963eca5e06eb315686163b7b3f5045e2baac5e54e038ace9835e5cac6f
'http://deb.debian.org/debian/pool/main/g/gnutls28/gnutls28_3.7.1.orig.tar.xz.asc' gnutls28_3.7.1.orig.tar.xz.asc 854 SHA256:13a683b12602c169a7ad7827ab0e3f35c8fa1f98675d0073cf7d54a8cd635582
'http://deb.debian.org/debian/pool/main/g/gnutls28/gnutls28_3.7.1-5.debian.tar.xz' gnutls28_3.7.1-5.debian.tar.xz 88576 SHA256:ec60906fcf50fb01654cf76557cc3810bfa88b0d0492e4d865097b97dd00558d
```
Other potentially useful URLs:
- https://sources.debian.net/src/gnutls28/3.7.1-5/ (for browsing the source)
- https://sources.debian.net/src/gnutls28/3.7.1-5/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/gnutls28/3.7.1-5/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `grep=3.6-1`
Binary Packages:
- `grep=3.6-1`
Licenses: (parsed from: `/usr/share/doc/grep/copyright`)
- `GPL-3`
- `GPL-3+`
Source:
```console
$ apt-get source -qq --print-uris grep=3.6-1
'http://deb.debian.org/debian/pool/main/g/grep/grep_3.6-1.dsc' grep_3.6-1.dsc 1644 SHA256:ccf6849a07a2c1fb77d2534a414f402af8cadeeac66a41deda04f3e835b09d3d
'http://deb.debian.org/debian/pool/main/g/grep/grep_3.6.orig.tar.xz' grep_3.6.orig.tar.xz 1589412 SHA256:667e15e8afe189e93f9f21a7cd3a7b3f776202f417330b248c2ad4f997d9373e
'http://deb.debian.org/debian/pool/main/g/grep/grep_3.6.orig.tar.xz.asc' grep_3.6.orig.tar.xz.asc 833 SHA256:02b52c0676e0e97762cee638125a345a5300fdcba691c1a5b0725ee6bd28d4a8
'http://deb.debian.org/debian/pool/main/g/grep/grep_3.6-1.debian.tar.xz' grep_3.6-1.debian.tar.xz 17748 SHA256:67b481210e2db6bb9c45d90f39445a90c83e6d32fc6c8e5b9e89bb40488767c4
```
Other potentially useful URLs:
- https://sources.debian.net/src/grep/3.6-1/ (for browsing the source)
- https://sources.debian.net/src/grep/3.6-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/grep/3.6-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `gzip=1.10-4`
Binary Packages:
- `gzip=1.10-4`
Licenses: (parsed from: `/usr/share/doc/gzip/copyright`)
- `FSF-manpages`
- `GFDL-1.3+-no-invariant`
- `GFDL-3`
- `GPL-3`
- `GPL-3+`
Source:
```console
$ apt-get source -qq --print-uris gzip=1.10-4
'http://deb.debian.org/debian/pool/main/g/gzip/gzip_1.10-4.dsc' gzip_1.10-4.dsc 1780 SHA256:c2728d6a042bf41e43f8bf86f520682a312235f981cca26a60fc0745ff536459
'http://deb.debian.org/debian/pool/main/g/gzip/gzip_1.10.orig.tar.gz' gzip_1.10.orig.tar.gz 1201421 SHA256:c91f74430bf7bc20402e1f657d0b252cb80aa66ba333a25704512af346633c68
'http://deb.debian.org/debian/pool/main/g/gzip/gzip_1.10-4.debian.tar.xz' gzip_1.10-4.debian.tar.xz 19300 SHA256:f3e40d75fe3f695c76f028194b2031a2016a302b3c95d28ebc52b8538331a708
```
Other potentially useful URLs:
- https://sources.debian.net/src/gzip/1.10-4/ (for browsing the source)
- https://sources.debian.net/src/gzip/1.10-4/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/gzip/1.10-4/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `hostname=3.23`
Binary Packages:
- `hostname=3.23`
Licenses: (parsed from: `/usr/share/doc/hostname/copyright`)
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris hostname=3.23
'http://deb.debian.org/debian/pool/main/h/hostname/hostname_3.23.dsc' hostname_3.23.dsc 1402 SHA256:0694c083fad82da1fd33204557a30bfc745a689a64030ba360062daafe03ede0
'http://deb.debian.org/debian/pool/main/h/hostname/hostname_3.23.tar.gz' hostname_3.23.tar.gz 13672 SHA256:bc6d1954b22849869ff8b2a602e39f08b1702f686d4b58dd7927cdeb5b4876ef
```
Other potentially useful URLs:
- https://sources.debian.net/src/hostname/3.23/ (for browsing the source)
- https://sources.debian.net/src/hostname/3.23/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/hostname/3.23/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `init-system-helpers=1.60`
Binary Packages:
- `init-system-helpers=1.60`
Licenses: (parsed from: `/usr/share/doc/init-system-helpers/copyright`)
- `BSD-3-clause`
- `GPL-2`
- `GPL-2+`
Source:
```console
$ apt-get source -qq --print-uris init-system-helpers=1.60
'http://deb.debian.org/debian/pool/main/i/init-system-helpers/init-system-helpers_1.60.dsc' init-system-helpers_1.60.dsc 1902 SHA256:51dd15cc34daf5e58e40560563785d422fb27ac8a2f6ce4e73350a800cbf3265
'http://deb.debian.org/debian/pool/main/i/init-system-helpers/init-system-helpers_1.60.tar.xz' init-system-helpers_1.60.tar.xz 40584 SHA256:2cf987e5ec2412faab8e99d6f26598b6ae65afe1af2073133575224997082172
```
Other potentially useful URLs:
- https://sources.debian.net/src/init-system-helpers/1.60/ (for browsing the source)
- https://sources.debian.net/src/init-system-helpers/1.60/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/init-system-helpers/1.60/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `keyutils=1.6.1-2`
Binary Packages:
- `libkeyutils1:amd64=1.6.1-2`
Licenses: (parsed from: `/usr/share/doc/libkeyutils1/copyright`)
- `GPL-2`
- `GPL-2+`
- `LGPL-2`
- `LGPL-2+`
Source:
```console
$ apt-get source -qq --print-uris keyutils=1.6.1-2
'http://deb.debian.org/debian/pool/main/k/keyutils/keyutils_1.6.1-2.dsc' keyutils_1.6.1-2.dsc 2076 SHA256:6dd531f522fb3c5d8cfaaaf726e9277b64f50bff8c05d06269f42a677f65a4a8
'http://deb.debian.org/debian/pool/main/k/keyutils/keyutils_1.6.1.orig.tar.bz2' keyutils_1.6.1.orig.tar.bz2 97232 SHA256:c8b15722ae51d95b9ad76cc6d49a4c2cc19b0c60f72f61fb9bf43eea7cbd64ce
'http://deb.debian.org/debian/pool/main/k/keyutils/keyutils_1.6.1-2.debian.tar.xz' keyutils_1.6.1-2.debian.tar.xz 13412 SHA256:862442538428b514bb33a1c8488d4528c5ea48feca0ea5e60d8d34fd440f2355
```
Other potentially useful URLs:
- https://sources.debian.net/src/keyutils/1.6.1-2/ (for browsing the source)
- https://sources.debian.net/src/keyutils/1.6.1-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/keyutils/1.6.1-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `krb5=1.18.3-6+deb11u1`
Binary Packages:
- `libgssapi-krb5-2:amd64=1.18.3-6+deb11u1`
- `libk5crypto3:amd64=1.18.3-6+deb11u1`
- `libkrb5-3:amd64=1.18.3-6+deb11u1`
- `libkrb5support0:amd64=1.18.3-6+deb11u1`
Licenses: (parsed from: `/usr/share/doc/libgssapi-krb5-2/copyright`, `/usr/share/doc/libk5crypto3/copyright`, `/usr/share/doc/libkrb5-3/copyright`, `/usr/share/doc/libkrb5support0/copyright`)
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris krb5=1.18.3-6+deb11u1
'http://deb.debian.org/debian/pool/main/k/krb5/krb5_1.18.3-6%2bdeb11u1.dsc' krb5_1.18.3-6+deb11u1.dsc 2971 SHA256:db16b93a4beae887fe38dfbb19d1c220501c185bb4416924c4ff531312dba91e
'http://deb.debian.org/debian/pool/main/k/krb5/krb5_1.18.3.orig.tar.gz' krb5_1.18.3.orig.tar.gz 8715312 SHA256:e61783c292b5efd9afb45c555a80dd267ac67eebabca42185362bee6c4fbd719
'http://deb.debian.org/debian/pool/main/k/krb5/krb5_1.18.3-6%2bdeb11u1.debian.tar.xz' krb5_1.18.3-6+deb11u1.debian.tar.xz 106192 SHA256:c68ecf8e8f4238f8950a7c409392c9a0661a6ad0d5efd88b6f8b0a39f7e8af21
```
Other potentially useful URLs:
- https://sources.debian.net/src/krb5/1.18.3-6+deb11u1/ (for browsing the source)
- https://sources.debian.net/src/krb5/1.18.3-6+deb11u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/krb5/1.18.3-6+deb11u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libcap-ng=0.7.9-2.2`
Binary Packages:
- `libcap-ng0:amd64=0.7.9-2.2+b1`
Licenses: (parsed from: `/usr/share/doc/libcap-ng0/copyright`)
- `GPL-2`
- `GPL-3`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris libcap-ng=0.7.9-2.2
'http://deb.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng_0.7.9-2.2.dsc' libcap-ng_0.7.9-2.2.dsc 2081 SHA256:d573ce59d83d2c117515e7c57dde1c990e9c5a34e0f53ac09f6b4d3e153e9aae
'http://deb.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng_0.7.9.orig.tar.gz' libcap-ng_0.7.9.orig.tar.gz 449038 SHA256:4a1532bcf3731aade40936f6d6a586ed5a66ca4c7455e1338d1f6c3e09221328
'http://deb.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng_0.7.9-2.2.debian.tar.xz' libcap-ng_0.7.9-2.2.debian.tar.xz 6308 SHA256:6d7b5cfcf435fe996e5dc78770a9ab1ab614ced5bee56e3e0ba4e09d8c832a0a
```
Other potentially useful URLs:
- https://sources.debian.net/src/libcap-ng/0.7.9-2.2/ (for browsing the source)
- https://sources.debian.net/src/libcap-ng/0.7.9-2.2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libcap-ng/0.7.9-2.2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libffi=3.3-6`
Binary Packages:
- `libffi7:amd64=3.3-6`
Licenses: (parsed from: `/usr/share/doc/libffi7/copyright`)
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris libffi=3.3-6
'http://deb.debian.org/debian/pool/main/libf/libffi/libffi_3.3-6.dsc' libffi_3.3-6.dsc 1934 SHA256:cb5dcd6b54e0c8c7db4cd97deef68ac9e2ede49138ca5db194b60338eae8dd65
'http://deb.debian.org/debian/pool/main/libf/libffi/libffi_3.3.orig.tar.gz' libffi_3.3.orig.tar.gz 1305466 SHA256:72fba7922703ddfa7a028d513ac15a85c8d54c8d67f55fa5a4802885dc652056
'http://deb.debian.org/debian/pool/main/libf/libffi/libffi_3.3-6.debian.tar.xz' libffi_3.3-6.debian.tar.xz 9168 SHA256:d15879289f32acf2afbbcc6ccf6e0c1aa306f6f06abb8b0301bfa41bffea9a55
```
Other potentially useful URLs:
- https://sources.debian.net/src/libffi/3.3-6/ (for browsing the source)
- https://sources.debian.net/src/libffi/3.3-6/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libffi/3.3-6/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libgcrypt20=1.8.7-6`
Binary Packages:
- `libgcrypt20:amd64=1.8.7-6`
Licenses: (parsed from: `/usr/share/doc/libgcrypt20/copyright`)
- `GPL-2`
- `LGPL`
Source:
```console
$ apt-get source -qq --print-uris libgcrypt20=1.8.7-6
'http://deb.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6.dsc' libgcrypt20_1.8.7-6.dsc 2800 SHA256:af433c97fde6172bb51d458e66acd33c66052bdf78ad72f7034f0b1851015959
'http://deb.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7.orig.tar.bz2' libgcrypt20_1.8.7.orig.tar.bz2 2985660 SHA256:03b70f028299561b7034b8966d7dd77ef16ed139c43440925fe8782561974748
'http://deb.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7.orig.tar.bz2.asc' libgcrypt20_1.8.7.orig.tar.bz2.asc 228 SHA256:eed6bb4174433640a02c1dc8851f34f85ec55b43d76a24bec87d7175784ef614
'http://deb.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.7-6.debian.tar.xz' libgcrypt20_1.8.7-6.debian.tar.xz 37564 SHA256:3fe8290b67416579fc99648ba025b8de732c4cc541b60b5f96f53d42a38916f5
```
Other potentially useful URLs:
- https://sources.debian.net/src/libgcrypt20/1.8.7-6/ (for browsing the source)
- https://sources.debian.net/src/libgcrypt20/1.8.7-6/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libgcrypt20/1.8.7-6/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libgpg-error=1.38-2`
Binary Packages:
- `libgpg-error0:amd64=1.38-2`
Licenses: (parsed from: `/usr/share/doc/libgpg-error0/copyright`)
- `BSD-3-clause`
- `GPL-3`
- `GPL-3+`
- `LGPL-2.1`
- `LGPL-2.1+`
- `g10-permissive`
Source:
```console
$ apt-get source -qq --print-uris libgpg-error=1.38-2
'http://deb.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error_1.38-2.dsc' libgpg-error_1.38-2.dsc 2220 SHA256:ab0ea76aa3552afa664210a871abc74637acafd89c068edf8dc03521b8e22d64
'http://deb.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error_1.38.orig.tar.bz2' libgpg-error_1.38.orig.tar.bz2 957637 SHA256:d8988275aa69d7149f931c10442e9e34c0242674249e171592b430ff7b3afd02
'http://deb.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error_1.38.orig.tar.bz2.asc' libgpg-error_1.38.orig.tar.bz2.asc 488 SHA256:d80eb927d85e19e96d8de17552f8f48b517ae7acac7685404e8027475c5b4330
'http://deb.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error_1.38-2.debian.tar.xz' libgpg-error_1.38-2.debian.tar.xz 19544 SHA256:824bcb278ead676c20f174bd551b1cc44a294137fabe6a1d892667882f3b4ba2
```
Other potentially useful URLs:
- https://sources.debian.net/src/libgpg-error/1.38-2/ (for browsing the source)
- https://sources.debian.net/src/libgpg-error/1.38-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libgpg-error/1.38-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libidn2=2.3.0-5`
Binary Packages:
- `libidn2-0:amd64=2.3.0-5`
Licenses: (parsed from: `/usr/share/doc/libidn2-0/copyright`)
- `GPL-2`
- `GPL-2+`
- `GPL-3`
- `GPL-3+`
- `LGPL-3`
- `LGPL-3+`
- `Unicode`
Source:
```console
$ apt-get source -qq --print-uris libidn2=2.3.0-5
'http://deb.debian.org/debian/pool/main/libi/libidn2/libidn2_2.3.0-5.dsc' libidn2_2.3.0-5.dsc 2046 SHA256:f8a787741b2395fe87c2773252e539bcc068fde0a5367316082cbbd2fed2be16
'http://deb.debian.org/debian/pool/main/libi/libidn2/libidn2_2.3.0.orig.tar.gz' libidn2_2.3.0.orig.tar.gz 2164993 SHA256:e1cb1db3d2e249a6a3eb6f0946777c2e892d5c5dc7bd91c74394fc3a01cab8b5
'http://deb.debian.org/debian/pool/main/libi/libidn2/libidn2_2.3.0-5.debian.tar.xz' libidn2_2.3.0-5.debian.tar.xz 11276 SHA256:e061b97d035e374bc6a948a514c26ad7d1bda31c8147cc8db02e604c82865a15
```
Other potentially useful URLs:
- https://sources.debian.net/src/libidn2/2.3.0-5/ (for browsing the source)
- https://sources.debian.net/src/libidn2/2.3.0-5/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libidn2/2.3.0-5/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libnsl=1.3.0-2`
Binary Packages:
- `libnsl2:amd64=1.3.0-2`
Licenses: (parsed from: `/usr/share/doc/libnsl2/copyright`)
- `BSD-3-clause`
- `GPL-2`
- `GPL-2+-autoconf-exception`
- `GPL-2+-libtool-exception`
- `GPL-3`
- `GPL-3+-autoconf-exception`
- `LGPL-2.1`
- `LGPL-2.1+`
- `MIT`
- `permissive-autoconf-m4`
- `permissive-autoconf-m4-no-warranty`
- `permissive-configure`
- `permissive-fsf`
- `permissive-makefile-in`
Source:
```console
$ apt-get source -qq --print-uris libnsl=1.3.0-2
'http://deb.debian.org/debian/pool/main/libn/libnsl/libnsl_1.3.0-2.dsc' libnsl_1.3.0-2.dsc 1955 SHA256:1da570eed6693c774cce51f3c33f989d1aa4bf1dcb8660818d8a834a1a3728ef
'http://deb.debian.org/debian/pool/main/libn/libnsl/libnsl_1.3.0.orig.tar.xz' libnsl_1.3.0.orig.tar.xz 321488 SHA256:eac3062957fa302c62eff4aed718a07bacbf9ceb0a058289f12a19bfdda3c8e2
'http://deb.debian.org/debian/pool/main/libn/libnsl/libnsl_1.3.0-2.debian.tar.xz' libnsl_1.3.0-2.debian.tar.xz 4692 SHA256:7f8dccc706931b9e206448ffb475487a4a0abaded27cf611d418f4a34415dca7
```
Other potentially useful URLs:
- https://sources.debian.net/src/libnsl/1.3.0-2/ (for browsing the source)
- https://sources.debian.net/src/libnsl/1.3.0-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libnsl/1.3.0-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libseccomp=2.5.1-1+deb11u1`
Binary Packages:
- `libseccomp2:amd64=2.5.1-1+deb11u1`
Licenses: (parsed from: `/usr/share/doc/libseccomp2/copyright`)
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris libseccomp=2.5.1-1+deb11u1
'http://deb.debian.org/debian/pool/main/libs/libseccomp/libseccomp_2.5.1-1%2bdeb11u1.dsc' libseccomp_2.5.1-1+deb11u1.dsc 2708 SHA256:6a2a00eb5f45e794a2203805348a3990d46f4ded63f8708d3382994ece729436
'http://deb.debian.org/debian/pool/main/libs/libseccomp/libseccomp_2.5.1.orig.tar.gz' libseccomp_2.5.1.orig.tar.gz 638811 SHA256:ee307e383c77aa7995abc5ada544d51c9723ae399768a97667d4cdb3c3a30d55
'http://deb.debian.org/debian/pool/main/libs/libseccomp/libseccomp_2.5.1.orig.tar.gz.asc' libseccomp_2.5.1.orig.tar.gz.asc 833 SHA256:14d45c86e5ceed5ac5511c3ebf70a4dca128b7584b314dc8a551c779ea225d2e
'http://deb.debian.org/debian/pool/main/libs/libseccomp/libseccomp_2.5.1-1%2bdeb11u1.debian.tar.xz' libseccomp_2.5.1-1+deb11u1.debian.tar.xz 19524 SHA256:a09ef7c0b9b6464f426b78a7b978d8566da53667c1a234234ffd2cc600543200
```
Other potentially useful URLs:
- https://sources.debian.net/src/libseccomp/2.5.1-1+deb11u1/ (for browsing the source)
- https://sources.debian.net/src/libseccomp/2.5.1-1+deb11u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libseccomp/2.5.1-1+deb11u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libselinux=3.1-3`
Binary Packages:
- `libselinux1:amd64=3.1-3`
Licenses: (parsed from: `/usr/share/doc/libselinux1/copyright`)
- `GPL-2`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris libselinux=3.1-3
'http://deb.debian.org/debian/pool/main/libs/libselinux/libselinux_3.1-3.dsc' libselinux_3.1-3.dsc 2300 SHA256:42810484f3776af09a2e0ab726e3be877fc8a54d6bf51702e46c22f945ab5177
'http://deb.debian.org/debian/pool/main/libs/libselinux/libselinux_3.1.orig.tar.gz' libselinux_3.1.orig.tar.gz 204703 SHA256:ea5dcbb4d859e3f999c26a13c630da2f16dff9462e3cc8cb7b458ac157d112e7
'http://deb.debian.org/debian/pool/main/libs/libselinux/libselinux_3.1-3.debian.tar.xz' libselinux_3.1-3.debian.tar.xz 24176 SHA256:7170ab6914f0d2e93de169da312df961f799f5d58cc0a4c552e3f8a7882f3c81
```
Other potentially useful URLs:
- https://sources.debian.net/src/libselinux/3.1-3/ (for browsing the source)
- https://sources.debian.net/src/libselinux/3.1-3/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libselinux/3.1-3/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libsemanage=3.1-1`
Binary Packages:
- `libsemanage-common=3.1-1`
- `libsemanage1:amd64=3.1-1+b2`
Licenses: (parsed from: `/usr/share/doc/libsemanage-common/copyright`, `/usr/share/doc/libsemanage1/copyright`)
- `GPL`
- `LGPL`
Source:
```console
$ apt-get source -qq --print-uris libsemanage=3.1-1
'http://deb.debian.org/debian/pool/main/libs/libsemanage/libsemanage_3.1-1.dsc' libsemanage_3.1-1.dsc 2339 SHA256:d49f9c29d0ad9c8b42145e0926919df962b58823e9fc22002bbb00333276170d
'http://deb.debian.org/debian/pool/main/libs/libsemanage/libsemanage_3.1.orig.tar.gz' libsemanage_3.1.orig.tar.gz 179601 SHA256:22d6c75526e40d1781c30bcf29abf97171bdfe6780923f11c8e1c76a75a21ff8
'http://deb.debian.org/debian/pool/main/libs/libsemanage/libsemanage_3.1-1.debian.tar.xz' libsemanage_3.1-1.debian.tar.xz 17556 SHA256:185b151158faaaf3d8f9ff939f29efd3eb5dbb050d01a87d3fde6cf40e778648
```
Other potentially useful URLs:
- https://sources.debian.net/src/libsemanage/3.1-1/ (for browsing the source)
- https://sources.debian.net/src/libsemanage/3.1-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libsemanage/3.1-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libsepol=3.1-1`
Binary Packages:
- `libsepol1:amd64=3.1-1`
Licenses: (parsed from: `/usr/share/doc/libsepol1/copyright`)
- `GPL`
- `LGPL`
Source:
```console
$ apt-get source -qq --print-uris libsepol=3.1-1
'http://deb.debian.org/debian/pool/main/libs/libsepol/libsepol_3.1-1.dsc' libsepol_3.1-1.dsc 1776 SHA256:37bfb6797af8a96eada6c6ace374292b8a16a6bfb557b1e8ab9fd29e72d5888a
'http://deb.debian.org/debian/pool/main/libs/libsepol/libsepol_3.1.orig.tar.gz' libsepol_3.1.orig.tar.gz 473842 SHA256:ae6778d01443fdd38cd30eeee846494e19f4d407b09872580372f4aa4bf8a3cc
'http://deb.debian.org/debian/pool/main/libs/libsepol/libsepol_3.1-1.debian.tar.xz' libsepol_3.1-1.debian.tar.xz 14584 SHA256:9351a0b6207f6a5da2951292d3ec5655feb89df5aabc9010094766d811156166
```
Other potentially useful URLs:
- https://sources.debian.net/src/libsepol/3.1-1/ (for browsing the source)
- https://sources.debian.net/src/libsepol/3.1-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libsepol/3.1-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libtasn1-6=4.16.0-2`
Binary Packages:
- `libtasn1-6:amd64=4.16.0-2`
Licenses: (parsed from: `/usr/share/doc/libtasn1-6/copyright`)
- `GFDL-1.3`
- `GPL-3`
- `LGPL`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris libtasn1-6=4.16.0-2
'http://deb.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2.dsc' libtasn1-6_4.16.0-2.dsc 2586 SHA256:fd4a387c71f95c3eceb1072a3f42c7021d73128027ea41a18d6efc6cbfdd764a
'http://deb.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0.orig.tar.gz' libtasn1-6_4.16.0.orig.tar.gz 1812442 SHA256:0e0fb0903839117cb6e3b56e68222771bebf22ad7fc2295a0ed7d576e8d4329d
'http://deb.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0.orig.tar.gz.asc' libtasn1-6_4.16.0.orig.tar.gz.asc 488 SHA256:06c201e8c3b43c27465ed79294d4c4ec8dcd3e95e4a6176ecbf273229ee3e2d0
'http://deb.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.16.0-2.debian.tar.xz' libtasn1-6_4.16.0-2.debian.tar.xz 17740 SHA256:c1a89b0bac0fb7c83ebac4eafbca0475c24350ade6ccaef31266424725610624
```
Other potentially useful URLs:
- https://sources.debian.net/src/libtasn1-6/4.16.0-2/ (for browsing the source)
- https://sources.debian.net/src/libtasn1-6/4.16.0-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libtasn1-6/4.16.0-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libtirpc=1.3.1-1`
Binary Packages:
- `libtirpc-common=1.3.1-1`
- `libtirpc3:amd64=1.3.1-1`
Licenses: (parsed from: `/usr/share/doc/libtirpc-common/copyright`, `/usr/share/doc/libtirpc3/copyright`)
- `BSD-2-Clause`
- `BSD-3-Clause`
- `BSD-4-Clause`
- `GPL-2`
- `LGPL-2.1`
- `LGPL-2.1+`
- `PERMISSIVE`
- `__AUTO_PERMISSIVE__`
Source:
```console
$ apt-get source -qq --print-uris libtirpc=1.3.1-1
'http://deb.debian.org/debian/pool/main/libt/libtirpc/libtirpc_1.3.1-1.dsc' libtirpc_1.3.1-1.dsc 2111 SHA256:b143e375f621a5a64858c068692304febe222da8f648b89254507eda3e97c68a
'http://deb.debian.org/debian/pool/main/libt/libtirpc/libtirpc_1.3.1.orig.tar.bz2' libtirpc_1.3.1.orig.tar.bz2 513399 SHA256:245895caf066bec5e3d4375942c8cb4366adad184c29c618d97f724ea309ee17
'http://deb.debian.org/debian/pool/main/libt/libtirpc/libtirpc_1.3.1-1.debian.tar.xz' libtirpc_1.3.1-1.debian.tar.xz 10788 SHA256:5012cff4ebc5db473b4fb29e1661bde4354c25b2e23a05df28d2f03ba0547881
```
Other potentially useful URLs:
- https://sources.debian.net/src/libtirpc/1.3.1-1/ (for browsing the source)
- https://sources.debian.net/src/libtirpc/1.3.1-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libtirpc/1.3.1-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libunistring=0.9.10-4`
Binary Packages:
- `libunistring2:amd64=0.9.10-4`
Licenses: (parsed from: `/usr/share/doc/libunistring2/copyright`)
- `FreeSoftware`
- `GFDL-1.2`
- `GFDL-1.2+`
- `GPL-2`
- `GPL-2+`
- `GPL-2+ with distribution exception`
- `GPL-3`
- `GPL-3+`
- `LGPL-3`
- `LGPL-3+`
- `MIT`
Source:
```console
$ apt-get source -qq --print-uris libunistring=0.9.10-4
'http://deb.debian.org/debian/pool/main/libu/libunistring/libunistring_0.9.10-4.dsc' libunistring_0.9.10-4.dsc 2212 SHA256:5c7940807b538d4204506349cbd67e5c677afb9f0e46e94455353e3f746a481e
'http://deb.debian.org/debian/pool/main/libu/libunistring/libunistring_0.9.10.orig.tar.xz' libunistring_0.9.10.orig.tar.xz 2051320 SHA256:eb8fb2c3e4b6e2d336608377050892b54c3c983b646c561836550863003c05d7
'http://deb.debian.org/debian/pool/main/libu/libunistring/libunistring_0.9.10.orig.tar.xz.asc' libunistring_0.9.10.orig.tar.xz.asc 1310 SHA256:e1606f691034fa21b00e08269622743547c16d21cca6c8a64156b4774a49e78e
'http://deb.debian.org/debian/pool/main/libu/libunistring/libunistring_0.9.10-4.debian.tar.xz' libunistring_0.9.10-4.debian.tar.xz 40936 SHA256:6c9554e1a1c6d0a02ca4868a5422d176e57a3131c1a8a21de5503b164997525c
```
Other potentially useful URLs:
- https://sources.debian.net/src/libunistring/0.9.10-4/ (for browsing the source)
- https://sources.debian.net/src/libunistring/0.9.10-4/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libunistring/0.9.10-4/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libxcrypt=1:4.4.18-4`
Binary Packages:
- `libcrypt1:amd64=1:4.4.18-4`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris libxcrypt=1:4.4.18-4
'http://deb.debian.org/debian/pool/main/libx/libxcrypt/libxcrypt_4.4.18-4.dsc' libxcrypt_4.4.18-4.dsc 1477 SHA256:5c0ca54ddad5343596f6c0916caf30fbb9b8a144252b49dc74f97502f33cdb7a
'http://deb.debian.org/debian/pool/main/libx/libxcrypt/libxcrypt_4.4.18.orig.tar.xz' libxcrypt_4.4.18.orig.tar.xz 397776 SHA256:4cd2a06e98519d57a5572ee8885b6cc23c70a559d234c161d3f22c487edaa3fa
'http://deb.debian.org/debian/pool/main/libx/libxcrypt/libxcrypt_4.4.18-4.debian.tar.xz' libxcrypt_4.4.18-4.debian.tar.xz 7560 SHA256:6c99b888c57e1411d870fa81d057e30444aa801ed430aa3126d31996e187dd84
```
Other potentially useful URLs:
- https://sources.debian.net/src/libxcrypt/1:4.4.18-4/ (for browsing the source)
- https://sources.debian.net/src/libxcrypt/1:4.4.18-4/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libxcrypt/1:4.4.18-4/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libzstd=1.4.8+dfsg-2.1`
Binary Packages:
- `libzstd1:amd64=1.4.8+dfsg-2.1`
Licenses: (parsed from: `/usr/share/doc/libzstd1/copyright`)
- `BSD-3-clause`
- `Expat`
- `GPL-2`
- `zlib`
Source:
```console
$ apt-get source -qq --print-uris libzstd=1.4.8+dfsg-2.1
'http://deb.debian.org/debian/pool/main/libz/libzstd/libzstd_1.4.8%2bdfsg-2.1.dsc' libzstd_1.4.8+dfsg-2.1.dsc 2274 SHA256:7c656b8cab7a560710358dddbd949b33b1ffcedd7cbef370132e4018b94e2e74
'http://deb.debian.org/debian/pool/main/libz/libzstd/libzstd_1.4.8%2bdfsg.orig.tar.xz' libzstd_1.4.8+dfsg.orig.tar.xz 1331996 SHA256:1e8ce5c4880a6d5bd8d3186e4186607dd19b64fc98a3877fc13aeefd566d67c5
'http://deb.debian.org/debian/pool/main/libz/libzstd/libzstd_1.4.8%2bdfsg-2.1.debian.tar.xz' libzstd_1.4.8+dfsg-2.1.debian.tar.xz 12224 SHA256:cba8544590e59303277e3af2bb260fed32723a1084c9f4928956deca2c80032c
```
Other potentially useful URLs:
- https://sources.debian.net/src/libzstd/1.4.8+dfsg-2.1/ (for browsing the source)
- https://sources.debian.net/src/libzstd/1.4.8+dfsg-2.1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libzstd/1.4.8+dfsg-2.1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `lsb=11.1.0`
Binary Packages:
- `lsb-base=11.1.0`
Licenses: (parsed from: `/usr/share/doc/lsb-base/copyright`)
- `BSD-3-clause`
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris lsb=11.1.0
'http://deb.debian.org/debian/pool/main/l/lsb/lsb_11.1.0.dsc' lsb_11.1.0.dsc 1800 SHA256:5cb5679dcc92e30aa878f892f73081d6b4d5299841549f6d53a886d51509feb1
'http://deb.debian.org/debian/pool/main/l/lsb/lsb_11.1.0.tar.xz' lsb_11.1.0.tar.xz 42452 SHA256:c7926d511228862892630070f7708c425db9473ceefc70872868c448b5145b57
```
Other potentially useful URLs:
- https://sources.debian.net/src/lsb/11.1.0/ (for browsing the source)
- https://sources.debian.net/src/lsb/11.1.0/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/lsb/11.1.0/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `lz4=1.9.3-2`
Binary Packages:
- `liblz4-1:amd64=1.9.3-2`
Licenses: (parsed from: `/usr/share/doc/liblz4-1/copyright`)
- `BSD-2-clause`
- `GPL-2`
- `GPL-2+`
Source:
```console
$ apt-get source -qq --print-uris lz4=1.9.3-2
'http://deb.debian.org/debian/pool/main/l/lz4/lz4_1.9.3-2.dsc' lz4_1.9.3-2.dsc 1959 SHA256:215e1f0be1bb40e2b89182f3a1bf630463d8acdc0917f1f928ad1bf9ef3e1b0c
'http://deb.debian.org/debian/pool/main/l/lz4/lz4_1.9.3.orig.tar.gz' lz4_1.9.3.orig.tar.gz 320958 SHA256:030644df4611007ff7dc962d981f390361e6c97a34e5cbc393ddfbe019ffe2c1
'http://deb.debian.org/debian/pool/main/l/lz4/lz4_1.9.3-2.debian.tar.xz' lz4_1.9.3-2.debian.tar.xz 13928 SHA256:d7754a7b7b1fa196666d6459705107355e15fef162352e363e43722e012a04e3
```
Other potentially useful URLs:
- https://sources.debian.net/src/lz4/1.9.3-2/ (for browsing the source)
- https://sources.debian.net/src/lz4/1.9.3-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/lz4/1.9.3-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `mawk=1.3.4.20200120-2`
Binary Packages:
- `mawk=1.3.4.20200120-2`
Licenses: (parsed from: `/usr/share/doc/mawk/copyright`)
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris mawk=1.3.4.20200120-2
'http://deb.debian.org/debian/pool/main/m/mawk/mawk_1.3.4.20200120-2.dsc' mawk_1.3.4.20200120-2.dsc 1915 SHA256:5069c46872ac74f5221250dfb88b31b1f2dbb8a2617c1e013f8f80cc34638c6d
'http://deb.debian.org/debian/pool/main/m/mawk/mawk_1.3.4.20200120.orig.tar.gz' mawk_1.3.4.20200120.orig.tar.gz 468855 SHA256:7fd4cd1e1fae9290fe089171181bbc6291dfd9bca939ca804f0ddb851c8b8237
'http://deb.debian.org/debian/pool/main/m/mawk/mawk_1.3.4.20200120-2.debian.tar.xz' mawk_1.3.4.20200120-2.debian.tar.xz 7504 SHA256:b772ed2f016b0286980c46cbc1f1f4ae62887ef2aa3dff6ef10cae638f923f26
```
Other potentially useful URLs:
- https://sources.debian.net/src/mawk/1.3.4.20200120-2/ (for browsing the source)
- https://sources.debian.net/src/mawk/1.3.4.20200120-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/mawk/1.3.4.20200120-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `ncurses=6.2+20201114-2`
Binary Packages:
- `libtinfo6:amd64=6.2+20201114-2`
- `ncurses-base=6.2+20201114-2`
- `ncurses-bin=6.2+20201114-2`
Licenses: (parsed from: `/usr/share/doc/libtinfo6/copyright`, `/usr/share/doc/ncurses-base/copyright`, `/usr/share/doc/ncurses-bin/copyright`)
- `BSD-3-clause`
- `MIT/X11`
- `X11`
Source:
```console
$ apt-get source -qq --print-uris ncurses=6.2+20201114-2
'http://deb.debian.org/debian/pool/main/n/ncurses/ncurses_6.2%2b20201114-2.dsc' ncurses_6.2+20201114-2.dsc 4106 SHA256:011ec8e3464be0d89d6611ab8fa0a84ac5514c0064e12dec9c52ec7b135408b1
'http://deb.debian.org/debian/pool/main/n/ncurses/ncurses_6.2%2b20201114.orig.tar.gz' ncurses_6.2+20201114.orig.tar.gz 3539796 SHA256:aa3f8cfaff2a2b78f184274ec43d9da910c864e4b4d80fc47b5b48cba9154cd2
'http://deb.debian.org/debian/pool/main/n/ncurses/ncurses_6.2%2b20201114.orig.tar.gz.asc' ncurses_6.2+20201114.orig.tar.gz.asc 265 SHA256:91615d9d5575f9e974e78c6aca55e1885f42d1b2600cebec407be4471bb7a27d
'http://deb.debian.org/debian/pool/main/n/ncurses/ncurses_6.2%2b20201114-2.debian.tar.xz' ncurses_6.2+20201114-2.debian.tar.xz 51812 SHA256:6ebba60b18cf2aceaa67098bfed1b1aa31c03f1a500f45c65ab098ec0a2401d2
```
Other potentially useful URLs:
- https://sources.debian.net/src/ncurses/6.2+20201114-2/ (for browsing the source)
- https://sources.debian.net/src/ncurses/6.2+20201114-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/ncurses/6.2+20201114-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `nettle=3.7.3-1`
Binary Packages:
- `libhogweed6:amd64=3.7.3-1`
- `libnettle8:amd64=3.7.3-1`
Licenses: (parsed from: `/usr/share/doc/libhogweed6/copyright`, `/usr/share/doc/libnettle8/copyright`)
- `Expat`
- `GAP`
- `GPL`
- `GPL-2`
- `GPL-2+`
- `GPL-3+`
- `GPL-3+ with Autoconf exception`
- `LGPL`
- `LGPL-2`
- `LGPL-2+`
- `LGPL-3+`
- `public-domain`
Source:
```console
$ apt-get source -qq --print-uris nettle=3.7.3-1
'http://deb.debian.org/debian/pool/main/n/nettle/nettle_3.7.3-1.dsc' nettle_3.7.3-1.dsc 2033 SHA256:63a1a80f37b6484f479dfa1cbd30152feff3b1a5a2161fdab05b90edde212c1f
'http://deb.debian.org/debian/pool/main/n/nettle/nettle_3.7.3.orig.tar.gz' nettle_3.7.3.orig.tar.gz 2383985 SHA256:661f5eb03f048a3b924c3a8ad2515d4068e40f67e774e8a26827658007e3bcf0
'http://deb.debian.org/debian/pool/main/n/nettle/nettle_3.7.3-1.debian.tar.xz' nettle_3.7.3-1.debian.tar.xz 21956 SHA256:97af0e306aec6f6c5d8e73a7a3ce2856c76bcff9cdcfa7640e932a5a3aee9f24
```
Other potentially useful URLs:
- https://sources.debian.net/src/nettle/3.7.3-1/ (for browsing the source)
- https://sources.debian.net/src/nettle/3.7.3-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/nettle/3.7.3-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `openssl=1.1.1k-1+deb11u1`
Binary Packages:
- `libssl1.1:amd64=1.1.1k-1+deb11u1`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris openssl=1.1.1k-1+deb11u1
'http://deb.debian.org/debian/pool/main/o/openssl/openssl_1.1.1k-1%2bdeb11u1.dsc' openssl_1.1.1k-1+deb11u1.dsc 2652 SHA256:6515a9f1250c42bd42d8439820dde65fbd6f4fbb4a9673bff58e37168a7d3228
'http://deb.debian.org/debian/pool/main/o/openssl/openssl_1.1.1k.orig.tar.gz' openssl_1.1.1k.orig.tar.gz 9823400 SHA256:892a0875b9872acd04a9fde79b1f943075d5ea162415de3047c327df33fbaee5
'http://deb.debian.org/debian/pool/main/o/openssl/openssl_1.1.1k.orig.tar.gz.asc' openssl_1.1.1k.orig.tar.gz.asc 488 SHA256:addeaa197444a62c6063d7f819512c2c22b42141dec9d8ec3bff7e4518e1d1c9
'http://deb.debian.org/debian/pool/main/o/openssl/openssl_1.1.1k-1%2bdeb11u1.debian.tar.xz' openssl_1.1.1k-1+deb11u1.debian.tar.xz 94244 SHA256:68e00f47162ecea0273b4ba453503307b8430bb2d163f92cbbec6f51b11061fd
```
Other potentially useful URLs:
- https://sources.debian.net/src/openssl/1.1.1k-1+deb11u1/ (for browsing the source)
- https://sources.debian.net/src/openssl/1.1.1k-1+deb11u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/openssl/1.1.1k-1+deb11u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `p11-kit=0.23.22-1`
Binary Packages:
- `libp11-kit0:amd64=0.23.22-1`
Licenses: (parsed from: `/usr/share/doc/libp11-kit0/copyright`)
- `BSD-3-Clause`
- `ISC`
- `ISC+IBM`
- `permissive-like-automake-output`
- `same-as-rest-of-p11kit`
Source:
```console
$ apt-get source -qq --print-uris p11-kit=0.23.22-1
'http://deb.debian.org/debian/pool/main/p/p11-kit/p11-kit_0.23.22-1.dsc' p11-kit_0.23.22-1.dsc 2417 SHA256:b5f7a7908a7da082fa74c2a35667f4f4dd1324eaf43ff4b4a0ffa7e2763774a6
'http://deb.debian.org/debian/pool/main/p/p11-kit/p11-kit_0.23.22.orig.tar.xz' p11-kit_0.23.22.orig.tar.xz 830016 SHA256:8a8f40153dd5a3f8e7c03e641f8db400133fb2a6a9ab2aee1b6d0cb0495ec6b6
'http://deb.debian.org/debian/pool/main/p/p11-kit/p11-kit_0.23.22.orig.tar.xz.asc' p11-kit_0.23.22.orig.tar.xz.asc 854 SHA256:52d36bd38ed84dcc394b97da18ff4b4e220f0b13c5e7922f5b908312678b0b02
'http://deb.debian.org/debian/pool/main/p/p11-kit/p11-kit_0.23.22-1.debian.tar.xz' p11-kit_0.23.22-1.debian.tar.xz 22256 SHA256:05a157dbeb054dd14c19c0c4f72c50e57fb69c4cfa4b5d34bc7ecdb5d12e7265
```
Other potentially useful URLs:
- https://sources.debian.net/src/p11-kit/0.23.22-1/ (for browsing the source)
- https://sources.debian.net/src/p11-kit/0.23.22-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/p11-kit/0.23.22-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `pam=1.4.0-9+deb11u1`
Binary Packages:
- `libpam-modules:amd64=1.4.0-9+deb11u1`
- `libpam-modules-bin=1.4.0-9+deb11u1`
- `libpam-runtime=1.4.0-9+deb11u1`
- `libpam0g:amd64=1.4.0-9+deb11u1`
Licenses: (parsed from: `/usr/share/doc/libpam-modules/copyright`, `/usr/share/doc/libpam-modules-bin/copyright`, `/usr/share/doc/libpam-runtime/copyright`, `/usr/share/doc/libpam0g/copyright`)
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris pam=1.4.0-9+deb11u1
'http://deb.debian.org/debian/pool/main/p/pam/pam_1.4.0-9%2bdeb11u1.dsc' pam_1.4.0-9+deb11u1.dsc 1941 SHA256:190b705cc9daeee1febb84e8ac6f31219065f08ff41c8d38fbbb424b545d5ca4
'http://deb.debian.org/debian/pool/main/p/pam/pam_1.4.0.orig.tar.xz' pam_1.4.0.orig.tar.xz 988908 SHA256:cd6d928c51e64139be3bdb38692c68183a509b83d4f2c221024ccd4bcddfd034
'http://deb.debian.org/debian/pool/main/p/pam/pam_1.4.0-9%2bdeb11u1.debian.tar.xz' pam_1.4.0-9+deb11u1.debian.tar.xz 120148 SHA256:bcaaad9423c3ab32c5c4f9e363595a84fe3c535aa9568e42e560028a4e33dfcf
```
Other potentially useful URLs:
- https://sources.debian.net/src/pam/1.4.0-9+deb11u1/ (for browsing the source)
- https://sources.debian.net/src/pam/1.4.0-9+deb11u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/pam/1.4.0-9+deb11u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `pcre2=10.36-2`
Binary Packages:
- `libpcre2-8-0:amd64=10.36-2`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris pcre2=10.36-2
'http://deb.debian.org/debian/pool/main/p/pcre2/pcre2_10.36-2.dsc' pcre2_10.36-2.dsc 2286 SHA256:317f27fd2c578c87b3753a267da2290dc6970c16c81f1f1761694c977a4be4f5
'http://deb.debian.org/debian/pool/main/p/pcre2/pcre2_10.36.orig.tar.gz' pcre2_10.36.orig.tar.gz 2290719 SHA256:b95ddb9414f91a967a887d69617059fb672b914f56fa3d613812c1ee8e8a1a37
'http://deb.debian.org/debian/pool/main/p/pcre2/pcre2_10.36-2.diff.gz' pcre2_10.36-2.diff.gz 6799 SHA256:9a39c9972fac99b020b900bcba16cb18a5ef8d0c8ac7a6df1060193b9fa6ba83
```
Other potentially useful URLs:
- https://sources.debian.net/src/pcre2/10.36-2/ (for browsing the source)
- https://sources.debian.net/src/pcre2/10.36-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/pcre2/10.36-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `pcre3=2:8.39-13`
Binary Packages:
- `libpcre3:amd64=2:8.39-13`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris pcre3=2:8.39-13
'http://deb.debian.org/debian/pool/main/p/pcre3/pcre3_8.39-13.dsc' pcre3_8.39-13.dsc 2226 SHA256:c3a2eb4f02de5b2e00787ed2a35eb82f04ee4b5e99b8ff279bae3c6453aad93b
'http://deb.debian.org/debian/pool/main/p/pcre3/pcre3_8.39.orig.tar.bz2' pcre3_8.39.orig.tar.bz2 1560758 SHA256:b858099f82483031ee02092711689e7245586ada49e534a06e678b8ea9549e8b
'http://deb.debian.org/debian/pool/main/p/pcre3/pcre3_8.39-13.debian.tar.gz' pcre3_8.39-13.debian.tar.gz 27002 SHA256:a2143d7358d69b61955a4f977980050447f8891c0e6737080f2b14b920fbde87
```
Other potentially useful URLs:
- https://sources.debian.net/src/pcre3/2:8.39-13/ (for browsing the source)
- https://sources.debian.net/src/pcre3/2:8.39-13/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/pcre3/2:8.39-13/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `perl=5.32.1-4+deb11u2`
Binary Packages:
- `perl-base=5.32.1-4+deb11u2`
Licenses: (parsed from: `/usr/share/doc/perl-base/copyright`)
- `Artistic`
- `Artistic,`
- `Artistic-2`
- `Artistic-dist`
- `BSD-3-clause`
- `BSD-3-clause-GENERIC`
- `BSD-3-clause-with-weird-numbering`
- `BSD-4-clause-POWERDOG`
- `BZIP`
- `CC0-1.0`
- `DONT-CHANGE-THE-GPL`
- `Expat`
- `GPL-1`
- `GPL-1+`
- `GPL-2`
- `GPL-2+`
- `GPL-3+-WITH-BISON-EXCEPTION`
- `HSIEH-BSD`
- `HSIEH-DERIVATIVE`
- `LGPL-2.1`
- `REGCOMP`
- `REGCOMP,`
- `RRA-KEEP-THIS-NOTICE`
- `SDBM-PUBLIC-DOMAIN`
- `TEXT-TABS`
- `Unicode`
- `ZLIB`
Source:
```console
$ apt-get source -qq --print-uris perl=5.32.1-4+deb11u2
'http://deb.debian.org/debian/pool/main/p/perl/perl_5.32.1-4%2bdeb11u2.dsc' perl_5.32.1-4+deb11u2.dsc 2918 SHA256:3c0c1961d7a5fe835cf7d1a9a97905ff7857db2cd1d113c9fc5250de3aaa4e6b
'http://deb.debian.org/debian/pool/main/p/perl/perl_5.32.1.orig-regen-configure.tar.gz' perl_5.32.1.orig-regen-configure.tar.gz 871331 SHA256:1d179b41283f12ad83f9758430f6ddc49bdf20db5c396aeae7e51ebb4e4afd29
'http://deb.debian.org/debian/pool/main/p/perl/perl_5.32.1.orig.tar.xz' perl_5.32.1.orig.tar.xz 12610988 SHA256:57cc47c735c8300a8ce2fa0643507b44c4ae59012bfdad0121313db639e02309
'http://deb.debian.org/debian/pool/main/p/perl/perl_5.32.1-4%2bdeb11u2.debian.tar.xz' perl_5.32.1-4+deb11u2.debian.tar.xz 165768 SHA256:36b96f84a81c8db85a99e701062457a99efdbcc98b1f1a8912d3919f4b8e0f5a
```
Other potentially useful URLs:
- https://sources.debian.net/src/perl/5.32.1-4+deb11u2/ (for browsing the source)
- https://sources.debian.net/src/perl/5.32.1-4+deb11u2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/perl/5.32.1-4+deb11u2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `sed=4.7-1`
Binary Packages:
- `sed=4.7-1`
Licenses: (parsed from: `/usr/share/doc/sed/copyright`)
- `GPL-3`
Source:
```console
$ apt-get source -qq --print-uris sed=4.7-1
'http://deb.debian.org/debian/pool/main/s/sed/sed_4.7-1.dsc' sed_4.7-1.dsc 1880 SHA256:dd0e8daed987929920f7729771f9c7a5b48d094923aaf686efd2ab19db776108
'http://deb.debian.org/debian/pool/main/s/sed/sed_4.7.orig.tar.xz' sed_4.7.orig.tar.xz 1298316 SHA256:2885768cd0a29ff8d58a6280a270ff161f6a3deb5690b2be6c49f46d4c67bd6a
'http://deb.debian.org/debian/pool/main/s/sed/sed_4.7-1.debian.tar.xz' sed_4.7-1.debian.tar.xz 59824 SHA256:a2ab8d50807fd2242f86d6c6257399e790445ab6f8932f7f487d34361b4fc483
```
Other potentially useful URLs:
- https://sources.debian.net/src/sed/4.7-1/ (for browsing the source)
- https://sources.debian.net/src/sed/4.7-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/sed/4.7-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `shadow=1:4.8.1-1`
Binary Packages:
- `login=1:4.8.1-1`
- `passwd=1:4.8.1-1`
Licenses: (parsed from: `/usr/share/doc/login/copyright`, `/usr/share/doc/passwd/copyright`)
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris shadow=1:4.8.1-1
'http://deb.debian.org/debian/pool/main/s/shadow/shadow_4.8.1-1.dsc' shadow_4.8.1-1.dsc 2215 SHA256:5c9568dc183781ba654b7daeba6d5d6768d4e0417cc8d8b6f2e534dae6fcdaa6
'http://deb.debian.org/debian/pool/main/s/shadow/shadow_4.8.1.orig.tar.xz' shadow_4.8.1.orig.tar.xz 1611196 SHA256:a3ad4630bdc41372f02a647278a8c3514844295d36eefe68ece6c3a641c1ae62
'http://deb.debian.org/debian/pool/main/s/shadow/shadow_4.8.1-1.debian.tar.xz' shadow_4.8.1-1.debian.tar.xz 74752 SHA256:fdbccadc28fcca744f365e0529f3828d0c82bc3513b28976dca7308b40ea4773
```
Other potentially useful URLs:
- https://sources.debian.net/src/shadow/1:4.8.1-1/ (for browsing the source)
- https://sources.debian.net/src/shadow/1:4.8.1-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/shadow/1:4.8.1-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `systemd=247.3-6`
Binary Packages:
- `libsystemd0:amd64=247.3-6`
- `libudev1:amd64=247.3-6`
Licenses: (parsed from: `/usr/share/doc/libsystemd0/copyright`, `/usr/share/doc/libudev1/copyright`)
- `CC0-1.0`
- `Expat`
- `GPL-2`
- `GPL-2 with Linux-syscall-note exception`
- `GPL-2+`
- `LGPL-2.1`
- `LGPL-2.1+`
- `public-domain`
Source:
```console
$ apt-get source -qq --print-uris systemd=247.3-6
'http://deb.debian.org/debian/pool/main/s/systemd/systemd_247.3-6.dsc' systemd_247.3-6.dsc 5322 SHA256:e40588013aa7435fa00517511566d6a41e07e69a2f91d2efd75275149dbefc86
'http://deb.debian.org/debian/pool/main/s/systemd/systemd_247.3.orig.tar.gz' systemd_247.3.orig.tar.gz 9895385 SHA256:2869986e219a8dfc96cc0dffac66e0c13bb70a89e16b85a3948876c146cfa3e0
'http://deb.debian.org/debian/pool/main/s/systemd/systemd_247.3-6.debian.tar.xz' systemd_247.3-6.debian.tar.xz 167220 SHA256:1b2f1fe5d9dcaba111fe92eb12bf9635db7ddbeb70e57a59a601decfa010ffeb
```
Other potentially useful URLs:
- https://sources.debian.net/src/systemd/247.3-6/ (for browsing the source)
- https://sources.debian.net/src/systemd/247.3-6/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/systemd/247.3-6/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `sysvinit=2.96-7`
Binary Packages:
- `sysvinit-utils=2.96-7`
Licenses: (parsed from: `/usr/share/doc/sysvinit-utils/copyright`)
- `GPL-2`
- `GPL-2+`
Source:
```console
$ apt-get source -qq --print-uris sysvinit=2.96-7
'http://deb.debian.org/debian/pool/main/s/sysvinit/sysvinit_2.96-7.dsc' sysvinit_2.96-7.dsc 2586 SHA256:f8e7afbf2f6a5d1e31bbe75ebebdf6a14917494c32868ead3769f5555cbab62c
'http://deb.debian.org/debian/pool/main/s/sysvinit/sysvinit_2.96.orig.tar.xz' sysvinit_2.96.orig.tar.xz 122164 SHA256:2a2e26b72aa235a23ab1c8471005f890309ce1196c83fbc9413c57b9ab62b587
'http://deb.debian.org/debian/pool/main/s/sysvinit/sysvinit_2.96.orig.tar.xz.asc' sysvinit_2.96.orig.tar.xz.asc 313 SHA256:dfc184b95da12c8c888c8ae6b0f26fe8a23b07fbcdd240f6600a8a78b9439fa0
'http://deb.debian.org/debian/pool/main/s/sysvinit/sysvinit_2.96-7.debian.tar.xz' sysvinit_2.96-7.debian.tar.xz 129040 SHA256:c56a9576fccfd85b4463d4a1bce2f8b006cc4228136c34222734738a4880758f
```
Other potentially useful URLs:
- https://sources.debian.net/src/sysvinit/2.96-7/ (for browsing the source)
- https://sources.debian.net/src/sysvinit/2.96-7/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/sysvinit/2.96-7/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `tar=1.34+dfsg-1`
Binary Packages:
- `tar=1.34+dfsg-1`
Licenses: (parsed from: `/usr/share/doc/tar/copyright`)
- `GPL-2`
- `GPL-3`
Source:
```console
$ apt-get source -qq --print-uris tar=1.34+dfsg-1
'http://deb.debian.org/debian/pool/main/t/tar/tar_1.34%2bdfsg-1.dsc' tar_1.34+dfsg-1.dsc 2015 SHA256:12d709cd77e38e5d1753325a9f266b340b5c095a426f438c677b42c031949d89
'http://deb.debian.org/debian/pool/main/t/tar/tar_1.34%2bdfsg.orig.tar.xz' tar_1.34+dfsg.orig.tar.xz 1981736 SHA256:7d57029540cb928394defb3b377b3531237c947e795b51aa8acac0c5ba0e4844
'http://deb.debian.org/debian/pool/main/t/tar/tar_1.34%2bdfsg-1.debian.tar.xz' tar_1.34+dfsg-1.debian.tar.xz 19192 SHA256:7228f5cbd36f937dfc1fec042dee8b3e02d92a06afdd44c586c2c8cfb1905538
```
Other potentially useful URLs:
- https://sources.debian.net/src/tar/1.34+dfsg-1/ (for browsing the source)
- https://sources.debian.net/src/tar/1.34+dfsg-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/tar/1.34+dfsg-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `tzdata=2021a-1+deb11u2`
Binary Packages:
- `tzdata=2021a-1+deb11u2`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris tzdata=2021a-1+deb11u2
'http://deb.debian.org/debian/pool/main/t/tzdata/tzdata_2021a-1%2bdeb11u2.dsc' tzdata_2021a-1+deb11u2.dsc 2269 SHA256:2893ddffaf6cdad7b672c3c21449c32bd3febef0af53a944dcf6845af999f334
'http://deb.debian.org/debian/pool/main/t/tzdata/tzdata_2021a.orig.tar.gz' tzdata_2021a.orig.tar.gz 411892 SHA256:39e7d2ba08c68cbaefc8de3227aab0dec2521be8042cf56855f7dc3a9fb14e08
'http://deb.debian.org/debian/pool/main/t/tzdata/tzdata_2021a.orig.tar.gz.asc' tzdata_2021a.orig.tar.gz.asc 833 SHA256:9dc5f54674166f4ffbc2d4485e656227430ab5f39c9006e6ed9986281117f058
'http://deb.debian.org/debian/pool/main/t/tzdata/tzdata_2021a-1%2bdeb11u2.debian.tar.xz' tzdata_2021a-1+deb11u2.debian.tar.xz 109132 SHA256:ede987eed945f744dea5866d77663f7d01589c3f97eb68a0dd484f24efbec9e2
```
Other potentially useful URLs:
- https://sources.debian.net/src/tzdata/2021a-1+deb11u2/ (for browsing the source)
- https://sources.debian.net/src/tzdata/2021a-1+deb11u2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/tzdata/2021a-1+deb11u2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `util-linux=2.36.1-8+deb11u1`
Binary Packages:
- `bsdutils=1:2.36.1-8+deb11u1`
- `libblkid1:amd64=2.36.1-8+deb11u1`
- `libmount1:amd64=2.36.1-8+deb11u1`
- `libsmartcols1:amd64=2.36.1-8+deb11u1`
- `libuuid1:amd64=2.36.1-8+deb11u1`
- `mount=2.36.1-8+deb11u1`
- `util-linux=2.36.1-8+deb11u1`
Licenses: (parsed from: `/usr/share/doc/bsdutils/copyright`, `/usr/share/doc/libblkid1/copyright`, `/usr/share/doc/libmount1/copyright`, `/usr/share/doc/libsmartcols1/copyright`, `/usr/share/doc/libuuid1/copyright`, `/usr/share/doc/mount/copyright`, `/usr/share/doc/util-linux/copyright`)
- `BSD-2-clause`
- `BSD-3-clause`
- `BSD-4-clause`
- `GPL-2`
- `GPL-2+`
- `GPL-3`
- `GPL-3+`
- `LGPL`
- `LGPL-2`
- `LGPL-2+`
- `LGPL-2.1`
- `LGPL-2.1+`
- `LGPL-3`
- `LGPL-3+`
- `MIT`
- `public-domain`
Source:
```console
$ apt-get source -qq --print-uris util-linux=2.36.1-8+deb11u1
'http://security.debian.org/debian-security/pool/updates/main/u/util-linux/util-linux_2.36.1-8%2bdeb11u1.dsc' util-linux_2.36.1-8+deb11u1.dsc 4461 SHA256:1f42a8e46c383b6fbfb57ea89dfe5adb5b7e6960bc5e8cdb9589109773be6cac
'http://security.debian.org/debian-security/pool/updates/main/u/util-linux/util-linux_2.36.1.orig.tar.xz' util-linux_2.36.1.orig.tar.xz 5231880 SHA256:09fac242172cd8ec27f0739d8d192402c69417617091d8c6e974841568f37eed
'http://security.debian.org/debian-security/pool/updates/main/u/util-linux/util-linux_2.36.1-8%2bdeb11u1.debian.tar.xz' util-linux_2.36.1-8+deb11u1.debian.tar.xz 101448 SHA256:edda1d46e25a40e730c672aac2cd0e44f9eeb0fb64b27be957d6924ed5288a0a
```
Other potentially useful URLs:
- https://sources.debian.net/src/util-linux/2.36.1-8+deb11u1/ (for browsing the source)
- https://sources.debian.net/src/util-linux/2.36.1-8+deb11u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/util-linux/2.36.1-8+deb11u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `xxhash=0.8.0-2`
Binary Packages:
- `libxxhash0:amd64=0.8.0-2`
Licenses: (parsed from: `/usr/share/doc/libxxhash0/copyright`)
- `BSD-2-clause`
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris xxhash=0.8.0-2
'http://deb.debian.org/debian/pool/main/x/xxhash/xxhash_0.8.0-2.dsc' xxhash_0.8.0-2.dsc 1601 SHA256:91c696b5371558ebb12c323b0bd4e15eece0a439ef49c6aa5a6d0c1cf6c7762a
'http://deb.debian.org/debian/pool/main/x/xxhash/xxhash_0.8.0.orig.tar.gz' xxhash_0.8.0.orig.tar.gz 145909 SHA256:7054c3ebd169c97b64a92d7b994ab63c70dd53a06974f1f630ab782c28db0f4f
'http://deb.debian.org/debian/pool/main/x/xxhash/xxhash_0.8.0-2.debian.tar.xz' xxhash_0.8.0-2.debian.tar.xz 4160 SHA256:5c427c2c08019a945412afac02326a24c72b65a83bff59447009db303233aecd
```
Other potentially useful URLs:
- https://sources.debian.net/src/xxhash/0.8.0-2/ (for browsing the source)
- https://sources.debian.net/src/xxhash/0.8.0-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/xxhash/0.8.0-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `xz-utils=5.2.5-2`
Binary Packages:
- `liblzma5:amd64=5.2.5-2`
Licenses: (parsed from: `/usr/share/doc/liblzma5/copyright`)
- `Autoconf`
- `GPL-2`
- `GPL-2+`
- `GPL-3`
- `LGPL-2`
- `LGPL-2.1`
- `LGPL-2.1+`
- `PD`
- `PD-debian`
- `config-h`
- `noderivs`
- `none`
- `permissive-fsf`
- `permissive-nowarranty`
- `probably-PD`
Source:
```console
$ apt-get source -qq --print-uris xz-utils=5.2.5-2
'http://deb.debian.org/debian/pool/main/x/xz-utils/xz-utils_5.2.5-2.dsc' xz-utils_5.2.5-2.dsc 2312 SHA256:fa2706f0c863bee4715460bc9103c6fb73ad2cbc12d8d6d7d5dced81ab349949
'http://deb.debian.org/debian/pool/main/x/xz-utils/xz-utils_5.2.5.orig.tar.xz' xz-utils_5.2.5.orig.tar.xz 1148824 SHA256:3e1e518ffc912f86608a8cb35e4bd41ad1aec210df2a47aaa1f95e7f5576ef56
'http://deb.debian.org/debian/pool/main/x/xz-utils/xz-utils_5.2.5.orig.tar.xz.asc' xz-utils_5.2.5.orig.tar.xz.asc 833 SHA256:6efc0075a58912e640119d2b52ef7d1518b260d8720fadc73df21ab7fc727624
'http://deb.debian.org/debian/pool/main/x/xz-utils/xz-utils_5.2.5-2.debian.tar.xz' xz-utils_5.2.5-2.debian.tar.xz 33532 SHA256:7bf06a86c35cc6b21a7731df9e11d241f8d3c16b0fe6ed78d64506d1bc29b06e
```
Other potentially useful URLs:
- https://sources.debian.net/src/xz-utils/5.2.5-2/ (for browsing the source)
- https://sources.debian.net/src/xz-utils/5.2.5-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/xz-utils/5.2.5-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `zlib=1:1.2.11.dfsg-2`
Binary Packages:
- `zlib1g:amd64=1:1.2.11.dfsg-2`
Licenses: (parsed from: `/usr/share/doc/zlib1g/copyright`)
- `Zlib`
Source:
```console
$ apt-get source -qq --print-uris zlib=1:1.2.11.dfsg-2
'http://deb.debian.org/debian/pool/main/z/zlib/zlib_1.2.11.dfsg-2.dsc' zlib_1.2.11.dfsg-2.dsc 2397 SHA256:ce8c40737357aeaf17e9ca952a631c9bde4bcfc352c2bbe963836202b12c10a7
'http://deb.debian.org/debian/pool/main/z/zlib/zlib_1.2.11.dfsg.orig.tar.gz' zlib_1.2.11.dfsg.orig.tar.gz 370248 SHA256:80c481411a4fe8463aeb8270149a0e80bb9eaf7da44132b6e16f2b5af01bc899
'http://deb.debian.org/debian/pool/main/z/zlib/zlib_1.2.11.dfsg-2.debian.tar.xz' zlib_1.2.11.dfsg-2.debian.tar.xz 19244 SHA256:8602accb97cb92bd52e0d48fa958e67ccad4382a948cca716d5dd24bd0b43bd7
```
Other potentially useful URLs:
- https://sources.debian.net/src/zlib/1:1.2.11.dfsg-2/ (for browsing the source)
- https://sources.debian.net/src/zlib/1:1.2.11.dfsg-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/zlib/1:1.2.11.dfsg-2/ (for access to the source package after it no longer exists in the archive)
| docker-library/repo-info | repos/redis/local/5.md | Markdown | apache-2.0 | 86,431 |
<div data-dojo-type="dijit.layout.SplitContainer" data-dojo-props='orientation:"vertical"'>
<div data-dojo-type="dijit.layout.ContentPane" data-dojo-props='title:"split #1"'>
<p>Top of split container loaded via an href.</p>
</div>
<div data-dojo-type="dijit.layout.ContentPane" data-dojo-props='title:"split #2"'>
<p>Bottom of split container loaded via an href.</p>
<p>
Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Aenean
semper sagittis velit. Cras in mi. Duis porta mauris ut ligula. Proin
porta rutrum lacus. Etiam consequat scelerisque quam. Nulla facilisi.
Maecenas luctus venenatis nulla. In sit amet dui non mi semper iaculis.
Sed molestie tortor at ipsum. Morbi dictum rutrum magna. Sed vitae
risus.
</p>
<p>Aliquam vitae enim. Duis scelerisque metus auctor est venenatis
imperdiet. Fusce dignissim porta augue. Nulla vestibulum. Integer lorem
nunc, ullamcorper a, commodo ac, malesuada sed, dolor. Aenean id mi in
massa bibendum suscipit. Integer eros. Nullam suscipit mauris. In
pellentesque. Mauris ipsum est, pharetra semper, pharetra in, viverra
quis, tellus. Etiam purus. Quisque egestas, tortor ac cursus lacinia,
felis leo adipiscing nisi, et rhoncus elit dolor eget eros. Fusce ut
quam. Suspendisse eleifend leo vitae ligula. Nulla facilisi. Nulla
rutrum, erat vitae lacinia dictum, pede purus imperdiet lacus, ut
semper velit ante id metus. Praesent massa dolor, porttitor sed,
pulvinar in, consequat ut, leo. Nullam nec est. Aenean id risus blandit
tortor pharetra congue. Suspendisse pulvinar.
</p>
<p>Vestibulum convallis eros ac justo. Proin dolor. Etiam aliquam. Nam
ornare elit vel augue. Suspendisse potenti. Etiam sed mauris eu neque
nonummy mollis. Vestibulum vel purus ac pede semper accumsan. Vivamus
lobortis, sem vitae nonummy lacinia, nisl est gravida magna, non cursus
est quam sed urna. Phasellus adipiscing justo in ipsum. Duis sagittis
dolor sit amet magna. Suspendisse suscipit, neque eu dictum auctor,
nisi augue tincidunt arcu, non lacinia magna purus nec magna. Praesent
pretium sollicitudin sapien. Suspendisse imperdiet. Class aptent taciti
sociosqu ad litora torquent per conubia nostra, per inceptos
hymenaeos.
</p>
</div>
</div>
| sulistionoadi/belajar-springmvc-dojo | training-web/src/main/webapp/js/dojotoolkit/dijit/tests/layout/tab4.html | HTML | apache-2.0 | 2,261 |
/*!
* Module dependencies.
*/
var Command = require('./util/command'),
phonegapbuild = require('./util/phonegap-build'),
util = require('util');
/*!
* Command setup.
*/
// Factory entry point: the phonegap CLI calls `create` to build one
// command instance bound to the given phonegap context.
module.exports = {
    create: function(phonegap) {
        return new RemoteLogoutCommand(phonegap);
    }
};
// Command implementing `phonegap remote logout`; all shared wiring is
// delegated to the Command base class (Command.apply returns the
// constructed instance to match the parent's contract).
function RemoteLogoutCommand(phonegap) {
    return Command.apply(this, arguments);
}
util.inherits(RemoteLogoutCommand, Command);
/**
* Logout.
*
* Logout of PhoneGap/Build.
*
* Options:
*
* - `options` {Object} is unused and should be `{}`.
* - [`callback`] {Function} is a callback function.
* - `e` {Error} is null unless there is an error.
*
* Returns:
*
* {PhoneGap} for chaining.
*/
RemoteLogoutCommand.prototype.run = function(options, callback) {
    // the options object is mandatory; the callback is not
    if (!options) throw new Error('requires options parameter');
    callback = callback || function() {};

    // delegate to the PhoneGap/Build client, forwarding only the error
    phonegapbuild.logout(options, function(e) {
        callback(e);
    });

    // chainable
    return this.phonegap;
};
| mati191188/ecopueblo-mobile | node_modules/phonegap/lib/phonegap/remote.logout.js | JavaScript | apache-2.0 | 1,079 |
/*
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.cas.services.jmx;
import org.jasig.cas.services.RegisteredService;
import org.jasig.cas.services.RegisteredServiceImpl;
import org.jasig.cas.services.ServicesManager;
import org.springframework.jmx.export.annotation.ManagedAttribute;
import org.springframework.jmx.export.annotation.ManagedOperation;
import org.springframework.jmx.export.annotation.ManagedOperationParameter;
import org.springframework.util.Assert;
import javax.validation.constraints.NotNull;
import java.util.ArrayList;
import java.util.List;
/**
* Abstract base class to support both the {@link org.jasig.cas.services.ServicesManager} and the
* {@link org.jasig.cas.services.ReloadableServicesManager}.
*
* @author <a href="mailto:tobias.trelle@proximity.de">Tobias Trelle</a>
* @author Scott Battaglia
* @version $Revision$ $Date$
* @since 3.4.4
*/
/**
 * Base MBean that exposes a {@link ServicesManager} over JMX. Subclasses
 * bind the concrete manager type (plain or reloadable).
 *
 * @param <T> the concrete services-manager type being exposed
 */
public abstract class AbstractServicesManagerMBean<T extends ServicesManager> {

    /** The manager all JMX operations delegate to; must not be null. */
    @NotNull
    private T servicesManager;

    protected AbstractServicesManagerMBean(final T servicesManager) {
        this.servicesManager = servicesManager;
    }

    protected final T getServicesManager() {
        return this.servicesManager;
    }

    @ManagedAttribute(description = "Retrieves the list of Registered Services in a slightly friendlier output.")
    public final List<String> getRegisteredServicesAsStrings() {
        final List<String> summaries = new ArrayList<String>();

        for (final RegisteredService service : this.servicesManager.getAllServices()) {
            summaries.add("id: " + service.getId()
                + " name: " + service.getName()
                + " enabled: " + service.isEnabled()
                + " ssoEnabled: " + service.isSsoEnabled()
                + " serviceId: " + service.getServiceId());
        }

        return summaries;
    }

    @ManagedOperation(description = "Can remove a service based on its identifier.")
    @ManagedOperationParameter(name="id", description = "the identifier to remove")
    public final RegisteredService removeService(final long id) {
        return this.servicesManager.delete(id);
    }

    @ManagedOperation(description = "Disable a service by id.")
    @ManagedOperationParameter(name="id", description = "the identifier to disable")
    public final void disableService(final long id) {
        changeEnabledState(id, false);
    }

    @ManagedOperation(description = "Enable a service by its id.")
    @ManagedOperationParameter(name="id", description = "the identifier to enable.")
    public final void enableService(final long id) {
        changeEnabledState(id, true);
    }

    /** Flips the enabled flag on the service with the given id and persists it. */
    private void changeEnabledState(final long id, final boolean newState) {
        final RegisteredService service = this.servicesManager.findServiceBy(id);
        Assert.notNull(service, "invalid RegisteredService id");

        // The legacy API exposes no setter on the interface, so the cast to
        // the concrete implementation is required to toggle the flag.
        ((RegisteredServiceImpl) service).setEnabled(newState);
        this.servicesManager.save(service);
    }
}
| briandwyer/cas-hudson | cas-server-core/src/main/java/org/jasig/cas/services/jmx/AbstractServicesManagerMBean.java | Java | apache-2.0 | 3,919 |
//go:build 386 || amd64p32 || arm || mipsle || mips64p32le
// +build 386 amd64p32 arm mipsle mips64p32le
package sys
import (
"unsafe"
)
// Pointer wraps an unsafe.Pointer to be 64bit to
// conform to the syscall specification.
type Pointer struct {
	ptr unsafe.Pointer // pointer value in the low word (little-endian 32-bit targets)
	pad uint32         // zero padding widening the struct to 64 bits
}
| opencontainers/runc | vendor/github.com/cilium/ebpf/internal/sys/ptr_32_le.go | GO | apache-2.0 | 288 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.sparta.serving.core.models.policy.writer
/**
 * Writer option describing how an output field's value is auto-calculated.
 * All sources are optional; presumably at most one is set per field —
 * confirm against the writer implementations that consume this model.
 *
 * @param fromNotNullFields settings for deriving the value from not-null fields
 * @param fromPkFields      settings for deriving the value from primary-key fields
 * @param fromFields        settings for deriving the value from an explicit field list
 * @param fromFixedValue    settings for using a fixed literal value
 */
case class AutoCalculatedFieldModel(
                                     fromNotNullFields: Option[FromNotNullFieldsModel] = None,
                                     fromPkFields: Option[FromPkFieldsModel] = None,
                                     fromFields: Option[FromFieldsModel] = None,
                                     fromFixedValue: Option[FromFixedValueModel] = None
                                   )
| diegohurtado/sparta | serving-core/src/main/scala/com/stratio/sparta/serving/core/models/policy/writer/AutoCalculatedFieldModel.scala | Scala | apache-2.0 | 1,105 |
cask 'multibit' do
  version '0.5.19'
  # SHA-256 checksum of the DMG referenced by `url` below.
  sha256 'f84aefa0b3762e36659ea3e71806f747db4198641d658d88c8772978b23f99dc'
  url "https://multibit.org/releases/multibit-classic/multibit-classic-#{version}/multibit-classic-macos-#{version}.dmg"
  # The download is verified against the project's detached GPG signature.
  gpg "#{url}.asc",
      :key_id => '23f7fb7b'
  name 'MultiBit'
  homepage 'https://multibit.org/'
  license :mit
  app 'MultiBit.app'
end
// This file was procedurally generated from the following sources:
// - src/dstr-binding/obj-ptrn-id-init-skipped.case
// - src/dstr-binding/default/cls-decl-async-gen-meth-dflt.template
/*---
description: Destructuring initializer is not evaluated when value is not `undefined` (class expression async generator method (default parameters))
esid: sec-class-definitions-runtime-semantics-evaluation
features: [async-iteration]
flags: [generated, async]
info: |
ClassDeclaration : class BindingIdentifier ClassTail
1. Let className be StringValue of BindingIdentifier.
2. Let value be the result of ClassDefinitionEvaluation of ClassTail with
argument className.
[...]
14.5.14 Runtime Semantics: ClassDefinitionEvaluation
21. For each ClassElement m in order from methods
a. If IsStatic of m is false, then
i. Let status be the result of performing
PropertyDefinitionEvaluation for m with arguments proto and
false.
[...]
Runtime Semantics: PropertyDefinitionEvaluation
AsyncGeneratorMethod :
async [no LineTerminator here] * PropertyName ( UniqueFormalParameters )
{ AsyncGeneratorBody }
1. Let propKey be the result of evaluating PropertyName.
2. ReturnIfAbrupt(propKey).
3. If the function code for this AsyncGeneratorMethod is strict mode code, let strict be true.
Otherwise let strict be false.
4. Let scope be the running execution context's LexicalEnvironment.
5. Let closure be ! AsyncGeneratorFunctionCreate(Method, UniqueFormalParameters,
AsyncGeneratorBody, scope, strict).
[...]
13.3.3.7 Runtime Semantics: KeyedBindingInitialization
SingleNameBinding : BindingIdentifier Initializeropt
[...]
6. If Initializer is present and v is undefined, then
[...]
[...]
---*/
// Counts evaluations of the destructuring default initializers; it must
// stay 0 because every destructured property is bound to a value that is
// not `undefined`.
var initCount = 0;
function counter() {
  initCount += 1;
}

var callCount = 0;
class C {
  async *method({ w = counter(), x = counter(), y = counter(), z = counter() } = { w: null, x: 0, y: false, z: '' }) {
    assert.sameValue(w, null);
    assert.sameValue(x, 0);
    assert.sameValue(y, false);
    assert.sameValue(z, '');
    assert.sameValue(initCount, 0);
    callCount = callCount + 1;
  }
};

// Drive the async generator once and confirm the method body ran.
new C().method().next().then(() => {
  assert.sameValue(callCount, 1, 'invoked exactly once');
}).then($DONE, $DONE);
| sebastienros/jint | Jint.Tests.Test262/test/language/statements/class/dstr-async-gen-meth-dflt-obj-ptrn-id-init-skipped.js | JavaScript | bsd-2-clause | 2,395 |
/*
*@brief RDTSC implementation
*
*@date 22.10.2013
*
*
*/
#include <stdint.h>
#include <hal/cpu_info.h>
/*
 * Read the CPU timestamp counter (TSC).
 *
 * RDTSC returns the 64-bit counter in EDX:EAX.  The "=a"/"=d"
 * constraints tell the compiler exactly which registers the
 * instruction writes.  The previous version used two "=r" outputs
 * with explicit movl instructions; the compiler was free to allocate
 * one of those outputs to EAX or EDX, in which case the first mov
 * could clobber the register the second mov still had to read.
 */
uint64_t get_cpu_counter(void) {
	uint32_t hi, lo;

	asm volatile ( "rdtsc" : "=a"(lo), "=d"(hi) );

	return ((uint64_t) hi << 32) | lo;
}
| gzoom13/embox | src/arch/x86/lib/cpu_performance/cpu_counter.c | C | bsd-2-clause | 310 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML3.2 EN">
<HTML>
<HEAD>
<META NAME="GENERATOR" CONTENT="DOCTEXT">
<TITLE>MPI_Ibsend</TITLE>
</HEAD>
<BODY BGCOLOR="FFFFFF">
<A NAME="MPI_Ibsend"><H1>MPI_Ibsend</H1></A>
Starts a nonblocking buffered send
<H2>Synopsis</H2>
<PRE>
int MPI_Ibsend(void *buf, int count, MPI_Datatype datatype, int dest, int tag,
MPI_Comm comm, MPI_Request *request)
</PRE>
<H2>Input Parameters</H2>
<DL>
<DT><B>buf </B><DD>initial address of send buffer (choice)
<DT><B>count </B><DD>number of elements in send buffer (integer)
<DT><B>datatype </B><DD>datatype of each send buffer element (handle)
<DT><B>dest </B><DD>rank of destination (integer)
<DT><B>tag </B><DD>message tag (integer)
<DT><B>comm </B><DD>communicator (handle)
</DL>
<P>
<H2>Output Parameter</H2>
<DL><DT><B>request </B> <DD> communication request (handle)
</DL>
<P>
<H2>Thread and Interrupt Safety</H2>
<P>
This routine is thread-safe. This means that this routine may be
safely used by multiple threads without the need for any user-provided
thread locks. However, the routine is not interrupt safe. Typically,
this is due to the use of memory allocation routines such as <TT>malloc
</TT>or other non-MPICH runtime routines that are themselves not interrupt-safe.
<P>
<H2>Notes for Fortran</H2>
All MPI routines in Fortran (except for <TT>MPI_WTIME</TT> and <TT>MPI_WTICK</TT>) have
an additional argument <TT>ierr</TT> at the end of the argument list. <TT>ierr
</TT>is an integer and has the same meaning as the return value of the routine
in C. In Fortran, MPI routines are subroutines, and are invoked with the
<TT>call</TT> statement.
<P>
All MPI objects (e.g., <TT>MPI_Datatype</TT>, <TT>MPI_Comm</TT>) are of type <TT>INTEGER
</TT>in Fortran.
<P>
<H2>Errors</H2>
<P>
All MPI routines (except <TT>MPI_Wtime</TT> and <TT>MPI_Wtick</TT>) return an error value;
C routines as the value of the function and Fortran routines in the last
argument. Before the value is returned, the current MPI error handler is
called. By default, this error handler aborts the MPI job. The error handler
may be changed with <TT>MPI_Comm_set_errhandler</TT> (for communicators),
<TT>MPI_File_set_errhandler</TT> (for files), and <TT>MPI_Win_set_errhandler</TT> (for
RMA windows). The MPI-1 routine <TT>MPI_Errhandler_set</TT> may be used but
its use is deprecated. The predefined error handler
<TT>MPI_ERRORS_RETURN</TT> may be used to cause error values to be returned.
Note that MPI does <EM>not</EM> guarantee that an MPI program can continue past
an error; however, MPI implementations will attempt to continue whenever
possible.
<P>
<DL><DT><B>MPI_SUCCESS </B> <DD> No error; MPI routine completed successfully.
</DL>
<DL><DT><B>MPI_ERR_COMM </B> <DD> Invalid communicator. A common error is to use a null
communicator in a call (not even allowed in <TT>MPI_Comm_rank</TT>).
</DL>
<DL><DT><B>MPI_ERR_COUNT </B> <DD> Invalid count argument. Count arguments must be
non-negative; a count of zero is often valid.
</DL>
<DL><DT><B>MPI_ERR_TYPE </B> <DD> Invalid datatype argument. May be an uncommitted
MPI_Datatype (see <TT>MPI_Type_commit</TT>).
</DL>
<DL><DT><B>MPI_ERR_TAG </B> <DD> Invalid tag argument. Tags must be non-negative; tags
in a receive (<TT>MPI_Recv</TT>, <TT>MPI_Irecv</TT>, <TT>MPI_Sendrecv</TT>, etc.) may
also be <TT>MPI_ANY_TAG</TT>. The largest tag value is available through
the attribute <TT>MPI_TAG_UB</TT>.
</DL>
<DL><DT><B>MPI_ERR_RANK </B> <DD> Invalid source or destination rank. Ranks must be between
zero and the size of the communicator minus one; ranks in a receive
(<TT>MPI_Recv</TT>, <TT>MPI_Irecv</TT>, <TT>MPI_Sendrecv</TT>, etc.) may also be <TT>MPI_ANY_SOURCE</TT>.
</DL>
<DL><DT><B>MPI_ERR_BUFFER </B> <DD> Invalid buffer pointer. Usually a null buffer where
one is not valid.
</DL>
<P>
<P><B>Location:</B>ibsend.c<P>
</BODY></HTML>
| hydrosolutions/model_RRMDA_Themi | java/resources/linux64_gnu/share/doc/www3/MPI_Ibsend.html | HTML | bsd-2-clause | 3,899 |
/*
* Copyright (c) 2005 Topspin Communications. All rights reserved.
* Copyright (c) 2005 Mellanox Technologies Ltd. All rights reserved.
* Copyright (c) 2006 Cisco Systems. All rights reserved.
*
* This software is available to you under a choice of one of two
* licenses. You may choose to be licensed under the terms of the GNU
* General Public License (GPL) Version 2, available from the file
* COPYING in the main directory of this source tree, or the
* OpenIB.org BSD license below:
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following
* disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#if HAVE_CONFIG_H
# include <config.h>
#endif /* HAVE_CONFIG_H */
#include <stdio.h>
#include <stdlib.h>
#include <pthread.h>
#include <netinet/in.h>
#include <string.h>
#include <infiniband/opcode.h>
#include "mthca.h"
#include "doorbell.h"
enum {
MTHCA_CQ_DOORBELL = 0x20
};
enum {
CQ_OK = 0,
CQ_EMPTY = -1,
CQ_POLL_ERR = -2
};
#define MTHCA_TAVOR_CQ_DB_INC_CI (1 << 24)
#define MTHCA_TAVOR_CQ_DB_REQ_NOT (2 << 24)
#define MTHCA_TAVOR_CQ_DB_REQ_NOT_SOL (3 << 24)
#define MTHCA_TAVOR_CQ_DB_SET_CI (4 << 24)
#define MTHCA_TAVOR_CQ_DB_REQ_NOT_MULT (5 << 24)
#define MTHCA_ARBEL_CQ_DB_REQ_NOT_SOL (1 << 24)
#define MTHCA_ARBEL_CQ_DB_REQ_NOT (2 << 24)
#define MTHCA_ARBEL_CQ_DB_REQ_NOT_MULT (3 << 24)
enum {
MTHCA_CQ_ENTRY_OWNER_SW = 0x00,
MTHCA_CQ_ENTRY_OWNER_HW = 0x80,
MTHCA_ERROR_CQE_OPCODE_MASK = 0xfe
};
enum {
SYNDROME_LOCAL_LENGTH_ERR = 0x01,
SYNDROME_LOCAL_QP_OP_ERR = 0x02,
SYNDROME_LOCAL_EEC_OP_ERR = 0x03,
SYNDROME_LOCAL_PROT_ERR = 0x04,
SYNDROME_WR_FLUSH_ERR = 0x05,
SYNDROME_MW_BIND_ERR = 0x06,
SYNDROME_BAD_RESP_ERR = 0x10,
SYNDROME_LOCAL_ACCESS_ERR = 0x11,
SYNDROME_REMOTE_INVAL_REQ_ERR = 0x12,
SYNDROME_REMOTE_ACCESS_ERR = 0x13,
SYNDROME_REMOTE_OP_ERR = 0x14,
SYNDROME_RETRY_EXC_ERR = 0x15,
SYNDROME_RNR_RETRY_EXC_ERR = 0x16,
SYNDROME_LOCAL_RDD_VIOL_ERR = 0x20,
SYNDROME_REMOTE_INVAL_RD_REQ_ERR = 0x21,
SYNDROME_REMOTE_ABORTED_ERR = 0x22,
SYNDROME_INVAL_EECN_ERR = 0x23,
SYNDROME_INVAL_EEC_STATE_ERR = 0x24
};
struct mthca_cqe {
uint32_t my_qpn;
uint32_t my_ee;
uint32_t rqpn;
uint16_t sl_g_mlpath;
uint16_t rlid;
uint32_t imm_etype_pkey_eec;
uint32_t byte_cnt;
uint32_t wqe;
uint8_t opcode;
uint8_t is_send;
uint8_t reserved;
uint8_t owner;
};
struct mthca_err_cqe {
uint32_t my_qpn;
uint32_t reserved1[3];
uint8_t syndrome;
uint8_t vendor_err;
uint16_t db_cnt;
uint32_t reserved2;
uint32_t wqe;
uint8_t opcode;
uint8_t reserved3[2];
uint8_t owner;
};
/* Return a pointer to CQE number 'entry' within the CQ buffer. */
static inline struct mthca_cqe *get_cqe(struct mthca_cq *cq, int entry)
{
	return cq->buf.buf + entry * MTHCA_CQ_ENTRY_SIZE;
}
/*
 * Return CQE i if software owns it (i.e. it holds a completed entry),
 * or NULL while the hardware still owns it.
 */
static inline struct mthca_cqe *cqe_sw(struct mthca_cq *cq, int i)
{
	struct mthca_cqe *cqe = get_cqe(cq, i);
	return MTHCA_CQ_ENTRY_OWNER_HW & cqe->owner ? NULL : cqe;
}
/* Peek at the CQE at the current consumer index, if software-owned. */
static inline struct mthca_cqe *next_cqe_sw(struct mthca_cq *cq)
{
	return cqe_sw(cq, cq->cons_index & cq->ibv_cq.cqe);
}
/* Hand a CQE back to the hardware by setting its owner bit. */
static inline void set_cqe_hw(struct mthca_cqe *cqe)
{
	VALGRIND_MAKE_MEM_UNDEFINED(cqe, sizeof *cqe);
	cqe->owner = MTHCA_CQ_ENTRY_OWNER_HW;
}
/*
* incr is ignored in native Arbel (mem-free) mode, so cq->cons_index
* should be correct before calling update_cons_index().
*/
static inline void update_cons_index(struct mthca_cq *cq, int incr)
{
	uint32_t doorbell[2];
	if (mthca_is_memfree(cq->ibv_cq.context)) {
		/* Mem-free (Arbel): write the new index into the CQ doorbell
		 * record in host memory; the barrier orders the store before
		 * any later MMIO. */
		*cq->set_ci_db = htonl(cq->cons_index);
		wmb();
	} else {
		/* Tavor: ring the "increment CI" doorbell; the second word
		 * carries (incr - 1) additional entries to consume. */
		doorbell[0] = htonl(MTHCA_TAVOR_CQ_DB_INC_CI | cq->cqn);
		doorbell[1] = htonl(incr - 1);
		mthca_write64(doorbell, to_mctx(cq->ibv_cq.context), MTHCA_CQ_DOORBELL);
	}
}
/* Debug helper: hex-dump the eight 32-bit words of a CQE to stdout. */
static void dump_cqe(void *cqe_ptr)
{
	uint32_t *words = cqe_ptr;
	int i;

	for (i = 0; i < 8; ++i)
		printf(" [%2x] %08x\n", i * 4, ntohl(words[i]));
}
/*
 * Translate an error CQE into an ibv_wc status and, on Tavor-mode
 * hardware, repair the CQE so it can keep accounting for further WQEs
 * on the same chain.  On return *free_cqe tells the caller whether the
 * CQE may be handed back to the hardware.
 */
static int handle_error_cqe(struct mthca_cq *cq,
			    struct mthca_qp *qp, int wqe_index, int is_send,
			    struct mthca_err_cqe *cqe,
			    struct ibv_wc *wc, int *free_cqe)
{
	int err;
	int dbd;
	uint32_t new_wqe;
	if (cqe->syndrome == SYNDROME_LOCAL_QP_OP_ERR) {
		printf("local QP operation err "
		       "(QPN %06x, WQE @ %08x, CQN %06x, index %d)\n",
		       ntohl(cqe->my_qpn), ntohl(cqe->wqe),
		       cq->cqn, cq->cons_index);
		dump_cqe(cqe);
	}
	/*
	 * For completions in error, only work request ID, status, vendor error
	 * (and freed resource count for RD) have to be set.
	 */
	switch (cqe->syndrome) {
	case SYNDROME_LOCAL_LENGTH_ERR:
		wc->status = IBV_WC_LOC_LEN_ERR;
		break;
	case SYNDROME_LOCAL_QP_OP_ERR:
		wc->status = IBV_WC_LOC_QP_OP_ERR;
		break;
	case SYNDROME_LOCAL_EEC_OP_ERR:
		wc->status = IBV_WC_LOC_EEC_OP_ERR;
		break;
	case SYNDROME_LOCAL_PROT_ERR:
		wc->status = IBV_WC_LOC_PROT_ERR;
		break;
	case SYNDROME_WR_FLUSH_ERR:
		wc->status = IBV_WC_WR_FLUSH_ERR;
		break;
	case SYNDROME_MW_BIND_ERR:
		wc->status = IBV_WC_MW_BIND_ERR;
		break;
	case SYNDROME_BAD_RESP_ERR:
		wc->status = IBV_WC_BAD_RESP_ERR;
		break;
	case SYNDROME_LOCAL_ACCESS_ERR:
		wc->status = IBV_WC_LOC_ACCESS_ERR;
		break;
	case SYNDROME_REMOTE_INVAL_REQ_ERR:
		wc->status = IBV_WC_REM_INV_REQ_ERR;
		break;
	case SYNDROME_REMOTE_ACCESS_ERR:
		wc->status = IBV_WC_REM_ACCESS_ERR;
		break;
	case SYNDROME_REMOTE_OP_ERR:
		wc->status = IBV_WC_REM_OP_ERR;
		break;
	case SYNDROME_RETRY_EXC_ERR:
		wc->status = IBV_WC_RETRY_EXC_ERR;
		break;
	case SYNDROME_RNR_RETRY_EXC_ERR:
		wc->status = IBV_WC_RNR_RETRY_EXC_ERR;
		break;
	case SYNDROME_LOCAL_RDD_VIOL_ERR:
		wc->status = IBV_WC_LOC_RDD_VIOL_ERR;
		break;
	case SYNDROME_REMOTE_INVAL_RD_REQ_ERR:
		wc->status = IBV_WC_REM_INV_RD_REQ_ERR;
		break;
	case SYNDROME_REMOTE_ABORTED_ERR:
		wc->status = IBV_WC_REM_ABORT_ERR;
		break;
	case SYNDROME_INVAL_EECN_ERR:
		wc->status = IBV_WC_INV_EECN_ERR;
		break;
	case SYNDROME_INVAL_EEC_STATE_ERR:
		wc->status = IBV_WC_INV_EEC_STATE_ERR;
		break;
	default:
		wc->status = IBV_WC_GENERAL_ERR;
		break;
	}
	wc->vendor_err = cqe->vendor_err;
	/*
	 * Mem-free HCAs always generate one CQE per WQE, even in the
	 * error case, so we don't have to check the doorbell count, etc.
	 */
	if (mthca_is_memfree(cq->ibv_cq.context))
		return 0;
	err = mthca_free_err_wqe(qp, is_send, wqe_index, &dbd, &new_wqe);
	if (err)
		return err;
	/*
	 * If we're at the end of the WQE chain, or we've used up our
	 * doorbell count, free the CQE. Otherwise just update it for
	 * the next poll operation.
	 *
	 * This doesn't apply to mem-free HCAs, which never use the
	 * doorbell count field. In that case we always free the CQE.
	 */
	if (mthca_is_memfree(cq->ibv_cq.context) ||
	    !(new_wqe & htonl(0x3f)) || (!cqe->db_cnt && dbd))
		return 0;
	/* Re-arm this CQE to flush the remaining WQEs on the chain. */
	cqe->db_cnt = htons(ntohs(cqe->db_cnt) - dbd);
	cqe->wqe = new_wqe;
	cqe->syndrome = SYNDROME_WR_FLUSH_ERR;
	*free_cqe = 0;
	return 0;
}
/*
 * Consume one CQE if available: resolve the QP the completion belongs
 * to (cached in *cur_qp across calls), reclaim the WQE, and fill in
 * *wc.  Returns CQ_OK, CQ_EMPTY or CQ_POLL_ERR; *freed is incremented
 * for every CQE handed back to hardware.  Caller holds the CQ lock.
 */
static inline int mthca_poll_one(struct mthca_cq *cq,
				 struct mthca_qp **cur_qp,
				 int *freed,
				 struct ibv_wc *wc)
{
	struct mthca_wq *wq;
	struct mthca_cqe *cqe;
	struct mthca_srq *srq;
	uint32_t qpn;
	uint32_t wqe;
	int wqe_index;
	int is_error;
	int is_send;
	int free_cqe = 1;
	int err = 0;
	cqe = next_cqe_sw(cq);
	if (!cqe)
		return CQ_EMPTY;
	VALGRIND_MAKE_MEM_DEFINED(cqe, sizeof *cqe);
	/*
	 * Make sure we read CQ entry contents after we've checked the
	 * ownership bit.
	 */
	rmb();
	qpn = ntohl(cqe->my_qpn);
	/* Error CQEs overload the opcode field; bit 0 then flags "send". */
	is_error = (cqe->opcode & MTHCA_ERROR_CQE_OPCODE_MASK) ==
		MTHCA_ERROR_CQE_OPCODE_MASK;
	is_send = is_error ? cqe->opcode & 0x01 : cqe->is_send & 0x80;
	if (!*cur_qp || ntohl(cqe->my_qpn) != (*cur_qp)->ibv_qp.qp_num) {
		/*
		 * We do not have to take the QP table lock here,
		 * because CQs will be locked while QPs are removed
		 * from the table.
		 */
		*cur_qp = mthca_find_qp(to_mctx(cq->ibv_cq.context), ntohl(cqe->my_qpn));
		if (!*cur_qp) {
			err = CQ_POLL_ERR;
			goto out;
		}
	}
	wc->qp_num = (*cur_qp)->ibv_qp.qp_num;
	if (is_send) {
		wq = &(*cur_qp)->sq;
		wqe_index = ((ntohl(cqe->wqe) - (*cur_qp)->send_wqe_offset) >> wq->wqe_shift);
		wc->wr_id = (*cur_qp)->wrid[wqe_index + (*cur_qp)->rq.max];
	} else if ((*cur_qp)->ibv_qp.srq) {
		srq = to_msrq((*cur_qp)->ibv_qp.srq);
		/* NOTE(review): htonl performs the same byte swap as ntohl on
		 * all platforms, but ntohl (device-to-host) would express the
		 * intent here, matching the other branches. */
		wqe = htonl(cqe->wqe);
		wq = NULL;
		wqe_index = wqe >> srq->wqe_shift;
		wc->wr_id = srq->wrid[wqe_index];
		mthca_free_srq_wqe(srq, wqe_index);
	} else {
		int32_t wqe;
		wq = &(*cur_qp)->rq;
		wqe = ntohl(cqe->wqe);
		wqe_index = wqe >> wq->wqe_shift;
		/*
		 * WQE addr == base - 1 might be reported by Sinai FW
		 * 1.0.800 and Arbel FW 5.1.400 in receive completion
		 * with error instead of (rq size - 1). This bug
		 * should be fixed in later FW revisions.
		 */
		if (wqe_index < 0)
			wqe_index = wq->max - 1;
		wc->wr_id = (*cur_qp)->wrid[wqe_index];
	}
	/* Advance the work queue tail past the completed WQE (with wrap). */
	if (wq) {
		if (wq->last_comp < wqe_index)
			wq->tail += wqe_index - wq->last_comp;
		else
			wq->tail += wqe_index + wq->max - wq->last_comp;
		wq->last_comp = wqe_index;
	}
	if (is_error) {
		err = handle_error_cqe(cq, *cur_qp, wqe_index, is_send,
				       (struct mthca_err_cqe *) cqe,
				       wc, &free_cqe);
		goto out;
	}
	if (is_send) {
		wc->wc_flags = 0;
		switch (cqe->opcode) {
		case MTHCA_OPCODE_RDMA_WRITE:
			wc->opcode = IBV_WC_RDMA_WRITE;
			break;
		case MTHCA_OPCODE_RDMA_WRITE_IMM:
			wc->opcode = IBV_WC_RDMA_WRITE;
			wc->wc_flags |= IBV_WC_WITH_IMM;
			break;
		case MTHCA_OPCODE_SEND:
			wc->opcode = IBV_WC_SEND;
			break;
		case MTHCA_OPCODE_SEND_IMM:
			wc->opcode = IBV_WC_SEND;
			wc->wc_flags |= IBV_WC_WITH_IMM;
			break;
		case MTHCA_OPCODE_RDMA_READ:
			wc->opcode = IBV_WC_RDMA_READ;
			wc->byte_len = ntohl(cqe->byte_cnt);
			break;
		case MTHCA_OPCODE_ATOMIC_CS:
			wc->opcode = IBV_WC_COMP_SWAP;
			wc->byte_len = ntohl(cqe->byte_cnt);
			break;
		case MTHCA_OPCODE_ATOMIC_FA:
			wc->opcode = IBV_WC_FETCH_ADD;
			wc->byte_len = ntohl(cqe->byte_cnt);
			break;
		case MTHCA_OPCODE_BIND_MW:
			wc->opcode = IBV_WC_BIND_MW;
			break;
		default:
			/* assume it's a send completion */
			wc->opcode = IBV_WC_SEND;
			break;
		}
	} else {
		wc->byte_len = ntohl(cqe->byte_cnt);
		switch (cqe->opcode & 0x1f) {
		case IBV_OPCODE_SEND_LAST_WITH_IMMEDIATE:
		case IBV_OPCODE_SEND_ONLY_WITH_IMMEDIATE:
			wc->wc_flags = IBV_WC_WITH_IMM;
			wc->imm_data = cqe->imm_etype_pkey_eec;
			wc->opcode = IBV_WC_RECV;
			break;
		case IBV_OPCODE_RDMA_WRITE_LAST_WITH_IMMEDIATE:
		case IBV_OPCODE_RDMA_WRITE_ONLY_WITH_IMMEDIATE:
			wc->wc_flags = IBV_WC_WITH_IMM;
			wc->imm_data = cqe->imm_etype_pkey_eec;
			wc->opcode = IBV_WC_RECV_RDMA_WITH_IMM;
			break;
		default:
			wc->wc_flags = 0;
			wc->opcode = IBV_WC_RECV;
			break;
		}
		wc->slid = ntohs(cqe->rlid);
		wc->sl = ntohs(cqe->sl_g_mlpath) >> 12;
		wc->src_qp = ntohl(cqe->rqpn) & 0xffffff;
		wc->dlid_path_bits = ntohs(cqe->sl_g_mlpath) & 0x7f;
		wc->pkey_index = ntohl(cqe->imm_etype_pkey_eec) >> 16;
		wc->wc_flags |= ntohs(cqe->sl_g_mlpath) & 0x80 ?
			IBV_WC_GRH : 0;
	}
	wc->status = IBV_WC_SUCCESS;
out:
	if (free_cqe) {
		set_cqe_hw(cqe);
		++(*freed);
		++cq->cons_index;
	}
	return err;
}
/*
 * Poll up to 'ne' completions from the CQ into 'wc'.  Returns the
 * number polled, or CQ_POLL_ERR on failure; the consumer index is
 * pushed to the hardware once for all freed entries.
 */
int mthca_poll_cq(struct ibv_cq *ibcq, int ne, struct ibv_wc *wc)
{
	struct mthca_cq *cq = to_mcq(ibcq);
	struct mthca_qp *qp = NULL;
	int npolled;
	int err = CQ_OK;
	int freed = 0;
	pthread_spin_lock(&cq->lock);
	for (npolled = 0; npolled < ne; ++npolled) {
		err = mthca_poll_one(cq, &qp, &freed, wc + npolled);
		if (err != CQ_OK)
			break;
	}
	if (freed) {
		wmb(); /* order CQE ownership writes before the doorbell */
		update_cons_index(cq, freed);
	}
	pthread_spin_unlock(&cq->lock);
	return err == CQ_POLL_ERR ? err : npolled;
}
/*
 * Arm a Tavor-mode CQ: ring the doorbell requesting an event on the
 * next completion (or the next solicited completion).
 */
int mthca_tavor_arm_cq(struct ibv_cq *cq, int solicited)
{
	uint32_t doorbell[2];
	doorbell[0] = htonl((solicited ?
			     MTHCA_TAVOR_CQ_DB_REQ_NOT_SOL :
			     MTHCA_TAVOR_CQ_DB_REQ_NOT) |
			    to_mcq(cq)->cqn);
	doorbell[1] = 0xffffffff;
	mthca_write64(doorbell, to_mctx(cq->context), MTHCA_CQ_DOORBELL);
	return 0;
}
/*
 * Arm a mem-free (Arbel) CQ: update the arm doorbell record in host
 * memory with the current consumer index and arm sequence number, then
 * ring the MMIO doorbell.
 */
int mthca_arbel_arm_cq(struct ibv_cq *ibvcq, int solicited)
{
	struct mthca_cq *cq = to_mcq(ibvcq);
	uint32_t doorbell[2];
	uint32_t sn;
	uint32_t ci;
	sn = cq->arm_sn & 3;
	ci = htonl(cq->cons_index);
	doorbell[0] = ci;
	doorbell[1] = htonl((cq->cqn << 8) | (2 << 5) | (sn << 3) |
			    (solicited ? 1 : 2));
	mthca_write_db_rec(doorbell, cq->arm_db);
	/*
	 * Make sure that the doorbell record in host memory is
	 * written before ringing the doorbell via PCI MMIO.
	 */
	wmb();
	doorbell[0] = htonl((sn << 28) |
			    (solicited ?
			     MTHCA_ARBEL_CQ_DB_REQ_NOT_SOL :
			     MTHCA_ARBEL_CQ_DB_REQ_NOT) |
			    cq->cqn);
	doorbell[1] = ci;
	mthca_write64(doorbell, to_mctx(ibvcq->context), MTHCA_CQ_DOORBELL);
	return 0;
}
/* CQ event callback: bump the arm sequence number for the next arm. */
void mthca_arbel_cq_event(struct ibv_cq *cq)
{
	to_mcq(cq)->arm_sn++;
}
/*
 * Return nonzero if this CQE describes a receive completion.  Error
 * CQEs encode the direction in opcode bit 0; successful CQEs use the
 * top bit of is_send.
 */
static inline int is_recv_cqe(struct mthca_cqe *cqe)
{
	int is_error = (cqe->opcode & MTHCA_ERROR_CQE_OPCODE_MASK) ==
		MTHCA_ERROR_CQE_OPCODE_MASK;

	if (is_error)
		return !(cqe->opcode & 0x01);

	return !(cqe->is_send & 0x80);
}
/*
 * Remove every CQE belonging to QP 'qpn' from the CQ; caller holds the
 * CQ lock.  Receive WQEs are returned to 'srq' when one is in use.
 */
void __mthca_cq_clean(struct mthca_cq *cq, uint32_t qpn, struct mthca_srq *srq)
{
	struct mthca_cqe *cqe;
	uint32_t prod_index;
	int i, nfreed = 0;
	/*
	 * First we need to find the current producer index, so we
	 * know where to start cleaning from. It doesn't matter if HW
	 * adds new entries after this loop -- the QP we're worried
	 * about is already in RESET, so the new entries won't come
	 * from our QP and therefore don't need to be checked.
	 */
	for (prod_index = cq->cons_index;
	     cqe_sw(cq, prod_index & cq->ibv_cq.cqe);
	     ++prod_index)
		if (prod_index == cq->cons_index + cq->ibv_cq.cqe)
			break;
	/*
	 * Now sweep backwards through the CQ, removing CQ entries
	 * that match our QP by copying older entries on top of them.
	 */
	while ((int) --prod_index - (int) cq->cons_index >= 0) {
		cqe = get_cqe(cq, prod_index & cq->ibv_cq.cqe);
		if (cqe->my_qpn == htonl(qpn)) {
			if (srq && is_recv_cqe(cqe))
				mthca_free_srq_wqe(srq,
						   ntohl(cqe->wqe) >> srq->wqe_shift);
			++nfreed;
		} else if (nfreed)
			memcpy(get_cqe(cq, (prod_index + nfreed) & cq->ibv_cq.cqe),
			       cqe, MTHCA_CQ_ENTRY_SIZE);
	}
	/* Give the freed slots back to hardware and publish the new index. */
	if (nfreed) {
		for (i = 0; i < nfreed; ++i)
			set_cqe_hw(get_cqe(cq, (cq->cons_index + i) & cq->ibv_cq.cqe));
		wmb();
		cq->cons_index += nfreed;
		update_cons_index(cq, nfreed);
	}
}
/* Locked wrapper around __mthca_cq_clean(). */
void mthca_cq_clean(struct mthca_cq *cq, uint32_t qpn, struct mthca_srq *srq)
{
	pthread_spin_lock(&cq->lock);
	__mthca_cq_clean(cq, qpn, srq);
	pthread_spin_unlock(&cq->lock);
}
/* Copy all software-owned CQEs into 'buf' as part of resizing the CQ. */
void mthca_cq_resize_copy_cqes(struct mthca_cq *cq, void *buf, int old_cqe)
{
	int i;
	/*
	 * In Tavor mode, the hardware keeps the consumer and producer
	 * indices mod the CQ size. Since we might be making the CQ
	 * bigger, we need to deal with the case where the producer
	 * index wrapped around before the CQ was resized.
	 */
	if (!mthca_is_memfree(cq->ibv_cq.context) && old_cqe < cq->ibv_cq.cqe) {
		cq->cons_index &= old_cqe;
		if (cqe_sw(cq, old_cqe))
			cq->cons_index -= old_cqe + 1;
	}
	for (i = cq->cons_index; cqe_sw(cq, i & old_cqe); ++i)
		memcpy(buf + (i & cq->ibv_cq.cqe) * MTHCA_CQ_ENTRY_SIZE,
		       get_cqe(cq, i & old_cqe), MTHCA_CQ_ENTRY_SIZE);
}
/*
 * Allocate a page-aligned buffer for 'nent' CQEs, each initially owned
 * by the hardware.  Returns 0 on success, -1 on allocation failure.
 */
int mthca_alloc_cq_buf(struct mthca_device *dev, struct mthca_buf *buf, int nent)
{
	int i;
	if (mthca_alloc_buf(buf, align(nent * MTHCA_CQ_ENTRY_SIZE, dev->page_size),
			    dev->page_size))
		return -1;
	for (i = 0; i < nent; ++i)
		((struct mthca_cqe *) buf->buf)[i].owner = MTHCA_CQ_ENTRY_OWNER_HW;
	return 0;
}
| dplbsd/soc2013 | head/contrib/ofed/libmthca/src/cq.c | C | bsd-2-clause | 16,469 |
{-# LANGUAGE OverloadedStrings, TupleSections #-}
-- | Parser components for the ROS message description language (@msg@
-- files). See http://wiki.ros.org/msg for reference.
module Parse (parseMsg, parseSrv, simpleFieldAssoc) where
import Prelude hiding (takeWhile)
import Control.Applicative
import Control.Arrow ((&&&))
import Data.Attoparsec.ByteString.Char8
import Data.ByteString (ByteString)
import Data.ByteString.Char8 (pack, unpack)
import qualified Data.ByteString.Char8 as B
import Data.Char (toLower, digitToInt)
import Data.Either (partitionEithers)
import Data.List (foldl')
import System.FilePath (dropExtension, takeFileName, splitDirectories)
import Types
-- | Every built-in (non-composite) ROS field type.
simpleFieldTypes :: [MsgType]
simpleFieldTypes = [ RBool, RInt8, RUInt8, RInt16, RUInt16, RInt32, RUInt32,
                     RInt64, RUInt64, RFloat32, RFloat64, RString,
                     RTime, RDuration, RByte, RChar ]
-- | Pair each built-in type with its msg-file spelling, derived by
-- lower-casing the constructor name without its leading R
-- (e.g. 'RUInt8' -> "uint8").
simpleFieldAssoc :: [(MsgType, ByteString)]
simpleFieldAssoc = map (id &&& B.pack . map toLower . tail . show)
                       simpleFieldTypes
-- | Discard the rest of the current line (or input) and any whitespace
-- that follows.
eatLine :: Parser ()
eatLine = manyTill anyChar (eitherP endOfLine endOfInput) *> skipSpace
-- | Parse a name, then discard the remainder of the line and any
-- trailing comment lines.
parseName :: Parser ByteString
parseName = skipSpace *> identifier <* eatLine <* try comment
-- | An identifier: an ASCII letter followed by letters, digits, '_' or
-- '/' (the slash allows package-qualified message type names).
identifier :: Parser ByteString
identifier = B.cons <$> letter_ascii <*> takeWhile validChar
  where validChar c = any ($ c) [isDigit, isAlpha_ascii, (== '_'), (== '/')]
-- | Parse a non-negative decimal 'Int' (used for fixed-array lengths).
-- Uses attoparsec's built-in 'decimal', which is equivalent to folding
-- over @many1 digit@ but avoids building an intermediate list.
parseInt :: Parser Int
parseInt = decimal
-- | Skip zero or more comment lines ('#' to end of line).
comment :: Parser [()]
comment = many $ skipSpace *> try (char '#' *> eatLine)
-- | Field of a built-in type, e.g. @uint32 seq@.
simpleParser :: (MsgType, ByteString) -> Parser (ByteString, MsgType)
simpleParser (t,b) = (, t) <$> (string b *> space *> parseName)
-- | Fixed-length array of a built-in type, e.g. @uint8[16] data@.
fixedArrayParser :: (MsgType, ByteString) -> Parser (ByteString, MsgType)
fixedArrayParser (t,b) = (\len name -> (name, RFixedArray len t)) <$>
                         (string b *> char '[' *> parseInt <* char ']') <*>
                         (space *> parseName)
-- | Variable-length array of a built-in type, e.g. @float64[] ranges@.
varArrayParser :: (MsgType, ByteString) -> Parser (ByteString, MsgType)
varArrayParser (t,b) = (, RVarArray t) <$>
                       (string b *> string "[]" *> space *> parseName)
-- | Field whose type is a user-defined message: plain, variable-length
-- array or fixed-length array.
userTypeParser :: Parser (ByteString, MsgType)
userTypeParser = choice [userSimple, userVarArray, userFixedArray]
-- | Plain user-typed field, e.g. @geometry_msgs/Pose pose@.
userSimple :: Parser (ByteString, MsgType)
userSimple = (\t name -> (name, RUserType t)) <$>
             identifier <*> (space *> parseName)
-- | Variable-length array of a user type, e.g. @Pose[] poses@.
userVarArray :: Parser (ByteString, MsgType)
userVarArray = (\t name -> (name, RVarArray (RUserType t))) <$>
               identifier <*> (string "[]" *> space *> parseName)
-- | Fixed-length array of a user type, e.g. @Pose[4] corners@.
userFixedArray :: Parser (ByteString, MsgType)
userFixedArray = (\t n name -> (name, RFixedArray n (RUserType t))) <$>
                 identifier <*>
                 (char '[' *> parseInt <* char ']') <*>
                 (space *> parseName)
-- Parse constants defined in the message
-- | Constant of the built-in type spelled @s@, e.g. @int32 FOO=42@.
-- The value is the raw remainder of the line; numeric values are
-- trimmed later by 'sanitizeConstants'.
constParser :: ByteString -> MsgType ->
               Parser (ByteString, MsgType, ByteString)
constParser s x = (,x,) <$>
                  (string s *> space *> identifier) <*>
                  (skipSpace *> char '=' *> skipSpace *> restOfLine <* skipSpace)
    where restOfLine :: Parser ByteString
          restOfLine = pack <$> manyTill anyChar (eitherP endOfLine endOfInput)
-- | A constant parser for every built-in field type.
constParsers :: [Parser (ByteString, MsgType, ByteString)]
constParsers = map (uncurry constParser . swap) simpleFieldAssoc
  where swap (x,y) = (y,x)
-- String constants are parsed somewhat differently from numeric
-- constants. For numerical constants, we drop comments and trailing
-- spaces. For strings, we take the whole line (so comments aren't
-- stripped).
-- | Trim comments and trailing whitespace from non-string constant
-- values; string constants keep the whole line (see note above).
sanitizeConstants :: (a, MsgType, ByteString) -> (a, MsgType, ByteString)
sanitizeConstants c@(_, RString, _) = c
sanitizeConstants (name, t, val) =
  (name, t, B.takeWhile (\c -> c /= '#' && not (isSpace c)) val)
-- Parsers fields and constants.
-- | All field and constant parsers; constants are tried first so that
-- @TYPE NAME=VAL@ lines are not misparsed as field declarations.
fieldParsers :: [Parser (Either (ByteString, MsgType)
                                (ByteString, MsgType, ByteString))]
fieldParsers = map (comment *>) $
               map (Right . sanitizeConstants <$>) constParsers ++
               map (Left <$>) (builtIns ++ [userTypeParser])
    where builtIns = concatMap (`map` simpleFieldAssoc)
                               [simpleParser, fixedArrayParser, varArrayParser]
-- | Parser for a whole message body, given its short name, package name
-- and raw source text (kept verbatim in the resulting 'Msg').
mkParser :: MsgName -> String -> ByteString -> Parser Msg
mkParser sname lname txt = aux . partitionEithers <$> many (choice fieldParsers)
  where aux (fs, cs) = Msg sname lname txt
                           (map buildField fs)
                           (map buildConst cs)
-- | Wrap a parsed field; the generated Haskell accessor is the ROS name
-- prefixed with an underscore.
buildField :: (ByteString, MsgType) -> MsgField
buildField (name,typ) = MsgField fname typ name
    where fname = B.append "_" $ sanitize name
-- | Wrap a parsed constant; the Haskell name is sanitized and lower-cased.
buildConst :: (ByteString, MsgType, ByteString) -> MsgConst
buildConst (name,typ,val) = MsgConst fname typ val name
    where fname = B.map toLower $ sanitize name
{-
testMsg :: ByteString
testMsg = "# Foo bar\n\n# \nHeader header # a header\nuint32 aNum # a number \n # It's not important\ngeometry_msgs/PoseStamped[] poses\nbyte DEBUG=1 #debug level\n"
test :: Result Msg
test = feed (parse (comment *> (mkParser "" "" testMsg)) testMsg) ""
-}
-- Ensure that field and constant names are valid Haskell identifiers
-- and do not coincide with Haskell reserved words.
-- | Map reserved words to underscore-prefixed names, otherwise
-- lower-case the first character.  Total even on the empty string
-- (the previous 'B.head'/'B.tail' version would crash on it), although
-- 'identifier' never produces empty names.
sanitize :: ByteString -> ByteString
sanitize "data" = "_data"
sanitize "type" = "_type"
sanitize "class" = "_class"
sanitize "module" = "_module"
sanitize x = case B.uncons x of
               Just (c, rest) -> B.cons (toLower c) rest
               Nothing        -> x
-- | Extract the ROS package name from a message file path, assuming the
-- layout @<pkg>/msg/<file>.msg@ (i.e. at least three path components).
-- NOTE(review): the list pattern is partial -- a shorter path would
-- crash; confirm callers always pass full paths.
pkgName :: FilePath -> String
pkgName f = let parts = splitDirectories f
                [pkg,_,_msgFile] = drop (length parts - 3) parts
            in pkg
-- | Parse a @.msg@ file from disk; the message name is the file's base
-- name and the package name is taken from the directory layout.
parseMsg :: FilePath -> IO (Either String Msg)
parseMsg fname = do msgFile <- B.readFile fname
                    let tName = msgName . dropExtension . takeFileName $ fname
                        packageName = pkgName fname
                    return $ parseMsgWithName tName packageName msgFile
-- Parse the contents of a message definition given its type and package
-- names. Feeding an empty chunk terminates attoparsec's incremental
-- parse; any unconsumed input is reported as a parse failure.
parseMsgWithName :: MsgName -> String -> ByteString -> Either String Msg
parseMsgWithName name packageName msgFile =
    case feed (parse parser msgFile) "" of
      Done leftOver msg
        | B.null leftOver -> Right msg
        | otherwise -> Left $ "Couldn't parse " ++
                              unpack leftOver
      Fail _ _ctxt err -> Left err
      -- After 'feed' with an empty chunk a Partial result should not
      -- occur, but report it rather than crash if it ever does.
      Partial _ -> Left "Incomplete msg definition"
  where
    parser = comment *> mkParser name packageName msgFile
-- | Parse a service file by splitting the file into a request and a
-- response part and parsing each part separately. The two parsed
-- messages are named <service>Request / <service>Response.
parseSrv :: FilePath -> IO (Either String Srv)
parseSrv fname = do
  srvFile <- B.readFile fname
  let (request, response) = splitService srvFile
      packageName = pkgName fname
      rawServiceName = dropExtension . takeFileName $ fname
  -- Either monad: the first part that fails to parse aborts the whole
  -- result with its error message.
  return $ do
    rqst <- parseMsgWithName (requestMsgName rawServiceName) packageName request
    resp <- parseMsgWithName (responseMsgName rawServiceName) packageName response
    return Srv{srvRequest = rqst
              , srvResponse = resp
              , srvName = msgName rawServiceName
              , srvPackage = packageName
              , srvSource = srvFile}
-- Split a service definition into its request and response parts at the
-- "---" divider. The divider deliberately contains no newlines so that
-- it still matches when the request or response section is empty.
-- At most one newline immediately following the divider is stripped
-- from the response; unlike unconditionally dropping one extra byte,
-- this is safe when the divider is the last thing in the file or is not
-- followed by a newline.
splitService :: ByteString -> (ByteString, ByteString)
splitService service = (request, response)
  where
    divider = "---"
    (request, rest) = B.breakSubstring divider service
    afterDivider = B.drop (B.length divider) rest
    response = case B.uncons afterDivider of
                 Just ('\n', remainder) -> remainder
                 _ -> afterDivider
| bitemyapp/roshask | src/executable/Parse.hs | Haskell | bsd-3-clause | 7,585 |
"""Tkinter GUI for pylint"""
from Tkinter import Tk, Frame, Listbox, Entry, Label, Button, Scrollbar
from Tkinter import TOP, LEFT, RIGHT, BOTTOM, END, X, Y, BOTH
import os
import sys
# Name of the pylint launcher to invoke: Windows installs a batch
# wrapper, other platforms a plain executable script.
if sys.platform.startswith('win'):
    PYLINT = 'pylint.bat'
else:
    PYLINT = 'pylint'
class LintGui:
    """Build and control a window to interact with pylint.

    The window is three stacked areas: an input row (module/package name
    plus a Run button), a scrollable results listbox, and a Quit button.
    """

    def __init__(self, root=None):
        # Reuse the caller's Tk root if given, otherwise create one.
        self.root = root or Tk()
        self.root.title('Pylint')
        top_frame = Frame(self.root)
        res_frame = Frame(self.root)
        btn_frame = Frame(self.root)
        top_frame.pack(side=TOP, fill=X)
        res_frame.pack(side=TOP, fill=BOTH, expand=True)
        btn_frame.pack(side=TOP, fill=X)

        # Input row: entry bound to <Return> plus an explicit Run button.
        Label(top_frame, text='Module or package').pack(side=LEFT)
        self.txtModule = Entry(top_frame, background='white')
        self.txtModule.bind('<Return>', self.run_lint)
        self.txtModule.pack(side=LEFT, expand=True, fill=X)
        Button(top_frame, text='Run', command=self.run_lint).pack(side=LEFT)

        # Results area: listbox (one pylint output line per row) with a
        # vertical scrollbar wired both ways.
        scrl = Scrollbar(res_frame)
        self.results = Listbox(res_frame,
                               background='white',
                               font='fixedsys',
                               selectmode='browse',
                               yscrollcommand=scrl.set)
        scrl.configure(command=self.results.yview)
        self.results.pack(side=LEFT, expand=True, fill=BOTH)
        scrl.pack(side=RIGHT, fill=Y)

        Button(btn_frame, text='Quit', command=self.quit).pack(side=BOTTOM)
        #self.root.bind('<ctrl-q>', self.quit)
        self.txtModule.focus_set()

    def mainloop(self):
        """launch the mainloop of the application"""
        self.root.mainloop()

    def quit(self, _=None):
        """quit the application"""
        self.root.quit()

    def run_lint(self, _=None):
        """Run pylint on the entered module and display its output,
        one colorized line per listbox row."""
        # Map a line's two-character prefix to a display color.
        # The original table listed 'W:' twice ('red1' then 'red3');
        # only the last binding ever took effect, so the dead 'red1'
        # entry has been dropped.
        colors = {'W:': 'red3', 'E:': 'red4', '**': 'navy'}

        # Busy cursor while the subprocess runs; reset at the end.
        self.root.configure(cursor='watch')
        self.results.focus_set()
        self.results.delete(0, END)
        self.results.update()
        module = self.txtModule.get()

        pout = os.popen('%s %s' % (PYLINT, module), 'r')
        # Iterate the pipe directly instead of the long-deprecated
        # xreadlines(); behavior (lazy line iteration) is identical.
        for line in pout:
            line = line.rstrip()
            self.results.insert(END, line)
            fg_color = colors.get(line[:2], 'black')
            self.results.itemconfigure(END, fg=fg_color)
            self.results.update()
        self.root.configure(cursor='')
def Run(args):
    """launch pylint gui from args"""
    # Any positional argument is a usage error: the GUI takes no
    # command-line arguments (the module name is typed into the window).
    if args:
        print 'USAGE: pylint-gui\n launch a simple pylint gui using Tk'
        return
    gui = LintGui()
    gui.mainloop()
if __name__ == '__main__':
Run(sys.argv[1:])
| dbbhattacharya/kitsune | vendor/packages/pylint/gui.py | Python | bsd-3-clause | 2,790 |
<?php
/**
 * Store backend — category detail page (setup section).
 * Configures the breadcrumb trail, the page title and the context menu
 * (list / create / per-record update, view and delete actions) for the
 * detail view rendered below.
 */
$this->breadcrumbs = [
    Yii::t('StoreModule.category', 'Categories') => ['index'],
    $model->name,
];

$this->pageTitle = Yii::t('StoreModule.category', 'Categories - view');

$this->menu = [
    ['icon' => 'fa fa-fw fa-list-alt', 'label' => Yii::t('StoreModule.category', 'Manage categories'), 'url' => ['/store/categoryBackend/index']],
    ['icon' => 'fa fa-fw fa-plus-square', 'label' => Yii::t('StoreModule.category', 'Create category'), 'url' => ['/store/categoryBackend/create']],
    // Non-clickable header item naming the current record (name truncated
    // to 32 characters).
    ['label' => Yii::t('StoreModule.category', 'Category') . ' «' . mb_substr($model->name, 0, 32) . '»'],
    [
        'icon' => 'fa fa-fw fa-pencil',
        'label' => Yii::t('StoreModule.category', 'Update category'),
        'url' => [
            '/store/categoryBackend/update',
            'id' => $model->id
        ]
    ],
    [
        'icon' => 'fa fa-fw fa-eye',
        'label' => Yii::t('StoreModule.category', 'View category'),
        'url' => [
            '/store/categoryBackend/view',
            'id' => $model->id
        ]
    ],
    [
        'icon' => 'fa fa-fw fa-trash-o',
        'label' => Yii::t('StoreModule.category', 'Delete category'),
        'url' => '#',
        // Deletion is submitted as a CSRF-protected POST with a
        // confirmation prompt rather than a plain GET link.
        'linkOptions' => [
            'submit' => ['/store/categoryBackend/delete', 'id' => $model->id],
            'params' => [Yii::app()->getRequest()->csrfTokenName => Yii::app()->getRequest()->csrfToken],
            'confirm' => Yii::t('StoreModule.category', 'Do you really want to remove category?'),
            'csrf' => true,
        ]
    ],
];
?>
<div class="page-header">
    <h1>
        <?php echo Yii::t('StoreModule.category', 'Viewing category'); ?><br/>
        <small>«<?php echo $model->name; ?>»</small>
    </h1>
</div>

<?php
// Render the category attributes in a bootstrap detail table.
// Attributes declared with 'type' => 'raw' (image, descriptions) are
// emitted without HTML escaping.
$this->widget(
    'bootstrap.widgets.TbDetailView',
    [
        'data' => $model,
        'attributes' => [
            'id',
            [
                'name' => 'parent_id',
                'value' => $model->getParentName(),
            ],
            'name',
            'slug',
            [
                'name' => 'image',
                'type' => 'raw',
                // Show a 200x200 thumbnail, or a placeholder when unset.
                'value' => $model->image ? CHtml::image($model->getImageUrl(200, 200), $model->name) : '---',
            ],
            [
                'name' => 'description',
                'type' => 'raw'
            ],
            [
                'name' => 'short_description',
                'type' => 'raw'
            ],
            [
                'name' => 'status',
                'value' => $model->getStatus(),
            ],
        ],
    ]
); ?>
| mettoff/archive | protected/modules/store/views/categoryBackend/view.php | PHP | bsd-3-clause | 2,603 |
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @format
* @flow
*/
'use strict';
const EventEmitter = require('../../vendor/emitter/EventEmitter');
const RCTDeviceEventEmitter = require('../RCTDeviceEventEmitter');
/**
 * Mock the NativeEventEmitter as a normal JS EventEmitter.
 *
 * The mock subscribes through RCTDeviceEventEmitter's shared subscriber,
 * so events emitted via RCTDeviceEventEmitter in tests are delivered to
 * listeners registered on instances of this class.
 */
class NativeEventEmitter extends EventEmitter {
  constructor() {
    super(RCTDeviceEventEmitter.sharedSubscriber);
  }
}

module.exports = NativeEventEmitter;
| exponentjs/react-native | Libraries/EventEmitter/__mocks__/NativeEventEmitter.js | JavaScript | bsd-3-clause | 592 |
/* -----------------------------------------------------------------------------
*
* (c) The GHC Team, 1998-2005
*
* Statistics and timing-related functions.
*
* ---------------------------------------------------------------------------*/
#ifndef STATS_H
#define STATS_H

#include "GetTime.h"

#include "BeginPrivate.h"

#if defined(mingw32_HOST_OS)
/* On Win64, if we say "printf" then gcc thinks we are going to use
   MS format specifiers like %I64d rather than %llu */
#define PRINTF gnu_printf
#else
/* However, on OS X, "gnu_printf" isn't recognised */
#define PRINTF printf
#endif

struct gc_thread_;

/* Timing of RTS initialisation. */
void      stat_startInit(void);
void      stat_endInit(void);

/* Per-collection statistics, bracketing each GC. */
void      stat_startGCSync(struct gc_thread_ *_gct);
void      stat_startGC(Capability *cap, struct gc_thread_ *_gct);
void      stat_endGC  (Capability *cap, struct gc_thread_ *_gct, W_ live,
                       W_ copied, W_ slop, uint32_t gen, uint32_t n_gc_threads,
                       W_ par_max_copied, W_ par_tot_copied);

#ifdef PROFILING
/* Retainer-profiling timing. */
void      stat_startRP(void);
void      stat_endRP(uint32_t,
#ifdef DEBUG_RETAINER
                     uint32_t, int,
#endif
                     double);
#endif /* PROFILING */

#if defined(PROFILING) || defined(DEBUG)
/* Heap-census timing. */
void      stat_startHeapCensus(void);
void      stat_endHeapCensus(void);
#endif

/* Timing of RTS shutdown and the final statistics report. */
void      stat_startExit(void);
void      stat_endExit(void);
void      stat_exit(void);
void      stat_workerStop(void);

/* Two-phase stats initialisation. NOTE(review): the exact phase split
   (presumably before/after RtsFlags are processed) is not visible from
   this header — confirm against Stats.c. */
void      initStats0(void);
void      initStats1(void);

/* Mutator (non-GC) CPU time accounting. */
double    mut_user_time_until(Time t);
double    mut_user_time(void);

/* Debug dump of per-generation information. */
void      statDescribeGens( void );

/* Accumulated elapsed GC time / total elapsed time. */
Time      stat_getElapsedGCTime(void);
Time      stat_getElapsedTime(void);

#include "EndPrivate.h"

#endif /* STATS_H */
| snoyberg/ghc | rts/Stats.h | C | bsd-3-clause | 1,762 |
# Generated by Django 2.2.6 on 2019-10-23 09:06
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import olympia.amo.models
class Migration(migrations.Migration):
    """Create the ``ScannerMatch`` through-model joining ScannerResult to
    ScannerRule, and add the corresponding ``matched_rules`` many-to-many
    field on ScannerResult routed through it.

    Auto-generated by Django 2.2.6; do not hand-edit the operations.
    """

    dependencies = [
        ('scanners', '0008_auto_20191021_1718'),
    ]

    operations = [
        migrations.CreateModel(
            name='ScannerMatch',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(blank=True, default=django.utils.timezone.now, editable=False)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('result', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='scanners.ScannerResult')),
                ('rule', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='scanners.ScannerRule')),
            ],
            options={
                'get_latest_by': 'created',
                'abstract': False,
                'base_manager_name': 'objects',
            },
            bases=(olympia.amo.models.SearchMixin, olympia.amo.models.SaveUpdateMixin, models.Model),
        ),
        migrations.AddField(
            model_name='scannerresult',
            name='matched_rules',
            field=models.ManyToManyField(through='scanners.ScannerMatch', to='scanners.ScannerRule'),
        ),
    ]
| bqbn/addons-server | src/olympia/scanners/migrations/0009_auto_20191023_0906.py | Python | bsd-3-clause | 1,450 |
#
# Copyright (c) 2012, NuoDB, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of NuoDB, Inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL NUODB, INC. BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Gem entry point: loading this file pulls in the NuoDB ActiveRecord
# connection adapter implementation (presumably registering it under the
# 'nuodb' adapter name — confirm in the required file).
require 'active_record/connection_adapters/nuodb_adapter'
| scarey/nuodb-drivers | ruby/activerecord-nuodb-adapter/lib/activerecord-nuodb-adapter.rb | Ruby | bsd-3-clause | 1,595 |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/sync_file_system/local/local_file_sync_context.h"
#include <vector>
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/file_util.h"
#include "base/files/file_path.h"
#include "base/message_loop/message_loop.h"
#include "base/stl_util.h"
#include "chrome/browser/sync_file_system/local/canned_syncable_file_system.h"
#include "chrome/browser/sync_file_system/local/local_file_change_tracker.h"
#include "chrome/browser/sync_file_system/local/sync_file_system_backend.h"
#include "chrome/browser/sync_file_system/sync_file_metadata.h"
#include "chrome/browser/sync_file_system/sync_status_code.h"
#include "chrome/browser/sync_file_system/syncable_file_system_util.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/test/mock_blob_url_request_context.h"
#include "content/public/test/test_browser_thread_bundle.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/leveldatabase/src/helpers/memenv/memenv.h"
#include "third_party/leveldatabase/src/include/leveldb/env.h"
#include "webkit/browser/fileapi/file_system_context.h"
#include "webkit/browser/fileapi/file_system_operation_runner.h"
#include "webkit/browser/fileapi/isolated_context.h"
#include "webkit/common/blob/scoped_file.h"
#define FPL FILE_PATH_LITERAL
using content::BrowserThread;
using fileapi::FileSystemContext;
using fileapi::FileSystemURL;
using fileapi::FileSystemURLSet;
// This tests LocalFileSyncContext behavior in multi-thread /
// multi-file-system-context environment.
// Basic combined tests (single-thread / single-file-system-context)
// that involve LocalFileSyncContext are also in
// syncable_file_system_unittests.cc.
namespace sync_file_system {
namespace {

// Origins used to construct the canned syncable file systems under test.
const char kOrigin1[] = "http://example.com";
const char kOrigin2[] = "http://chromium.org";

}  // namespace
// Fixture that drives LocalFileSyncContext from the UI thread while file
// operations run on real IO/FILE browser threads, mirroring the
// production threading model.
//
// Conventions used throughout:
//  - |status_| / |file_error_| carry the result of the last async sync /
//    file operation; they are set by the Did* callbacks, which also quit
//    (or flag completion for) the running message loop.
//  - Helpers that block do so by running the current message loop until
//    the matching callback fires.
class LocalFileSyncContextTest : public testing::Test {
 protected:
  LocalFileSyncContextTest()
      : thread_bundle_(
            content::TestBrowserThreadBundle::REAL_FILE_THREAD |
            content::TestBrowserThreadBundle::REAL_IO_THREAD),
        status_(SYNC_FILE_ERROR_FAILED),
        file_error_(base::File::FILE_ERROR_FAILED),
        async_modify_finished_(false),
        has_inflight_prepare_for_sync_(false) {}

  virtual void SetUp() OVERRIDE {
    RegisterSyncableFileSystem();
    ASSERT_TRUE(dir_.CreateUniqueTempDir());
    in_memory_env_.reset(leveldb::NewMemEnv(leveldb::Env::Default()));
    ui_task_runner_ = base::MessageLoop::current()->message_loop_proxy();
    io_task_runner_ = BrowserThread::GetMessageLoopProxyForThread(
        BrowserThread::IO);
    // NOTE(review): the "file" runner is bound to the IO thread here, not
    // the FILE thread — presumably intentional for these tests; confirm.
    file_task_runner_ = BrowserThread::GetMessageLoopProxyForThread(
        BrowserThread::IO);
  }

  virtual void TearDown() OVERRIDE {
    RevokeSyncableFileSystem();
  }

  // Kicks off PrepareForSync without waiting; DidPrepareForSync() records
  // the result and quits the message loop. Only one prepare may be in
  // flight at a time.
  void StartPrepareForSync(FileSystemContext* file_system_context,
                           const FileSystemURL& url,
                           LocalFileSyncContext::SyncMode sync_mode,
                           SyncFileMetadata* metadata,
                           FileChangeList* changes,
                           webkit_blob::ScopedFile* snapshot) {
    ASSERT_TRUE(changes != NULL);
    ASSERT_FALSE(has_inflight_prepare_for_sync_);
    status_ = SYNC_STATUS_UNKNOWN;
    has_inflight_prepare_for_sync_ = true;
    sync_context_->PrepareForSync(
        file_system_context,
        url,
        sync_mode,
        base::Bind(&LocalFileSyncContextTest::DidPrepareForSync,
                   base::Unretained(this), metadata, changes, snapshot));
  }

  // Synchronous wrapper: runs the message loop until the prepare
  // completes and returns the resulting status.
  SyncStatusCode PrepareForSync(FileSystemContext* file_system_context,
                                const FileSystemURL& url,
                                LocalFileSyncContext::SyncMode sync_mode,
                                SyncFileMetadata* metadata,
                                FileChangeList* changes,
                                webkit_blob::ScopedFile* snapshot) {
    StartPrepareForSync(file_system_context, url, sync_mode,
                        metadata, changes, snapshot);
    base::MessageLoop::current()->Run();
    return status_;
  }

  // Closure form of StartPrepareForSync, for use with
  // RegisterURLForWaitingSync().
  base::Closure GetPrepareForSyncClosure(
      FileSystemContext* file_system_context,
      const FileSystemURL& url,
      LocalFileSyncContext::SyncMode sync_mode,
      SyncFileMetadata* metadata,
      FileChangeList* changes,
      webkit_blob::ScopedFile* snapshot) {
    return base::Bind(&LocalFileSyncContextTest::StartPrepareForSync,
                      base::Unretained(this),
                      base::Unretained(file_system_context),
                      url, sync_mode, metadata, changes, snapshot);
  }

  // Completion callback for PrepareForSync: copies the results to the
  // caller-provided out-params and quits the loop.
  void DidPrepareForSync(SyncFileMetadata* metadata_out,
                         FileChangeList* changes_out,
                         webkit_blob::ScopedFile* snapshot_out,
                         SyncStatusCode status,
                         const LocalFileSyncInfo& sync_file_info,
                         webkit_blob::ScopedFile snapshot) {
    ASSERT_TRUE(ui_task_runner_->RunsTasksOnCurrentThread());
    has_inflight_prepare_for_sync_ = false;
    status_ = status;
    *metadata_out = sync_file_info.metadata;
    *changes_out = sync_file_info.changes;
    if (snapshot_out)
      *snapshot_out = snapshot.Pass();
    base::MessageLoop::current()->Quit();
  }

  // Applies |change| to |url| after first disabling writes via an
  // exclusive PrepareForSync; finalization happens in
  // DidApplyRemoteChange().
  SyncStatusCode ApplyRemoteChange(FileSystemContext* file_system_context,
                                   const FileChange& change,
                                   const base::FilePath& local_path,
                                   const FileSystemURL& url,
                                   SyncFileType expected_file_type) {
    SCOPED_TRACE(testing::Message() << "ApplyChange for " <<
                 url.DebugString());

    // First we should call PrepareForSync to disable writing.
    SyncFileMetadata metadata;
    FileChangeList changes;
    EXPECT_EQ(SYNC_STATUS_OK,
              PrepareForSync(file_system_context, url,
                             LocalFileSyncContext::SYNC_EXCLUSIVE,
                             &metadata, &changes, NULL));
    EXPECT_EQ(expected_file_type, metadata.file_type);

    status_ = SYNC_STATUS_UNKNOWN;
    sync_context_->ApplyRemoteChange(
        file_system_context, change, local_path, url,
        base::Bind(&LocalFileSyncContextTest::DidApplyRemoteChange,
                   base::Unretained(this),
                   make_scoped_refptr(file_system_context), url));
    base::MessageLoop::current()->Run();
    return status_;
  }

  void DidApplyRemoteChange(FileSystemContext* file_system_context,
                            const FileSystemURL& url,
                            SyncStatusCode status) {
    status_ = status;
    // Local changes are cleared only when the remote change applied OK.
    sync_context_->FinalizeExclusiveSync(
        file_system_context, url,
        status == SYNC_STATUS_OK /* clear_local_changes */,
        base::MessageLoop::QuitClosure());
  }

  // Posts a Truncate(url, 1) to the IO thread, hopping threads if called
  // from the UI thread. Completion is reported via DidModifyFile().
  void StartModifyFileOnIOThread(CannedSyncableFileSystem* file_system,
                                 const FileSystemURL& url) {
    ASSERT_TRUE(file_system != NULL);
    if (!io_task_runner_->RunsTasksOnCurrentThread()) {
      async_modify_finished_ = false;
      ASSERT_TRUE(ui_task_runner_->RunsTasksOnCurrentThread());
      io_task_runner_->PostTask(
          FROM_HERE,
          base::Bind(&LocalFileSyncContextTest::StartModifyFileOnIOThread,
                     base::Unretained(this), file_system, url));
      return;
    }
    ASSERT_TRUE(io_task_runner_->RunsTasksOnCurrentThread());
    file_error_ = base::File::FILE_ERROR_FAILED;
    file_system->operation_runner()->Truncate(
        url, 1, base::Bind(&LocalFileSyncContextTest::DidModifyFile,
                           base::Unretained(this)));
  }

  // Spins the loop until the in-flight modify completes.
  base::File::Error WaitUntilModifyFileIsDone() {
    while (!async_modify_finished_)
      base::MessageLoop::current()->RunUntilIdle();
    return file_error_;
  }

  // Records the modify result on the UI thread (bouncing from IO first).
  void DidModifyFile(base::File::Error error) {
    if (!ui_task_runner_->RunsTasksOnCurrentThread()) {
      ASSERT_TRUE(io_task_runner_->RunsTasksOnCurrentThread());
      ui_task_runner_->PostTask(
          FROM_HERE,
          base::Bind(&LocalFileSyncContextTest::DidModifyFile,
                     base::Unretained(this), error));
      return;
    }
    ASSERT_TRUE(ui_task_runner_->RunsTasksOnCurrentThread());
    file_error_ = error;
    async_modify_finished_ = true;
  }

  // Finalizes an in-flight sync for |url| according to |sync_mode|.
  void SimulateFinishSync(FileSystemContext* file_system_context,
                          const FileSystemURL& url,
                          SyncStatusCode status,
                          LocalFileSyncContext::SyncMode sync_mode) {
    if (sync_mode == LocalFileSyncContext::SYNC_SNAPSHOT) {
      sync_context_->FinalizeSnapshotSync(
          file_system_context, url, status,
          base::Bind(&base::DoNothing));
    } else {
      sync_context_->FinalizeExclusiveSync(
          file_system_context, url,
          status == SYNC_STATUS_OK /* clear_local_changes */,
          base::Bind(&base::DoNothing));
    }
  }

  // Shared body for the PrepareSync_Sync{Success,Failure}_* tests:
  // create a file, prepare it for sync, simulate sync completion with
  // |simulate_sync_finish_status|, and verify the tracked change is
  // cleared only on SYNC_STATUS_OK.
  void PrepareForSync_Basic(LocalFileSyncContext::SyncMode sync_mode,
                            SyncStatusCode simulate_sync_finish_status) {
    CannedSyncableFileSystem file_system(GURL(kOrigin1),
                                         in_memory_env_.get(),
                                         io_task_runner_.get(),
                                         file_task_runner_.get());
    file_system.SetUp(CannedSyncableFileSystem::QUOTA_ENABLED);
    sync_context_ = new LocalFileSyncContext(
        dir_.path(), in_memory_env_.get(),
        ui_task_runner_.get(), io_task_runner_.get());
    ASSERT_EQ(SYNC_STATUS_OK,
              file_system.MaybeInitializeFileSystemContext(
                  sync_context_.get()));
    ASSERT_EQ(base::File::FILE_OK, file_system.OpenFileSystem());

    const FileSystemURL kFile(file_system.URL("file"));
    EXPECT_EQ(base::File::FILE_OK, file_system.CreateFile(kFile));

    SyncFileMetadata metadata;
    FileChangeList changes;
    EXPECT_EQ(SYNC_STATUS_OK,
              PrepareForSync(file_system.file_system_context(), kFile,
                             sync_mode, &metadata, &changes, NULL));
    EXPECT_EQ(1U, changes.size());
    EXPECT_TRUE(changes.list().back().IsFile());
    EXPECT_TRUE(changes.list().back().IsAddOrUpdate());

    // We should see the same set of changes.
    file_system.GetChangesForURLInTracker(kFile, &changes);
    EXPECT_EQ(1U, changes.size());
    EXPECT_TRUE(changes.list().back().IsFile());
    EXPECT_TRUE(changes.list().back().IsAddOrUpdate());

    SimulateFinishSync(file_system.file_system_context(), kFile,
                       simulate_sync_finish_status, sync_mode);

    file_system.GetChangesForURLInTracker(kFile, &changes);
    if (simulate_sync_finish_status == SYNC_STATUS_OK) {
      // The change's cleared.
      EXPECT_TRUE(changes.empty());
    } else {
      EXPECT_EQ(1U, changes.size());
      EXPECT_TRUE(changes.list().back().IsFile());
      EXPECT_TRUE(changes.list().back().IsAddOrUpdate());
    }

    sync_context_->ShutdownOnUIThread();
    sync_context_ = NULL;

    file_system.TearDown();
  }

  // Shared body for the PrepareSync_WriteDuringSync_* tests: verifies
  // that a write issued while a sync is in flight succeeds immediately
  // in snapshot mode but is held until finalization in exclusive mode,
  // and that the write is re-recorded in the change tracker either way.
  void PrepareForSync_WriteDuringSync(
      LocalFileSyncContext::SyncMode sync_mode) {
    CannedSyncableFileSystem file_system(GURL(kOrigin1),
                                         in_memory_env_.get(),
                                         io_task_runner_.get(),
                                         file_task_runner_.get());
    file_system.SetUp(CannedSyncableFileSystem::QUOTA_ENABLED);
    sync_context_ = new LocalFileSyncContext(
        dir_.path(), in_memory_env_.get(),
        ui_task_runner_.get(), io_task_runner_.get());
    ASSERT_EQ(SYNC_STATUS_OK,
              file_system.MaybeInitializeFileSystemContext(
                  sync_context_.get()));
    ASSERT_EQ(base::File::FILE_OK, file_system.OpenFileSystem());

    const FileSystemURL kFile(file_system.URL("file"));
    EXPECT_EQ(base::File::FILE_OK, file_system.CreateFile(kFile));

    SyncFileMetadata metadata;
    FileChangeList changes;
    webkit_blob::ScopedFile snapshot;
    EXPECT_EQ(SYNC_STATUS_OK,
              PrepareForSync(file_system.file_system_context(), kFile,
                             sync_mode, &metadata, &changes, &snapshot));
    EXPECT_EQ(1U, changes.size());
    EXPECT_TRUE(changes.list().back().IsFile());
    EXPECT_TRUE(changes.list().back().IsAddOrUpdate());

    // A snapshot file is produced only in SYNC_SNAPSHOT mode.
    EXPECT_EQ(sync_mode == LocalFileSyncContext::SYNC_SNAPSHOT,
              !snapshot.path().empty());

    // Tracker keeps same set of changes.
    file_system.GetChangesForURLInTracker(kFile, &changes);
    EXPECT_EQ(1U, changes.size());
    EXPECT_TRUE(changes.list().back().IsFile());
    EXPECT_TRUE(changes.list().back().IsAddOrUpdate());

    StartModifyFileOnIOThread(&file_system, kFile);
    if (sync_mode == LocalFileSyncContext::SYNC_SNAPSHOT) {
      // Write should succeed.
      EXPECT_EQ(base::File::FILE_OK, WaitUntilModifyFileIsDone());
    } else {
      base::MessageLoop::current()->RunUntilIdle();
      EXPECT_FALSE(async_modify_finished_);
    }

    SimulateFinishSync(file_system.file_system_context(), kFile,
                       SYNC_STATUS_OK, sync_mode);
    EXPECT_EQ(base::File::FILE_OK, WaitUntilModifyFileIsDone());

    // Sync succeeded, but the other change that was made during or
    // after sync is recorded.
    file_system.GetChangesForURLInTracker(kFile, &changes);
    EXPECT_EQ(1U, changes.size());
    EXPECT_TRUE(changes.list().back().IsFile());
    EXPECT_TRUE(changes.list().back().IsAddOrUpdate());

    sync_context_->ShutdownOnUIThread();
    sync_context_ = NULL;
    file_system.TearDown();
  }

  base::ScopedTempDir dir_;
  scoped_ptr<leveldb::Env> in_memory_env_;

  // These need to remain until the very end.
  content::TestBrowserThreadBundle thread_bundle_;

  scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
  scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner_;
  scoped_refptr<base::SingleThreadTaskRunner> file_task_runner_;

  scoped_refptr<LocalFileSyncContext> sync_context_;

  SyncStatusCode status_;         // Result of the last sync operation.
  base::File::Error file_error_;  // Result of the last file operation.
  bool async_modify_finished_;
  bool has_inflight_prepare_for_sync_;
};
// Constructing the context and immediately shutting it down must be safe.
TEST_F(LocalFileSyncContextTest, ConstructAndDestruct) {
  sync_context_ =
      new LocalFileSyncContext(
          dir_.path(), in_memory_env_.get(),
          ui_task_runner_.get(), io_task_runner_.get());
  sync_context_->ShutdownOnUIThread();
}
// Initializing a file system with the sync context wires up the backend's
// sync context and change tracker, is idempotent, and records changes.
TEST_F(LocalFileSyncContextTest, InitializeFileSystemContext) {
  CannedSyncableFileSystem file_system(GURL(kOrigin1),
                                       in_memory_env_.get(),
                                       io_task_runner_.get(),
                                       file_task_runner_.get());
  file_system.SetUp(CannedSyncableFileSystem::QUOTA_ENABLED);

  sync_context_ = new LocalFileSyncContext(
      dir_.path(), in_memory_env_.get(),
      ui_task_runner_.get(), io_task_runner_.get());

  // Initializes file_system using |sync_context_|.
  EXPECT_EQ(SYNC_STATUS_OK,
            file_system.MaybeInitializeFileSystemContext(sync_context_.get()));

  // Make sure everything's set up for file_system to be able to handle
  // syncable file system operations.
  EXPECT_TRUE(file_system.backend()->sync_context() != NULL);
  EXPECT_TRUE(file_system.backend()->change_tracker() != NULL);
  EXPECT_EQ(sync_context_.get(), file_system.backend()->sync_context());

  // Calling MaybeInitialize for the same context multiple times must be ok.
  EXPECT_EQ(SYNC_STATUS_OK,
            file_system.MaybeInitializeFileSystemContext(sync_context_.get()));
  EXPECT_EQ(sync_context_.get(), file_system.backend()->sync_context());

  // Opens the file_system, perform some operation and see if the change
  // tracker correctly captures the change.
  EXPECT_EQ(base::File::FILE_OK, file_system.OpenFileSystem());

  const FileSystemURL kURL(file_system.URL("foo"));
  EXPECT_EQ(base::File::FILE_OK, file_system.CreateFile(kURL));

  FileSystemURLSet urls;
  file_system.GetChangedURLsInTracker(&urls);
  ASSERT_EQ(1U, urls.size());
  EXPECT_TRUE(ContainsKey(urls, kURL));

  // Finishing the test.
  sync_context_->ShutdownOnUIThread();
  file_system.TearDown();
}
// A single sync context can serve two file systems (different origins);
// each origin's change tracker records only its own changes.
TEST_F(LocalFileSyncContextTest, MultipleFileSystemContexts) {
  CannedSyncableFileSystem file_system1(GURL(kOrigin1),
                                        in_memory_env_.get(),
                                        io_task_runner_.get(),
                                        file_task_runner_.get());
  CannedSyncableFileSystem file_system2(GURL(kOrigin2),
                                        in_memory_env_.get(),
                                        io_task_runner_.get(),
                                        file_task_runner_.get());
  file_system1.SetUp(CannedSyncableFileSystem::QUOTA_ENABLED);
  file_system2.SetUp(CannedSyncableFileSystem::QUOTA_ENABLED);

  sync_context_ = new LocalFileSyncContext(
      dir_.path(), in_memory_env_.get(),
      ui_task_runner_.get(), io_task_runner_.get());

  // Initializes file_system1 and file_system2.
  EXPECT_EQ(SYNC_STATUS_OK,
            file_system1.MaybeInitializeFileSystemContext(sync_context_.get()));
  EXPECT_EQ(SYNC_STATUS_OK,
            file_system2.MaybeInitializeFileSystemContext(sync_context_.get()));

  EXPECT_EQ(base::File::FILE_OK, file_system1.OpenFileSystem());
  EXPECT_EQ(base::File::FILE_OK, file_system2.OpenFileSystem());

  const FileSystemURL kURL1(file_system1.URL("foo"));
  const FileSystemURL kURL2(file_system2.URL("bar"));

  // Creates a file in file_system1.
  EXPECT_EQ(base::File::FILE_OK, file_system1.CreateFile(kURL1));

  // file_system1's tracker must have recorded the change.
  FileSystemURLSet urls;
  file_system1.GetChangedURLsInTracker(&urls);
  ASSERT_EQ(1U, urls.size());
  EXPECT_TRUE(ContainsKey(urls, kURL1));

  // file_system1's tracker must have no change.
  urls.clear();
  file_system2.GetChangedURLsInTracker(&urls);
  ASSERT_TRUE(urls.empty());

  // Creates a directory in file_system2.
  EXPECT_EQ(base::File::FILE_OK, file_system2.CreateDirectory(kURL2));

  // file_system1's tracker must have the change for kURL1 as before.
  urls.clear();
  file_system1.GetChangedURLsInTracker(&urls);
  ASSERT_EQ(1U, urls.size());
  EXPECT_TRUE(ContainsKey(urls, kURL1));

  // file_system2's tracker now must have the change for kURL2.
  urls.clear();
  file_system2.GetChangedURLsInTracker(&urls);
  ASSERT_EQ(1U, urls.size());
  EXPECT_TRUE(ContainsKey(urls, kURL2));

  SyncFileMetadata metadata;
  FileChangeList changes;
  EXPECT_EQ(SYNC_STATUS_OK,
            PrepareForSync(file_system1.file_system_context(), kURL1,
                           LocalFileSyncContext::SYNC_EXCLUSIVE,
                           &metadata, &changes, NULL));
  EXPECT_EQ(1U, changes.size());
  EXPECT_TRUE(changes.list().back().IsFile());
  EXPECT_TRUE(changes.list().back().IsAddOrUpdate());
  EXPECT_EQ(SYNC_FILE_TYPE_FILE, metadata.file_type);
  EXPECT_EQ(0, metadata.size);

  changes.clear();
  EXPECT_EQ(SYNC_STATUS_OK,
            PrepareForSync(file_system2.file_system_context(), kURL2,
                           LocalFileSyncContext::SYNC_EXCLUSIVE,
                           &metadata, &changes, NULL));
  EXPECT_EQ(1U, changes.size());
  EXPECT_FALSE(changes.list().back().IsFile());
  EXPECT_TRUE(changes.list().back().IsAddOrUpdate());
  EXPECT_EQ(SYNC_FILE_TYPE_DIRECTORY, metadata.file_type);
  EXPECT_EQ(0, metadata.size);

  sync_context_->ShutdownOnUIThread();
  sync_context_ = NULL;

  file_system1.TearDown();
  file_system2.TearDown();
}
// Thin wrappers over PrepareForSync_Basic / PrepareForSync_WriteDuringSync
// covering both sync modes crossed with success/failure finish statuses.
TEST_F(LocalFileSyncContextTest, PrepareSync_SyncSuccess_Exclusive) {
  PrepareForSync_Basic(LocalFileSyncContext::SYNC_EXCLUSIVE,
                       SYNC_STATUS_OK);
}

TEST_F(LocalFileSyncContextTest, PrepareSync_SyncSuccess_Snapshot) {
  PrepareForSync_Basic(LocalFileSyncContext::SYNC_SNAPSHOT,
                       SYNC_STATUS_OK);
}

TEST_F(LocalFileSyncContextTest, PrepareSync_SyncFailure_Exclusive) {
  PrepareForSync_Basic(LocalFileSyncContext::SYNC_EXCLUSIVE,
                       SYNC_STATUS_FAILED);
}

TEST_F(LocalFileSyncContextTest, PrepareSync_SyncFailure_Snapshot) {
  PrepareForSync_Basic(LocalFileSyncContext::SYNC_SNAPSHOT,
                       SYNC_STATUS_FAILED);
}

TEST_F(LocalFileSyncContextTest, PrepareSync_WriteDuringSync_Exclusive) {
  PrepareForSync_WriteDuringSync(LocalFileSyncContext::SYNC_EXCLUSIVE);
}

TEST_F(LocalFileSyncContextTest, PrepareSync_WriteDuringSync_Snapshot) {
  PrepareForSync_WriteDuringSync(LocalFileSyncContext::SYNC_SNAPSHOT);
}
// LocalFileSyncContextTest.PrepareSyncWhileWriting is flaky on android.
// http://crbug.com/239793
// It is also flaky on the TSAN v2 bots, and hangs other bots.
// http://crbug.com/305905.
//
// PrepareForSync while a write is in flight must return FILE_BUSY; once
// registered via RegisterURLForWaitingSync it re-runs after the write.
TEST_F(LocalFileSyncContextTest, DISABLED_PrepareSyncWhileWriting) {
  CannedSyncableFileSystem file_system(GURL(kOrigin1),
                                       in_memory_env_.get(),
                                       io_task_runner_.get(),
                                       file_task_runner_.get());
  file_system.SetUp(CannedSyncableFileSystem::QUOTA_ENABLED);
  sync_context_ = new LocalFileSyncContext(
      dir_.path(), in_memory_env_.get(),
      ui_task_runner_.get(), io_task_runner_.get());
  EXPECT_EQ(SYNC_STATUS_OK,
            file_system.MaybeInitializeFileSystemContext(sync_context_.get()));

  EXPECT_EQ(base::File::FILE_OK, file_system.OpenFileSystem());

  const FileSystemURL kURL1(file_system.URL("foo"));

  // Creates a file in file_system.
  EXPECT_EQ(base::File::FILE_OK, file_system.CreateFile(kURL1));

  // Kick file write on IO thread.
  StartModifyFileOnIOThread(&file_system, kURL1);

  // Until the operation finishes PrepareForSync should return BUSY error.
  SyncFileMetadata metadata;
  metadata.file_type = SYNC_FILE_TYPE_UNKNOWN;
  FileChangeList changes;
  EXPECT_EQ(SYNC_STATUS_FILE_BUSY,
            PrepareForSync(file_system.file_system_context(), kURL1,
                           LocalFileSyncContext::SYNC_EXCLUSIVE,
                           &metadata, &changes, NULL));
  EXPECT_EQ(SYNC_FILE_TYPE_FILE, metadata.file_type);

  // Register PrepareForSync method to be invoked when kURL1 becomes
  // syncable. (Actually this may be done after all operations are done
  // on IO thread in this test.)
  metadata.file_type = SYNC_FILE_TYPE_UNKNOWN;
  changes.clear();
  sync_context_->RegisterURLForWaitingSync(
      kURL1, GetPrepareForSyncClosure(file_system.file_system_context(), kURL1,
                                      LocalFileSyncContext::SYNC_EXCLUSIVE,
                                      &metadata, &changes, NULL));

  // Wait for the completion.
  EXPECT_EQ(base::File::FILE_OK, WaitUntilModifyFileIsDone());

  // The PrepareForSync must have been started; wait until DidPrepareForSync
  // is done.
  base::MessageLoop::current()->Run();
  ASSERT_FALSE(has_inflight_prepare_for_sync_);

  // Now PrepareForSync should have run and returned OK.
  EXPECT_EQ(SYNC_STATUS_OK, status_);
  EXPECT_EQ(1U, changes.size());
  EXPECT_TRUE(changes.list().back().IsFile());
  EXPECT_TRUE(changes.list().back().IsAddOrUpdate());
  EXPECT_EQ(SYNC_FILE_TYPE_FILE, metadata.file_type);
  EXPECT_EQ(1, metadata.size);

  sync_context_->ShutdownOnUIThread();
  sync_context_ = NULL;
  file_system.TearDown();
}
// Applying remote deletion changes removes the targets (recursively for
// directories), records nothing in the change tracker, and returns the
// quota usage to its initial value.
//
// Fix: three GetUsageAndQuota call sites had their second argument
// corrupted by an HTML-entity round-trip ("&quota" -> '"a'), which did
// not compile; the &quota argument is restored.
TEST_F(LocalFileSyncContextTest, ApplyRemoteChangeForDeletion) {
  CannedSyncableFileSystem file_system(GURL(kOrigin1),
                                       in_memory_env_.get(),
                                       io_task_runner_.get(),
                                       file_task_runner_.get());
  file_system.SetUp(CannedSyncableFileSystem::QUOTA_ENABLED);

  sync_context_ = new LocalFileSyncContext(
      dir_.path(), in_memory_env_.get(),
      ui_task_runner_.get(), io_task_runner_.get());
  ASSERT_EQ(SYNC_STATUS_OK,
            file_system.MaybeInitializeFileSystemContext(sync_context_.get()));
  ASSERT_EQ(base::File::FILE_OK, file_system.OpenFileSystem());

  // Record the initial usage (likely 0).
  int64 initial_usage = -1;
  int64 quota = -1;
  EXPECT_EQ(quota::kQuotaStatusOk,
            file_system.GetUsageAndQuota(&initial_usage, &quota));

  // Create a file and directory in the file_system.
  const FileSystemURL kFile(file_system.URL("file"));
  const FileSystemURL kDir(file_system.URL("dir"));
  const FileSystemURL kChild(file_system.URL("dir/child"));

  EXPECT_EQ(base::File::FILE_OK, file_system.CreateFile(kFile));
  EXPECT_EQ(base::File::FILE_OK, file_system.CreateDirectory(kDir));
  EXPECT_EQ(base::File::FILE_OK, file_system.CreateFile(kChild));

  // file_system's change tracker must have recorded the creation.
  FileSystemURLSet urls;
  file_system.GetChangedURLsInTracker(&urls);
  ASSERT_EQ(3U, urls.size());
  ASSERT_TRUE(ContainsKey(urls, kFile));
  ASSERT_TRUE(ContainsKey(urls, kDir));
  ASSERT_TRUE(ContainsKey(urls, kChild));
  for (FileSystemURLSet::iterator iter = urls.begin();
       iter != urls.end(); ++iter) {
    file_system.ClearChangeForURLInTracker(*iter);
  }

  // At this point the usage must be greater than the initial usage.
  int64 new_usage = -1;
  EXPECT_EQ(quota::kQuotaStatusOk,
            file_system.GetUsageAndQuota(&new_usage, &quota));
  EXPECT_GT(new_usage, initial_usage);

  // Now let's apply remote deletion changes.
  FileChange change(FileChange::FILE_CHANGE_DELETE,
                    SYNC_FILE_TYPE_FILE);
  EXPECT_EQ(SYNC_STATUS_OK,
            ApplyRemoteChange(file_system.file_system_context(),
                              change, base::FilePath(), kFile,
                              SYNC_FILE_TYPE_FILE));

  // The implementation doesn't check file type for deletion, and it must be ok
  // even if we don't know if the deletion change was for a file or a directory.
  change = FileChange(FileChange::FILE_CHANGE_DELETE,
                      SYNC_FILE_TYPE_UNKNOWN);
  EXPECT_EQ(SYNC_STATUS_OK,
            ApplyRemoteChange(file_system.file_system_context(),
                              change, base::FilePath(), kDir,
                              SYNC_FILE_TYPE_DIRECTORY));

  // Check the directory/files are deleted successfully.
  EXPECT_EQ(base::File::FILE_ERROR_NOT_FOUND,
            file_system.FileExists(kFile));
  EXPECT_EQ(base::File::FILE_ERROR_NOT_FOUND,
            file_system.DirectoryExists(kDir));
  EXPECT_EQ(base::File::FILE_ERROR_NOT_FOUND,
            file_system.FileExists(kChild));

  // The changes applied by ApplyRemoteChange should not be recorded in
  // the change tracker.
  urls.clear();
  file_system.GetChangedURLsInTracker(&urls);
  EXPECT_TRUE(urls.empty());

  // The quota usage data must have reflected the deletion.
  EXPECT_EQ(quota::kQuotaStatusOk,
            file_system.GetUsageAndQuota(&new_usage, &quota));
  EXPECT_EQ(new_usage, initial_usage);

  sync_context_->ShutdownOnUIThread();
  sync_context_ = NULL;
  file_system.TearDown();
}
// Verifies that a remote deletion of the filesystem root wipes all local
// files/directories, resets the locally-tracked changes, and restores the
// quota usage to its initial value.
TEST_F(LocalFileSyncContextTest, ApplyRemoteChangeForDeletion_ForRoot) {
  CannedSyncableFileSystem file_system(GURL(kOrigin1),
                                       in_memory_env_.get(),
                                       io_task_runner_.get(),
                                       file_task_runner_.get());
  file_system.SetUp(CannedSyncableFileSystem::QUOTA_ENABLED);

  sync_context_ = new LocalFileSyncContext(
      dir_.path(), in_memory_env_.get(),
      ui_task_runner_.get(), io_task_runner_.get());
  ASSERT_EQ(SYNC_STATUS_OK,
            file_system.MaybeInitializeFileSystemContext(sync_context_.get()));
  ASSERT_EQ(base::File::FILE_OK, file_system.OpenFileSystem());

  // Record the initial usage (likely 0).
  // BUGFIX: the second argument was the garbled token `"a` (an HTML-entity
  // mangling of `&quota`); pass the declared |quota| out-parameter instead.
  int64 initial_usage = -1;
  int64 quota = -1;
  EXPECT_EQ(quota::kQuotaStatusOk,
            file_system.GetUsageAndQuota(&initial_usage, &quota));

  // Create a file and directory in the file_system.
  const FileSystemURL kFile(file_system.URL("file"));
  const FileSystemURL kDir(file_system.URL("dir"));
  const FileSystemURL kChild(file_system.URL("dir/child"));
  EXPECT_EQ(base::File::FILE_OK, file_system.CreateFile(kFile));
  EXPECT_EQ(base::File::FILE_OK, file_system.CreateDirectory(kDir));
  EXPECT_EQ(base::File::FILE_OK, file_system.CreateFile(kChild));

  // At this point the usage must be greater than the initial usage.
  int64 new_usage = -1;
  EXPECT_EQ(quota::kQuotaStatusOk,
            file_system.GetUsageAndQuota(&new_usage, &quota));
  EXPECT_GT(new_usage, initial_usage);

  const FileSystemURL kRoot(file_system.URL(""));

  // Now let's apply remote deletion changes for the root.
  FileChange change(FileChange::FILE_CHANGE_DELETE, SYNC_FILE_TYPE_DIRECTORY);
  EXPECT_EQ(SYNC_STATUS_OK,
            ApplyRemoteChange(file_system.file_system_context(),
                              change, base::FilePath(), kRoot,
                              SYNC_FILE_TYPE_DIRECTORY));

  // Check the directory/files are deleted successfully.
  EXPECT_EQ(base::File::FILE_ERROR_NOT_FOUND,
            file_system.FileExists(kFile));
  EXPECT_EQ(base::File::FILE_ERROR_NOT_FOUND,
            file_system.DirectoryExists(kDir));
  EXPECT_EQ(base::File::FILE_ERROR_NOT_FOUND,
            file_system.FileExists(kChild));

  // All changes made for the previous creation must have been also reset.
  FileSystemURLSet urls;
  file_system.GetChangedURLsInTracker(&urls);
  EXPECT_TRUE(urls.empty());

  // The quota usage data must have reflected the deletion.
  EXPECT_EQ(quota::kQuotaStatusOk,
            file_system.GetUsageAndQuota(&new_usage, &quota));
  EXPECT_EQ(new_usage, initial_usage);

  sync_context_->ShutdownOnUIThread();
  sync_context_ = NULL;
  file_system.TearDown();
}
// Verifies that remote ADD_OR_UPDATE changes update/create files and
// directories (including type-mismatched overwrites), adjust the quota
// usage accordingly, and are not re-recorded by the local change tracker.
TEST_F(LocalFileSyncContextTest, ApplyRemoteChangeForAddOrUpdate) {
  base::ScopedTempDir temp_dir;
  ASSERT_TRUE(temp_dir.CreateUniqueTempDir());

  CannedSyncableFileSystem file_system(GURL(kOrigin1),
                                       in_memory_env_.get(),
                                       io_task_runner_.get(),
                                       file_task_runner_.get());
  file_system.SetUp(CannedSyncableFileSystem::QUOTA_ENABLED);

  sync_context_ = new LocalFileSyncContext(
      dir_.path(), in_memory_env_.get(),
      ui_task_runner_.get(), io_task_runner_.get());
  ASSERT_EQ(SYNC_STATUS_OK,
            file_system.MaybeInitializeFileSystemContext(sync_context_.get()));
  ASSERT_EQ(base::File::FILE_OK, file_system.OpenFileSystem());

  const FileSystemURL kFile1(file_system.URL("file1"));
  const FileSystemURL kFile2(file_system.URL("file2"));
  const FileSystemURL kDir(file_system.URL("dir"));

  const char kTestFileData0[] = "0123456789";
  const char kTestFileData1[] = "Lorem ipsum!";
  const char kTestFileData2[] = "This is sample test data.";

  // Create kFile1 and populate it with kTestFileData0.
  EXPECT_EQ(base::File::FILE_OK, file_system.CreateFile(kFile1));
  EXPECT_EQ(static_cast<int64>(arraysize(kTestFileData0) - 1),
            file_system.WriteString(kFile1, kTestFileData0));

  // kFile2 and kDir are not there yet.
  EXPECT_EQ(base::File::FILE_ERROR_NOT_FOUND,
            file_system.FileExists(kFile2));
  EXPECT_EQ(base::File::FILE_ERROR_NOT_FOUND,
            file_system.DirectoryExists(kDir));

  // file_system's change tracker must have recorded the creation.
  FileSystemURLSet urls;
  file_system.GetChangedURLsInTracker(&urls);
  ASSERT_EQ(1U, urls.size());
  EXPECT_TRUE(ContainsKey(urls, kFile1));
  file_system.ClearChangeForURLInTracker(*urls.begin());

  // Prepare temporary files which represent the remote file data.
  const base::FilePath kFilePath1(temp_dir.path().Append(FPL("file1")));
  const base::FilePath kFilePath2(temp_dir.path().Append(FPL("file2")));

  ASSERT_EQ(static_cast<int>(arraysize(kTestFileData1) - 1),
            base::WriteFile(kFilePath1, kTestFileData1,
                            arraysize(kTestFileData1) - 1));
  ASSERT_EQ(static_cast<int>(arraysize(kTestFileData2) - 1),
            base::WriteFile(kFilePath2, kTestFileData2,
                            arraysize(kTestFileData2) - 1));

  // Record the usage.
  // BUGFIX: the second argument was the garbled token `"a` (an HTML-entity
  // mangling of `&quota`); pass the declared |quota| out-parameter instead.
  int64 usage = -1, new_usage = -1;
  int64 quota = -1;
  EXPECT_EQ(quota::kQuotaStatusOk,
            file_system.GetUsageAndQuota(&usage, &quota));

  // Here in the local filesystem we have:
  //  * kFile1 with kTestFileData0
  //
  // In the remote side let's assume we have:
  //  * kFile1 with kTestFileData1
  //  * kFile2 with kTestFileData2
  //  * kDir
  //
  // By calling ApplyChange's:
  //  * kFile1 will be updated to have kTestFileData1
  //  * kFile2 will be created
  //  * kDir will be created

  // Apply the remote change to kFile1 (which will update the file).
  FileChange change(FileChange::FILE_CHANGE_ADD_OR_UPDATE,
                    SYNC_FILE_TYPE_FILE);
  EXPECT_EQ(SYNC_STATUS_OK,
            ApplyRemoteChange(file_system.file_system_context(),
                              change, kFilePath1, kFile1,
                              SYNC_FILE_TYPE_FILE));

  // Check if the usage has been increased by (kTestFileData1 - kTestFileData0).
  const int updated_size =
      arraysize(kTestFileData1) - arraysize(kTestFileData0);
  EXPECT_EQ(quota::kQuotaStatusOk,
            file_system.GetUsageAndQuota(&new_usage, &quota));
  EXPECT_EQ(updated_size, new_usage - usage);

  // Apply remote changes to kFile2 and kDir (should create a file and
  // directory respectively).
  // They are non-existent yet so their expected file type (the last
  // parameter of ApplyRemoteChange) are SYNC_FILE_TYPE_UNKNOWN.
  change = FileChange(FileChange::FILE_CHANGE_ADD_OR_UPDATE,
                      SYNC_FILE_TYPE_FILE);
  EXPECT_EQ(SYNC_STATUS_OK,
            ApplyRemoteChange(file_system.file_system_context(),
                              change, kFilePath2, kFile2,
                              SYNC_FILE_TYPE_UNKNOWN));

  change = FileChange(FileChange::FILE_CHANGE_ADD_OR_UPDATE,
                      SYNC_FILE_TYPE_DIRECTORY);
  EXPECT_EQ(SYNC_STATUS_OK,
            ApplyRemoteChange(file_system.file_system_context(),
                              change, base::FilePath(), kDir,
                              SYNC_FILE_TYPE_UNKNOWN));

  // Calling ApplyRemoteChange with different file type should be handled as
  // overwrite: first replace the directory kDir with a file...
  change =
      FileChange(FileChange::FILE_CHANGE_ADD_OR_UPDATE, SYNC_FILE_TYPE_FILE);
  EXPECT_EQ(SYNC_STATUS_OK,
            ApplyRemoteChange(file_system.file_system_context(),
                              change,
                              kFilePath1,
                              kDir,
                              SYNC_FILE_TYPE_DIRECTORY));
  EXPECT_EQ(base::File::FILE_OK, file_system.FileExists(kDir));

  // ...then replace that file with a directory again.
  change = FileChange(FileChange::FILE_CHANGE_ADD_OR_UPDATE,
                      SYNC_FILE_TYPE_DIRECTORY);
  EXPECT_EQ(SYNC_STATUS_OK,
            ApplyRemoteChange(file_system.file_system_context(),
                              change,
                              kFilePath1,
                              kDir,
                              SYNC_FILE_TYPE_FILE));

  // Creating a file/directory must have increased the usage more than
  // the size of kTestFileData2.
  new_usage = usage;
  EXPECT_EQ(quota::kQuotaStatusOk,
            file_system.GetUsageAndQuota(&new_usage, &quota));
  EXPECT_GT(new_usage,
            static_cast<int64>(usage + arraysize(kTestFileData2) - 1));

  // The changes applied by ApplyRemoteChange should not be recorded in
  // the change tracker.
  urls.clear();
  file_system.GetChangedURLsInTracker(&urls);
  EXPECT_TRUE(urls.empty());

  // Make sure all three files/directory exist.
  EXPECT_EQ(base::File::FILE_OK, file_system.FileExists(kFile1));
  EXPECT_EQ(base::File::FILE_OK, file_system.FileExists(kFile2));
  EXPECT_EQ(base::File::FILE_OK, file_system.DirectoryExists(kDir));

  sync_context_->ShutdownOnUIThread();
  file_system.TearDown();
}
// Verifies that applying a remote ADD_OR_UPDATE for a file whose parent
// directory does not exist locally creates the missing parent directory
// along with the file.
TEST_F(LocalFileSyncContextTest, ApplyRemoteChangeForAddOrUpdate_NoParent) {
  base::ScopedTempDir temp_dir;
  ASSERT_TRUE(temp_dir.CreateUniqueTempDir());

  CannedSyncableFileSystem file_system(GURL(kOrigin1),
                                       in_memory_env_.get(),
                                       io_task_runner_.get(),
                                       file_task_runner_.get());
  file_system.SetUp(CannedSyncableFileSystem::QUOTA_ENABLED);

  sync_context_ = new LocalFileSyncContext(
      dir_.path(), in_memory_env_.get(),
      ui_task_runner_.get(), io_task_runner_.get());
  ASSERT_EQ(SYNC_STATUS_OK,
            file_system.MaybeInitializeFileSystemContext(sync_context_.get()));
  ASSERT_EQ(base::File::FILE_OK, file_system.OpenFileSystem());

  const char kTestFileData[] = "Lorem ipsum!";
  const FileSystemURL kDir(file_system.URL("dir"));
  const FileSystemURL kFile(file_system.URL("dir/file"));

  // Neither kDir nor kFile exists yet.
  EXPECT_EQ(base::File::FILE_ERROR_NOT_FOUND, file_system.FileExists(kDir));
  EXPECT_EQ(base::File::FILE_ERROR_NOT_FOUND, file_system.FileExists(kFile));

  // Prepare a temporary file which represents remote file data.
  const base::FilePath kFilePath(temp_dir.path().Append(FPL("file")));
  ASSERT_EQ(static_cast<int>(arraysize(kTestFileData) - 1),
            base::WriteFile(kFilePath, kTestFileData,
                            arraysize(kTestFileData) - 1));

  // Calling ApplyRemoteChange with kFilePath should create kFile along
  // with its (missing) parent kDir.
  FileChange change(FileChange::FILE_CHANGE_ADD_OR_UPDATE,
                    SYNC_FILE_TYPE_FILE);
  EXPECT_EQ(SYNC_STATUS_OK,
            ApplyRemoteChange(file_system.file_system_context(),
                              change, kFilePath, kFile,
                              SYNC_FILE_TYPE_UNKNOWN));

  // The changes applied by ApplyRemoteChange should not be recorded in
  // the change tracker.  (The redundant urls.clear() right after the
  // declaration was removed; a fresh set is already empty.)
  FileSystemURLSet urls;
  file_system.GetChangedURLsInTracker(&urls);
  EXPECT_TRUE(urls.empty());

  // Make sure kDir and kFile are created by ApplyRemoteChange.
  EXPECT_EQ(base::File::FILE_OK, file_system.FileExists(kFile));
  EXPECT_EQ(base::File::FILE_OK, file_system.DirectoryExists(kDir));

  sync_context_->ShutdownOnUIThread();
  file_system.TearDown();
}
} // namespace sync_file_system
| ondra-novak/chromium.src | chrome/browser/sync_file_system/local/local_file_sync_context_unittest.cc | C++ | bsd-3-clause | 38,460 |
/*!
* jQuery twitter bootstrap wizard plugin
* Examples and documentation at: http://github.com/VinceG/twitter-bootstrap-wizard
* version 1.0
* Requires jQuery v1.3.2 or later
* Dual licensed under the MIT and GPL licenses:
* http://www.opensource.org/licenses/mit-license.php
* http://www.gnu.org/licenses/gpl.html
* Authors: Vadim Vincent Gabriel (http://vadimg.com)
*/
;
(function ($) {
    /**
     * Wizard controller bound to a Bootstrap tab element.
     *
     * @param element  element (or jQuery-wrappable value) containing the tab
     *                 navigation (its first <ul>) and the wizard controls.
     * @param options  per-instance overrides of $.fn.bootstrapWizard.defaults.
     */
    var bootstrapWizardCreate = function (element, options) {
        var element = $(element);
        var obj = this;

        // Merge options with defaults into a fresh object so the shared
        // defaults object is never mutated.
        var $settings = $.extend({}, $.fn.bootstrapWizard.defaults, options);
        var $activeTab = null;
        var $navigation = null;

        // Sync the disabled state of the previous/next controls with the
        // currently active tab, then fire the onTabShow callback.
        this.fixNavigationButtons = function () {
            if (!$activeTab.length) {
                // No active tab yet -- select the first one.
                $navigation.find('a:first').tab('show');
                $activeTab = $navigation.find('li:first');
            }
            // Disable "previous" on the first tab and "next" on the last.
            if (obj.firstIndex() >= obj.currentIndex()) {
                $('li.previous', element).addClass('disabled');
            } else {
                $('li.previous', element).removeClass('disabled');
            }
            if (obj.currentIndex() >= obj.navigationLength()) {
                $('li.next', element).addClass('disabled');
            } else {
                $('li.next', element).removeClass('disabled');
            }
            if ($settings.onTabShow && typeof $settings.onTabShow === 'function' && $settings.onTabShow($activeTab, $navigation, obj.currentIndex()) === false) {
                return false;
            }
        };

        // Advance to the next tab; a no-op when already on the last one.
        this.next = function (e) {
            // If we clicked the last then dont activate this
            if (element.hasClass('last')) {
                return false;
            }
            if ($settings.onNext && typeof $settings.onNext === 'function' && $settings.onNext($activeTab, $navigation, obj.nextIndex()) === false) {
                return false;
            }
            // BUGFIX: $index was assigned without `var`, leaking a global.
            var $index = obj.nextIndex();
            if ($index <= obj.navigationLength()) {
                $navigation.find('li:eq(' + $index + ') a').tab('show');
            }
        };

        // Go back to the previous tab; a no-op when already on the first one.
        this.previous = function (e) {
            // If we clicked the first then dont activate this
            if (element.hasClass('first')) {
                return false;
            }
            if ($settings.onPrevious && typeof $settings.onPrevious === 'function' && $settings.onPrevious($activeTab, $navigation, obj.previousIndex()) === false) {
                return false;
            }
            // BUGFIX: $index was assigned without `var`, leaking a global.
            var $index = obj.previousIndex();
            if ($index >= 0) {
                $navigation.find('li:eq(' + $index + ') a').tab('show');
            }
        };

        // Jump to the first tab.
        this.first = function (e) {
            if ($settings.onFirst && typeof $settings.onFirst === 'function' && $settings.onFirst($activeTab, $navigation, obj.firstIndex()) === false) {
                return false;
            }
            // If the element is disabled then we won't do anything
            if (element.hasClass('disabled')) {
                return false;
            }
            $navigation.find('li:eq(0) a').tab('show');
        };

        // Jump to the last tab.
        this.last = function (e) {
            if ($settings.onLast && typeof $settings.onLast === 'function' && $settings.onLast($activeTab, $navigation, obj.lastIndex()) === false) {
                return false;
            }
            // If the element is disabled then we won't do anything
            if (element.hasClass('disabled')) {
                return false;
            }
            $navigation.find('li:eq(' + obj.navigationLength() + ') a').tab('show');
        };

        // ---- index helpers (all indices are zero-based) -------------------
        this.currentIndex = function () {
            return $navigation.find('li').index($activeTab);
        };
        this.firstIndex = function () {
            return 0;
        };
        this.lastIndex = function () {
            return obj.navigationLength();
        };
        this.getIndex = function (elem) {
            return $navigation.find('li').index(elem);
        };
        this.nextIndex = function () {
            return $navigation.find('li').index($activeTab) + 1;
        };
        this.previousIndex = function () {
            return $navigation.find('li').index($activeTab) - 1;
        };
        // Index of the last tab (tab count minus one).
        this.navigationLength = function () {
            return $navigation.find('li').length - 1;
        };
        this.activeTab = function () {
            return $activeTab;
        };
        // The <li> after the active tab, or null when on the last tab.
        this.nextTab = function () {
            return $navigation.find('li:eq(' + (obj.currentIndex() + 1) + ')').length ? $navigation.find('li:eq(' + (obj.currentIndex() + 1) + ')') : null;
        };
        // The <li> before the active tab, or null when on the first tab.
        this.previousTab = function () {
            if (obj.currentIndex() <= 0) {
                return null;
            }
            return $navigation.find('li:eq(' + parseInt(obj.currentIndex() - 1) + ')');
        };

        // ---- initialisation ----------------------------------------------
        $navigation = element.find('ul:first', element);
        $activeTab = $navigation.find('li.active', element);

        // 'class' is a reserved word in older engines; use bracket notation
        // (the defaults object also declares it with a quoted key).
        if (!$navigation.hasClass($settings['class'])) {
            $navigation.addClass($settings['class']);
        }

        // Load onInit
        if ($settings.onInit && typeof $settings.onInit === 'function') {
            $settings.onInit($activeTab, $navigation, 0);
        }

        // Next/Previous events
        $($settings.nextSelector, element).bind('click', obj.next);
        $($settings.previousSelector, element).bind('click', obj.previous);
        $($settings.lastSelector, element).bind('click', obj.last);
        $($settings.firstSelector, element).bind('click', obj.first);

        // Load onShow
        if ($settings.onShow && typeof $settings.onShow === 'function') {
            $settings.onShow($activeTab, $navigation, obj.nextIndex());
        }

        // Work the next/previous buttons
        obj.fixNavigationButtons();

        $('a[data-toggle="tab"]', element).on('click', function (e) {
            if ($settings.onTabClick && typeof $settings.onTabClick === 'function' && $settings.onTabClick($activeTab, $navigation, obj.currentIndex()) === false) {
                return false;
            }
        });

        $('a[data-toggle="tab"]', element).on('show', function (e) {
            // BUGFIX: $element was assigned without `var`, leaking a global.
            var $element = $(e.target).parent();
            // If it's disabled then do not change
            if ($element.hasClass('disabled')) {
                return false;
            }
            $activeTab = $element; // activated tab
            obj.fixNavigationButtons();
        });
    };

    /**
     * jQuery plugin entry point: $(el).bootstrapWizard(options).
     * Creates at most one wizard per element; the instance is stored in
     * the element's data under the 'bootstrapWizard' key.
     */
    $.fn.bootstrapWizard = function (options) {
        return this.each(function (index) {
            var element = $(this);
            // Return early if this element already has a plugin instance
            if (element.data('bootstrapWizard')) return;
            // pass options to plugin constructor
            var wizard = new bootstrapWizardCreate(element, options);
            // Store plugin object in this element's data
            element.data('bootstrapWizard', wizard);
        });
    };

    // Expose the default options so callers can override them globally.
    $.fn.bootstrapWizard.defaults = {
        'class':'nav nav-pills',
        'nextSelector':'.wizard li.next',
        'previousSelector':'.wizard li.previous',
        'firstSelector':'.wizard li.first',
        'lastSelector':'.wizard li.last',
        'onShow':null,
        'onInit':null,
        'onNext':null,
        'onPrevious':null,
        'onLast':null,
        'onFirst':null,
        'onTabClick':null,
        'onTabShow':null
    };
})(jQuery);
| flesch91/uaweb-work.github.com | web/booster-install/assets/js/jquery.bootstrap.wizard.js | JavaScript | bsd-3-clause | 7,891 |
<!DOCTYPE html>
<!--
Copyright (c) 2013 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<link rel="import" href="/tracing/core/test_utils.html">
<link rel="import" href="/tracing/extras/importer/linux_perf/ftrace_importer.html">
<script>
'use strict';
// Verifies that the ftrace importer turns drm_vblank_event records into a
// single 'drm_vblank' thread with one slice per imported event.
tr.b.unittest.testSuite(function() {
test('drmImport', function() {
// Two raw ftrace drm_vblank_event records for crtc 0 with consecutive
// vblank sequence numbers.
const lines = [
' chrome-2465 [000] 71.653157: drm_vblank_event: crtc=0, seq=4233',
' <idle>-0 [000] 71.669851: drm_vblank_event: crtc=0, seq=4234'
];
// Import without time-shifting so the original timestamps are preserved.
const m = tr.c.TestUtils.newModelWithEvents([lines.join('\n')], {
shiftWorldToZero: false
});
assert.isFalse(m.hasImportWarnings);
// All vblank events end up on one thread named 'drm_vblank'.
const threads = m.getAllThreads();
assert.strictEqual(threads.length, 1);
const vblankThread = threads[0];
assert.strictEqual(vblankThread.name, 'drm_vblank');
// One slice per imported drm_vblank_event line.
assert.strictEqual(vblankThread.sliceGroup.length, 2);
});
});
</script>
| catapult-project/catapult-csm | tracing/tracing/extras/importer/linux_perf/drm_parser_test.html | HTML | bsd-3-clause | 1,011 |
/*
* Copyright (c) 2002-2015, the original author or authors.
*
* This software is distributable under the BSD license. See the terms of the
* BSD license in the documentation provided with this software.
*
* http://www.opensource.org/licenses/bsd-license.php
*/
package jline.internal;
import java.util.ArrayList;
import java.util.List;
import static jline.internal.Preconditions.checkNotNull;
/**
* Manages the JLine shutdown-hook thread and tasks to execute on shutdown.
*
* @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
* @since 2.7
*/
public class ShutdownHooks
{
public static final String JLINE_SHUTDOWNHOOK = "jline.shutdownhook";
private static final boolean enabled = Configuration.getBoolean(JLINE_SHUTDOWNHOOK, true);
private static final List<Task> tasks = new ArrayList<Task>();
private static Thread hook;
public static synchronized <T extends Task> T add(final T task) {
checkNotNull(task);
// If not enabled ignore
if (!enabled) {
Log.debug("Shutdown-hook is disabled; not installing: ", task);
return task;
}
// Install the hook thread if needed
if (hook == null) {
hook = addHook(new Thread("JLine Shutdown Hook")
{
@Override
public void run() {
runTasks();
}
});
}
// Track the task
Log.debug("Adding shutdown-hook task: ", task);
tasks.add(task);
return task;
}
private static synchronized void runTasks() {
Log.debug("Running all shutdown-hook tasks");
// Iterate through copy of tasks list
for (Task task : tasks.toArray(new Task[tasks.size()])) {
Log.debug("Running task: ", task);
try {
task.run();
}
catch (Throwable e) {
Log.warn("Task failed", e);
}
}
tasks.clear();
}
private static Thread addHook(final Thread thread) {
Log.debug("Registering shutdown-hook: ", thread);
try {
Runtime.getRuntime().addShutdownHook(thread);
}
catch (AbstractMethodError e) {
// JDK 1.3+ only method. Bummer.
Log.debug("Failed to register shutdown-hook", e);
}
return thread;
}
public static synchronized void remove(final Task task) {
checkNotNull(task);
// ignore if not enabled or hook never installed
if (!enabled || hook == null) {
return;
}
// Drop the task
tasks.remove(task);
// If there are no more tasks, then remove the hook thread
if (tasks.isEmpty()) {
removeHook(hook);
hook = null;
}
}
private static void removeHook(final Thread thread) {
Log.debug("Removing shutdown-hook: ", thread);
try {
Runtime.getRuntime().removeShutdownHook(thread);
}
catch (AbstractMethodError e) {
// JDK 1.3+ only method. Bummer.
Log.debug("Failed to remove shutdown-hook", e);
}
catch (IllegalStateException e) {
// The VM is shutting down, not a big deal; ignore
}
}
/**
* Essentially a {@link Runnable} which allows running to throw an exception.
*/
public static interface Task
{
void run() throws Exception;
}
} | kaulkie/jline2 | src/main/java/jline/internal/ShutdownHooks.java | Java | bsd-3-clause | 3,503 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.