repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
neo4j-contrib/neo4j-apoc-procedures | test-utils/src/main/java/apoc/periodic/PeriodicTestUtils.java | 2248 | package apoc.periodic;
import org.neo4j.common.DependencyResolver;
import org.neo4j.internal.helpers.collection.Iterators;
import org.neo4j.kernel.api.exceptions.Status;
import org.neo4j.kernel.impl.api.KernelTransactions;
import org.neo4j.kernel.internal.GraphDatabaseAPI;
import org.neo4j.test.rule.DbmsRule;
import java.util.Map;
import static apoc.util.TestUtil.testResult;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class PeriodicTestUtils {

    /**
     * Asynchronously tries (up to 10 times, 10ms apart) to terminate any running
     * transaction whose query text contains "apoc.periodic".
     */
    public static void killPeriodicQueryAsync(DbmsRule db) {
        new Thread(() -> {
            int retries = 10;
            try {
                while (retries-- > 0 && !terminateQuery("apoc.periodic", db)) {
                    Thread.sleep(10);
                }
            } catch (InterruptedException e) {
                // Restore the interrupt flag instead of silently swallowing it,
                // so the thread's interrupted status remains observable.
                Thread.currentThread().interrupt();
            }
        }).start();
    }

    /**
     * Marks for termination every active transaction whose executing query text
     * contains the given pattern.
     *
     * @param pattern substring to look for in the raw query text
     * @param db      the database whose kernel transactions are inspected
     * @return true if at least one transaction was marked for termination
     */
    public static boolean terminateQuery(String pattern, GraphDatabaseAPI db) {
        DependencyResolver dependencyResolver = db.getDependencyResolver();
        KernelTransactions kernelTransactions = dependencyResolver.resolveDependency(KernelTransactions.class);
        long numberOfKilledTransactions = kernelTransactions.activeTransactions().stream()
                .filter(kernelTransactionHandle ->
                        kernelTransactionHandle.executingQuery().map(query -> query.rawQueryText().contains(pattern))
                                .orElse(false)
                )
                .map(kernelTransactionHandle -> kernelTransactionHandle.markForTermination(Status.Transaction.Terminated))
                .count();
        return numberOfKilledTransactions > 0;
    }

    /**
     * Runs the given periodic query while concurrently killing it, asserting that
     * the query either reports wasTerminated=true or fails with a "terminated" error.
     */
    public static void testTerminatePeriodicQuery(DbmsRule db, String periodicQuery) {
        killPeriodicQueryAsync(db);
        try {
            testResult(db, periodicQuery, result -> {
                Map<String, Object> row = Iterators.single(result);
                assertEquals(periodicQuery + " result: " + row.toString(), true, row.get("wasTerminated"));
            });
            fail("Should have terminated");
        } catch (Exception tfe) {
            // Guard against a null message: the original called
            // tfe.getMessage().contains(...) which could mask the real failure
            // with a NullPointerException.
            String message = tfe.getMessage();
            assertEquals(String.valueOf(message), true, message != null && message.contains("terminated"));
        }
    }
}
| apache-2.0 |
hongyan99/chart-faces | chartfaces/src/main/java/org/javaq/chartfaces/render/svg/Paths.java | 9431 | package org.javaq.chartfaces.render.svg;
import java.util.HashMap;
import java.util.Map;
import org.javaq.chartfaces.util.IStringBuilder;
import org.javaq.chartfaces.util.IStringBuilderFactory;
import org.javaq.chartfaces.util.NumberRoundingStringBuilder;
import org.javaq.chartfaces.util.NumberUtils;
/**
* A simple bean that holds the x, y coordinate pairs as double arrays.
*
* @author Hongyan Li
*
*/
public class Paths {
    /**
     * Factory that produces string builders which round numbers to a fixed
     * number of significant digits while appending.
     */
    private static final class NumberRoundingStringBuilderFactory implements
            IStringBuilderFactory {
        private final int sigDigit;

        private NumberRoundingStringBuilderFactory(int sigDigit) {
            this.sigDigit = sigDigit;
        }

        @Override
        public IStringBuilder newBuilder() {
            return new NumberRoundingStringBuilder(sigDigit);
        }
    }

    // Lazily-computed bounding box; invalidated whenever the coordinate arrays change.
    private double[][] box;
    private double[] xArray;
    private double[] yArray;

    /**
     * Construct a <code>Paths</code> from the passed in pair of coordinate
     * arrays. The two arrays must have the exact same length.
     *
     * @param x the x-coordinates of the paths. Cannot be null.
     * @param y the y-coordinates of the paths. Cannot be null.
     * @throws NullPointerException if either of the passed in is null.
     * @throws IllegalArgumentException if the passed in do not have the same array length.
     */
    public static Paths newPaths(final double[] x, final double[] y) {
        return new Paths(x, y);
    }

    public static Paths newStarPaths(final Paths polygon1, final Paths polygon2) {
        return new RegularStarPath0(polygon1, polygon2);
    }

    public static Paths newStarPaths(final Paths polygon, final int q) {
        return new RegularStarPath1(polygon, q);
    }

    private Paths(final double[] x, final double[] y) {
        if (x.length != y.length) {
            throw new IllegalArgumentException(
                    "xArray and yArray lengths are different!");
        }
        // Defensive copies so later caller mutation cannot affect this instance.
        this.xArray = x.clone();
        this.yArray = y.clone();
    }

    /**
     * Construct an empty paths. Internal use only.
     */
    protected Paths() {
        // internal use only
    }

    /**
     * Lazily find the imaginary rectangular box that encloses this paths.
     *
     * @return a double[][] describing the enclosing rectangle as produced by
     *         {@link NumberUtils#boxMe}. Each row is a fresh copy: the original
     *         returned a shallow {@code clone()} whose inner rows were shared
     *         with the cached box, so callers could corrupt the cache by
     *         mutating the returned arrays.
     */
    public double[][] boxMe() {
        if (this.box == null) {
            this.box = NumberUtils.boxMe(getXArray(), getYArray());
        }
        final double[][] copy = new double[this.box.length][];
        for (int i = 0; i < copy.length; i++) {
            copy[i] = this.box[i].clone();
        }
        return copy;
    }

    /**
     * Box the box of this so that all coordinates in this are boxed-in even
     * after truncation using the passed in toleranceHint.
     *
     * @param toleranceHint
     *            a hint for truncation of the coordinates. The actual tolerance
     *            will be such that as if the passed in was the tolerance when
     *            the range of the number was 0 to 1.
     * @return a double[][] that contains {xMin, xMax, xSigNum} and {yMin, yMax,
     *         ySigNum} where xMin and yMin are the lower bound of the box, xMax
     *         and yMax are the upper. xSigNum and ySigNum are the numbers that
     *         indicate the position of the most significant number (1 indicates
     *         10 is the most significant, -1, the first decimal point).
     */
    public double[][] boxMe(final double toleranceHint) {
        final double[][] theBox = boxMe();
        return NumberUtils.boxMe(theBox, toleranceHint);
    }

    double[] getXArray() {
        return this.xArray;
    }

    double[] getYArray() {
        return this.yArray;
    }

    /**
     * Renders this, treating the paths coordinates as absolute coordinates. The
     * passed in toleranceHint will be used to determine how the numbers will be
     * truncated.
     *
     * @param toleranceHint
     *            a hint for truncation of the coordinates. The actual tolerance
     *            will be such that as if the passed in was the tolerance when
     *            the range of the number was 0 to 1.
     * @param closed whether the path should rendered as closed.
     * @return the rendering result.
     */
    public String renderAbsolute(final double toleranceHint,
            final boolean closed) {
        final double[][] myBox = boxMe(toleranceHint);
        // NOTE(review): this truncates a throwaway copy of the coordinates and
        // then renders the ORIGINAL (untruncated) arrays. Preserved as-is for
        // behavioral compatibility, but it looks like the truncated copy was
        // meant to be rendered — TODO confirm against PathsUtil.truncateDecimals.
        PathsUtil.truncateDecimals(myBox[0][2], myBox[1][2], new Paths(
                getXArray().clone(), getYArray().clone()));
        return PathsUtil.getDefaultInstance()
                .renderAbsolutePaths(getXArray(), getYArray(), closed);
    }

    /**
     * Renders this, treating the paths coordinates as absolute coordinates,
     * rounding numbers to the given significant digit.
     *
     * @param sigDigit
     *            an integer indicates the significant digit of the numbers. -1
     *            is the 10th
     * @param closed whether the path should rendered as closed.
     * @return the rendering result.
     */
    public String renderAbsolute(final int sigDigit, final boolean closed) {
        final PathsUtil util = new PathsUtil(new NumberRoundingStringBuilderFactory(sigDigit));
        return util.renderAbsolutePaths(getXArray(), getYArray(), closed);
    }

    /** @return the number of coordinate pairs in this paths. */
    public int size() {
        return getXArray().length;
    }

    protected void setXArray(final double[] x) {
        this.xArray = x.clone();
        this.box = null; // invalidate cached bounding box
    }

    protected void setYArray(final double[] y) {
        this.yArray = y.clone();
        this.box = null; // invalidate cached bounding box
    }

    /**
     * A star-shaped polygon paths built by interleaving the vertices of two
     * polygons. Internal use only.
     *
     * @author Hongyan Li
     * @see <a href="http://en.wikipedia.org/wiki/Star-shaped_polygon">Star-shaped
     *      polygon</a>.
     */
    private static class RegularStarPath0 extends Paths {
        /**
         * Construct this from the passed in pair of polygon {@link Paths}.
         *
         * @param polygon1
         *            the {@link Paths} of a polygon. Cannot be null and must have
         *            the same size as <code>polygon2</code>.
         * @param polygon2
         *            the {@link Paths} of a polygon. Cannot be null and must have
         *            the same size as <code>polygon1</code>.
         * @throws NullPointerException
         *             if either <code>polygon1</code> or <code>polygon2</code> is null.
         * @throws IllegalArgumentException
         *             if the size of <code>polygon1</code> is different from that
         *             of <code>polygon2</code>.
         */
        public RegularStarPath0(final Paths polygon1, final Paths polygon2) {
            if (polygon1.size() != polygon2.size()) {
                throw new IllegalArgumentException(
                        "The passed in two polygons do not have the same size.");
            }
            final double[] x1 = polygon1.getXArray();
            final double[] y1 = polygon1.getYArray();
            final double[] x2 = polygon2.getXArray();
            final double[] y2 = polygon2.getYArray();
            final int sides = x1.length;
            final double[] xArray = new double[sides * 2];
            final double[] yArray = new double[sides * 2];
            int j = 0;
            // Interleave: even slots take polygon2's vertex, odd slots polygon1's.
            for (int i = 0; i < sides; i++) {
                j = 2 * i;
                xArray[j] = x2[i];
                xArray[j + 1] = x1[i];
                yArray[j] = y2[i];
                yArray[j + 1] = y1[i];
            }
            setXArray(xArray);
            setYArray(yArray);
        }
    }

    /**
     * A p/q star polygon paths: the vertices of a polygon re-ordered by a step
     * of q. Internal use only.
     *
     * @author Hongyan Li
     * @see <a href="http://en.wikipedia.org/wiki/Star_polygon">Star polygon</a>.
     */
    private static class RegularStarPath1 extends Paths {
        // Cache of vertex re-ordering maps keyed by "<size>_<q>"; we should not
        // have too many of these int[] objects.
        // NOTE(review): plain HashMap — not thread-safe if stars are built
        // concurrently; TODO confirm single-threaded construction.
        private static Map<String, int[]> indexMaps = new HashMap<String, int[]>();

        /**
         * Construct this from the passed in polygon {@link Paths} and a q value.
         *
         * @param polygon
         *            the {@link Paths} for a polygon. Size must be greater than 2.
         * @param q
         *            a q value. Must be greater than 0 and not greater than
         *            floor((p-1)/2), where p is the size of the passed in
         *            <code>polygon</code>.
         * @throws IllegalArgumentException
         *             if the value of q is less than 1 or greater than floor of (p-1)/2.
         * @throws NullPointerException
         *             if <code>polygon</code> is null.
         */
        public RegularStarPath1(final Paths polygon, final int q) {
            if (q < 1 || q > (polygon.size() - 1) / 2) {
                throw new IllegalArgumentException("Invalid value for p.");
            }
            final int[] indexMap = getCreateIndexMap(polygon, q);
            final double[] xA = polygon.getXArray();
            final double[] yA = polygon.getYArray();
            final double[] xArray = new double[xA.length];
            final double[] yArray = new double[yA.length];
            for (int i = 0; i < xArray.length; i++) {
                xArray[i] = xA[indexMap[i]];
                yArray[i] = yA[indexMap[i]];
            }
            setXArray(xArray);
            setYArray(yArray);
        }

        /**
         * Returns (creating and caching on first use) the index permutation
         * that visits every q-th vertex of a polygon with the given size.
         */
        private int[] getCreateIndexMap(final Paths polygon, final int q) {
            final int len = polygon.getXArray().length;
            final String key = "" + len + '_' + q;
            int[] map = RegularStarPath1.indexMaps.get(key);
            if (map == null) {
                // create the map and add to the collection
                map = new int[len];
                int k = 0;
                for (int j = 0; j < q; j++) {
                    for (int i = j; i < len; i += q) {
                        map[k++] = i;
                    }
                }
                RegularStarPath1.indexMaps.put(key, map);
            }
            return map;
        }
    }
}
| apache-2.0 |
mwringe/fabric8 | components/kubernetes-api/src/main/java/io/fabric8/kubernetes/api/Controller.java | 43576 | /**
* Copyright 2005-2015 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.kubernetes.api;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.fabric8.kubernetes.api.extensions.Templates;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.fabric8.kubernetes.api.model.KubernetesList;
import io.fabric8.kubernetes.api.model.Namespace;
import io.fabric8.kubernetes.api.model.ObjectMeta;
import io.fabric8.kubernetes.api.model.Pod;
import io.fabric8.kubernetes.api.model.PodSpec;
import io.fabric8.kubernetes.api.model.PodTemplateSpec;
import io.fabric8.kubernetes.api.model.ReplicationController;
import io.fabric8.kubernetes.api.model.ReplicationControllerSpec;
import io.fabric8.kubernetes.api.model.Secret;
import io.fabric8.kubernetes.api.model.SecretVolumeSource;
import io.fabric8.kubernetes.api.model.Service;
import io.fabric8.kubernetes.api.model.ServiceAccount;
import io.fabric8.kubernetes.api.model.Volume;
import io.fabric8.kubernetes.client.DefaultKubernetesClient;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.OpenShiftClient;
import io.fabric8.openshift.api.model.BuildConfig;
import io.fabric8.openshift.api.model.DeploymentConfig;
import io.fabric8.openshift.api.model.ImageStream;
import io.fabric8.openshift.api.model.OAuthClient;
import io.fabric8.openshift.api.model.Route;
import io.fabric8.openshift.api.model.Template;
import io.fabric8.utils.Files;
import io.fabric8.utils.IOHelpers;
import io.fabric8.utils.Objects;
import io.fabric8.utils.Strings;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import static io.fabric8.kubernetes.api.KubernetesHelper.getName;
import static io.fabric8.kubernetes.api.KubernetesHelper.getObjectId;
import static io.fabric8.kubernetes.api.KubernetesHelper.getOrCreateMetadata;
import static io.fabric8.kubernetes.api.KubernetesHelper.loadJson;
import static io.fabric8.kubernetes.api.KubernetesHelper.summaryText;
import static io.fabric8.kubernetes.api.KubernetesHelper.toItemList;
import static io.fabric8.kubernetes.api.KubernetesHelper.toOpenshift;
/**
* Applies DTOs to the current Kubernetes master
*/
public class Controller {
private static final transient Logger LOG = LoggerFactory.getLogger(Controller.class);
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
// Client used for all REST calls against the Kubernetes/OpenShift master.
private final KubernetesClient kubernetes;
private Map<String, Pod> podMap;
private Map<String, ReplicationController> replicationControllerMap;
private Map<String, Service> serviceMap;
// When true, apply failures are rethrown; otherwise they are only reported.
private boolean throwExceptionOnError = true;
// When true, missing resources are created; when false creation is skipped with a warning.
private boolean allowCreate = true;
// When true, changed resources are deleted and recreated rather than updated in place.
private boolean recreateMode;
// When true, only Service resources are processed; other kinds are ignored.
private boolean servicesOnlyMode;
// When true, Service resources are skipped entirely.
private boolean ignoreServiceMode;
// OAuthClients are cluster-wide; by default a running one is left untouched.
private boolean ignoreRunningOAuthClients = true;
// When true, templates are expanded client-side instead of being posted to the master.
private boolean processTemplatesLocally;
// When set, generated entities are also written as JSON files under this directory.
private File logJsonDir;
// Base directory used to relativize file paths in log output.
private File basedir;
private boolean failOnMissingParameterValue;
private boolean supportOAuthClients;
private boolean deletePodsOnReplicationControllerUpdate = true;
// NOTE(review): field name is misspelled ("namesapce"); left as-is because
// accessors elsewhere in this class (outside this view) may reference it.
private String namesapce = KubernetesHelper.defaultNamespace();
/**
 * Creates a controller that talks to the default Kubernetes master.
 */
public Controller() {
this(new DefaultKubernetesClient());
}
/**
 * Creates a controller using the supplied Kubernetes client.
 *
 * @param kubernetes the client used for all REST operations
 */
public Controller(KubernetesClient kubernetes) {
this.kubernetes = kubernetes;
}
/**
 * Applies the resource definitions in the given file, dispatching on the
 * file extension (".yaml" or ".json", case-insensitive).
 *
 * @param file the file to apply
 * @return the (currently always empty) result string
 * @throws IllegalArgumentException if the extension is neither yaml nor json
 */
public String apply(File file) throws Exception {
    final String ext = Files.getFileExtension(file);
    if ("json".equalsIgnoreCase(ext)) {
        return applyJson(file);
    }
    if ("yaml".equalsIgnoreCase(ext)) {
        return applyYaml(file);
    }
    throw new IllegalArgumentException("Unknown file type " + ext);
}
/**
 * Applies the given JSON to the underlying REST APIs in a single operation without needing to explicitly parse first.
 *
 * @param json raw JSON bytes describing one or more entities
 * @return an empty string (kept for interface compatibility)
 */
public String applyJson(byte[] json) throws Exception {
    apply(loadJson(json), "REST call");
    return "";
}
/**
 * Applies the given JSON to the underlying REST APIs in a single operation without needing to explicitly parse first.
 *
 * @param json a JSON document describing one or more entities
 * @return an empty string (kept for interface compatibility)
 */
public String applyJson(String json) throws Exception {
    apply(loadJson(json), "REST call");
    return "";
}
/**
 * Applies the given JSON to the underlying REST APIs in a single operation without needing to explicitly parse first.
 *
 * @param json a file containing a JSON document
 * @return an empty string (kept for interface compatibility)
 */
public String applyJson(File json) throws Exception {
    apply(loadJson(json), "REST call");
    return "";
}
/**
 * Applies the given YAML to the underlying REST APIs in a single operation without needing to explicitly parse first.
 *
 * @param yaml a YAML document describing one or more entities
 * @return an empty string (kept for interface compatibility)
 */
public String applyYaml(String yaml) throws Exception {
    // Convert YAML -> JSON, then reuse the common JSON apply path.
    apply(loadJson(convertYamlToJson(yaml)), "REST call");
    return "";
}
/**
 * Applies the given YAML to the underlying REST APIs in a single operation without needing to explicitly parse first.
 *
 * @param yaml a file containing a YAML document
 * @return an empty string (kept for interface compatibility)
 */
public String applyYaml(File yaml) throws Exception {
    // Convert YAML -> JSON, then reuse the common JSON apply path.
    apply(loadJson(convertYamlToJson(yaml)), "REST call");
    return "";
}
/**
 * Converts a YAML document into its JSON string representation.
 * Assumes the top-level YAML node is a mapping.
 *
 * @param yamlString the YAML text to convert
 * @return the equivalent JSON text
 */
// Scoped suppression: SnakeYAML's load() returns Object; the top-level node
// is assumed to be a mapping — TODO confirm all callers pass mapping-rooted YAML.
@SuppressWarnings("unchecked")
private String convertYamlToJson(String yamlString) throws FileNotFoundException {
    // Note: FileNotFoundException is never actually thrown here; the throws
    // clause is kept for signature compatibility with existing callers.
    Yaml yaml = new Yaml();
    Map<String, Object> map = (Map<String, Object>) yaml.load(yamlString);
    JSONObject jsonObject = new JSONObject(map);
    return jsonObject.toString();
}
/**
 * Converts the YAML document in the given file into its JSON string
 * representation. Assumes the top-level YAML node is a mapping.
 *
 * @param yamlFile the file containing YAML text
 * @return the equivalent JSON text
 * @throws IOException if the file cannot be read (FileNotFoundException,
 *         which the original declared, is a subclass, so callers are unaffected)
 */
private String convertYamlToJson(File yamlFile) throws IOException {
    Yaml yaml = new Yaml();
    // try-with-resources: the original opened a FileInputStream and never
    // closed it, leaking a file handle on every call.
    try (FileInputStream fstream = new FileInputStream(yamlFile)) {
        @SuppressWarnings("unchecked")
        Map<String, Object> map = (Map<String, Object>) yaml.load(fstream);
        JSONObject jsonObject = new JSONObject(map);
        return jsonObject.toString();
    }
}
/**
 * Applies the given JSON to the underlying REST APIs in a single operation without needing to explicitly parse first.
 *
 * @param json a stream containing a JSON document; not closed by this method
 * @return an empty string (kept for interface compatibility)
 */
public String applyJson(InputStream json) throws Exception {
    apply(loadJson(json), "REST call");
    return "";
}
/**
 * Applies the given DTOs onto the Kubernetes master. Plain lists are applied
 * element by element (recursively), KubernetesList items are delegated to
 * {@link #applyList}, and anything else is treated as a single entity.
 * Null input is a no-op.
 */
public void apply(Object dto, String sourceName) throws Exception {
    if (dto instanceof List) {
        for (Object element : (List) dto) {
            // Guard against a self-referencing list to avoid infinite recursion.
            if (element == dto) {
                LOG.warn("Found recursive nested object for " + dto + " of class: " + dto.getClass().getName());
                continue;
            }
            apply(element, sourceName);
        }
    } else if (dto instanceof KubernetesList) {
        applyList((KubernetesList) dto, sourceName);
    } else if (dto != null) {
        applyEntity(dto, sourceName);
    }
}
/**
 * Applies a single entity DTO onto the Kubernetes master by dispatching on its
 * concrete type to the matching apply* method.
 *
 * @param dto        the entity to apply; must be one of the supported kinds below
 * @param sourceName human-readable origin of the DTO, used in log/error messages
 * @throws IllegalArgumentException if the entity type is not supported
 */
public void applyEntity(Object dto, String sourceName) throws Exception {
if (dto instanceof Pod) {
applyPod((Pod) dto, sourceName);
} else if (dto instanceof ReplicationController) {
applyReplicationController((ReplicationController) dto, sourceName);
} else if (dto instanceof Service) {
applyService((Service) dto, sourceName);
} else if (dto instanceof Namespace) {
applyNamespace((Namespace) dto);
} else if (dto instanceof Route) {
applyRoute((Route) dto, sourceName);
} else if (dto instanceof BuildConfig) {
applyBuildConfig((BuildConfig) dto, sourceName);
} else if (dto instanceof DeploymentConfig) {
applyDeploymentConfig((DeploymentConfig) dto, sourceName);
} else if (dto instanceof ImageStream) {
applyImageStream((ImageStream) dto, sourceName);
} else if (dto instanceof OAuthClient) {
applyOAuthClient((OAuthClient) dto, sourceName);
} else if (dto instanceof Template) {
applyTemplate((Template) dto, sourceName);
} else if (dto instanceof ServiceAccount) {
applyServiceAccount((ServiceAccount) dto, sourceName);
} else if (dto instanceof Secret) {
applySecret((Secret) dto, sourceName);
} else {
throw new IllegalArgumentException("Unknown entity type " + dto);
}
}
/**
 * Creates or updates the given OAuthClient, unless OAuthClient support is
 * disabled. OAuthClients are cluster-scoped (not namespaced), so by default a
 * running one is left untouched (see {@link #isIgnoreRunningOAuthClients()}).
 *
 * @param entity     the OAuthClient to apply
 * @param sourceName human-readable origin, used in log/error messages
 */
public void applyOAuthClient(OAuthClient entity, String sourceName) {
    OpenShiftClient openShiftClient = toOpenshift(kubernetes);
    if (supportOAuthClients) {
        String id = getName(entity);
        Objects.notNull(id, "No name for " + entity + " " + sourceName);
        if (isServicesOnlyMode()) {
            LOG.debug("Only processing Services right now so ignoring OAuthClient: " + id);
            return;
        }
        OAuthClient old = openShiftClient.oAuthClients().withName(id).getIfExists();
        if (isRunning(old)) {
            if (isIgnoreRunningOAuthClients()) {
                LOG.info("Not updating the OAuthClient which are shared across namespaces as its already running");
                return;
            }
            if (UserConfigurationCompare.configEqual(entity, old)) {
                LOG.info("OAuthClient hasn't changed so not doing anything");
            } else {
                if (isRecreateMode()) {
                    openShiftClient.oAuthClients().withName(id).delete();
                    doCreateOAuthClient(entity, sourceName);
                } else {
                    try {
                        Object answer = openShiftClient.oAuthClients().withName(id).update(entity);
                        // Fixed copy/paste: these two messages previously said "pod".
                        LOG.info("Updated OAuthClient result: " + answer);
                    } catch (Exception e) {
                        onApplyError("Failed to update OAuthClient from " + sourceName + ". " + e + ". " + entity, e);
                    }
                }
            }
        } else {
            if (!isAllowCreate()) {
                LOG.warn("Creation disabled so not creating an OAuthClient from " + sourceName + " name " + getName(entity));
            } else {
                doCreateOAuthClient(entity, sourceName);
            }
        }
    }
}
/**
 * Creates the given OAuthClient, reporting (and possibly rethrowing) failures
 * via {@link #onApplyError}.
 */
protected void doCreateOAuthClient(OAuthClient entity, String sourceName) {
    try {
        // The created object is not needed; the original stored it in an
        // unused local variable.
        toOpenshift(kubernetes).oAuthClients().create(entity);
    } catch (Exception e) {
        onApplyError("Failed to create OAuthClient from " + sourceName + ". " + e + ". " + entity, e);
    }
}
/**
 * Creates/updates the template and processes it returning the processed DTOs.
 * When {@link #isProcessTemplatesLocally()} is true the server-side step is
 * skipped entirely and the template is only expanded client-side.
 *
 * @param entity     the template to apply and process
 * @param sourceName human-readable origin, used in log/error messages
 * @return the locally-processed template objects (see {@link #processTemplate})
 */
public Object applyTemplate(Template entity, String sourceName) throws Exception {
    if (!isProcessTemplatesLocally()) {
        String namespace = getNamespace();
        String id = getName(entity);
        Objects.notNull(id, "No name for " + entity + " " + sourceName);
        Template old = kubernetes.templates().inNamespace(namespace).withName(id).getIfExists();
        if (isRunning(old)) {
            if (UserConfigurationCompare.configEqual(entity, old)) {
                LOG.info("Template hasn't changed so not doing anything");
            } else {
                // TODO seems you can't update templates right now, so a changed
                // template is always deleted and recreated regardless of
                // isRecreateMode(). (The original computed the flag into a
                // local and then unconditionally overwrote it with true,
                // leaving the update branch unreachable dead code.)
                kubernetes.templates().inNamespace(namespace).withName(id).delete();
                doCreateTemplate(entity, namespace, sourceName);
            }
        } else {
            if (!isAllowCreate()) {
                LOG.warn("Creation disabled so not creating a entity from " + sourceName + " namespace " + namespace + " name " + getName(entity));
            } else {
                doCreateTemplate(entity, namespace, sourceName);
            }
        }
    }
    return processTemplate(entity, sourceName);
}
/**
 * Creates the given template in the given namespace, logging the generated
 * entity and reporting failures via {@link #onApplyError}.
 */
protected void doCreateTemplate(Template entity, String namespace, String sourceName) {
    LOG.info("Creating a template from " + sourceName + " namespace " + namespace + " name " + getName(entity));
    try {
        logGeneratedEntity("Created template: ", namespace, entity,
                kubernetes.templates().inNamespace(namespace).create(entity));
    } catch (Exception e) {
        onApplyError("Failed to template entity from " + sourceName + ". " + e + ". " + entity, e);
    }
}
/**
 * Creates or updates the given ServiceAccount in the current namespace.
 * (Note: unlike the original javadoc suggested, nothing is returned.)
 *
 * @param serviceAccount the service account to apply
 * @param sourceName     human-readable origin, used in log/error messages
 */
public void applyServiceAccount(ServiceAccount serviceAccount, String sourceName) throws Exception {
String namespace = getNamespace();
String id = getName(serviceAccount);
Objects.notNull(id, "No name for " + serviceAccount + " " + sourceName);
if (isServicesOnlyMode()) {
LOG.debug("Only processing Services right now so ignoring ServiceAccount: " + id);
return;
}
// Look up any existing service account with the same name to decide
// between create, recreate, and in-place update.
ServiceAccount old = kubernetes.serviceAccounts().inNamespace(namespace).withName(id).getIfExists();
if (isRunning(old)) {
if (UserConfigurationCompare.configEqual(serviceAccount, old)) {
LOG.info("ServiceAccount hasn't changed so not doing anything");
} else {
if (isRecreateMode()) {
kubernetes.serviceAccounts().inNamespace(namespace).withName(id).delete();
doCreateServiceAccount(serviceAccount, namespace, sourceName);
} else {
LOG.info("Updating a service account from " + sourceName);
try {
Object answer = kubernetes.serviceAccounts().inNamespace(namespace).withName(id).update(serviceAccount);
logGeneratedEntity("Updated service account: ", namespace, serviceAccount, answer);
} catch (Exception e) {
onApplyError("Failed to update service account from " + sourceName + ". " + e + ". " + serviceAccount, e);
}
}
}
} else {
if (!isAllowCreate()) {
LOG.warn("Creation disabled so not creating a service from " + sourceName + " namespace " + namespace + " name " + getName(serviceAccount));
} else {
doCreateServiceAccount(serviceAccount, namespace, sourceName);
}
}
}
/**
 * Creates the given service account, falling back to the controller's default
 * namespace when the supplied one is blank. Failures are reported via
 * {@link #onApplyError}.
 */
protected void doCreateServiceAccount(ServiceAccount serviceAccount, String namespace, String sourceName) {
    LOG.info("Creating a service account from " + sourceName + " namespace " + namespace + " name " + getName(serviceAccount));
    try {
        // Pick the effective namespace once instead of branching the whole call.
        String targetNamespace = Strings.isNotBlank(namespace) ? namespace : getNamespace();
        Object answer = kubernetes.serviceAccounts().inNamespace(targetNamespace).create(serviceAccount);
        logGeneratedEntity("Created service account: ", namespace, serviceAccount, answer);
    } catch (Exception e) {
        onApplyError("Failed to create service account from " + sourceName + ". " + e + ". " + serviceAccount, e);
    }
}
/**
 * Creates or updates the given Secret in the current namespace.
 *
 * @param secret     the secret to apply
 * @param sourceName human-readable origin, used in log/error messages
 */
public void applySecret(Secret secret, String sourceName) throws Exception {
    String namespace = getNamespace();
    String id = getName(secret);
    Objects.notNull(id, "No name for " + secret + " " + sourceName);
    if (isServicesOnlyMode()) {
        LOG.debug("Only processing Services right now so ignoring Secrets: " + id);
        return;
    }
    Secret old = kubernetes.secrets().inNamespace(namespace).withName(id).getIfExists();
    // check if the secret already exists or not
    if (isRunning(old)) {
        // if the secret already exists and is the same, then do nothing
        if (UserConfigurationCompare.configEqual(secret, old)) {
            LOG.info("Secret hasn't changed so not doing anything");
            return;
        }
        if (isRecreateMode()) {
            kubernetes.secrets().inNamespace(namespace).withName(id).delete();
            doCreateSecret(secret, namespace, sourceName);
        } else {
            // Fixed log-message typo: was "Updateing".
            LOG.info("Updating a secret from " + sourceName);
            try {
                Object answer = kubernetes.secrets().inNamespace(namespace).withName(id).update(secret);
                logGeneratedEntity("Updated secret:", namespace, secret, answer);
            } catch (Exception e) {
                onApplyError("Failed to update secret from " + sourceName + ". " + e + ". " + secret, e);
            }
        }
    } else {
        if (!isAllowCreate()) {
            LOG.warn("Creation disabled so not creating a secret from " + sourceName + " namespace " + namespace + " name " + getName(secret));
        } else {
            doCreateSecret(secret, namespace, sourceName);
        }
    }
}
/**
 * Creates the given secret, falling back to the controller's default namespace
 * when the supplied one is blank. Failures are reported via {@link #onApplyError}.
 */
protected void doCreateSecret(Secret secret, String namespace, String sourceName) {
    LOG.info("Creating a secret from " + sourceName + " namespace " + namespace + " name " + getName(secret));
    try {
        // Pick the effective namespace once instead of branching the whole call.
        String targetNamespace = Strings.isNotBlank(namespace) ? namespace : getNamespace();
        Object answer = kubernetes.secrets().inNamespace(targetNamespace).create(secret);
        logGeneratedEntity("Created secret: ", namespace, secret, answer);
    } catch (Exception e) {
        onApplyError("Failed to create secret from " + sourceName + ". " + e + ". " + secret, e);
    }
}
/**
 * Logs the result of a create/update operation. When {@code logJsonDir} is
 * set, the result is also serialized to JSON and written to a file named
 * "&lt;kind&gt;-&lt;name&gt;.json" under a per-namespace subdirectory (with a
 * numeric suffix appended if that file already exists), and the log message
 * then references the file location instead of the raw result.
 *
 * @param message   log-message prefix
 * @param namespace namespace used for the per-namespace subdirectory
 * @param entity    the entity that was applied (source of kind/name)
 * @param result    the server's response (logged directly or serialized)
 */
protected void logGeneratedEntity(String message, String namespace, HasMetadata entity, Object result) {
if (logJsonDir != null) {
File namespaceDir = new File(logJsonDir, namespace);
namespaceDir.mkdirs();
String kind = KubernetesHelper.getKind(entity);
String name = KubernetesHelper.getName(entity);
if (Strings.isNotBlank(kind)) {
name = kind.toLowerCase() + "-" + name;
}
if (Strings.isNullOrBlank(name)) {
LOG.warn("No name for the entity " + entity);
} else {
String fileName = name + ".json";
File file = new File(namespaceDir, fileName);
if (file.exists()) {
// Avoid clobbering an earlier dump: probe name-1.json, name-2.json, ...
// until an unused file name is found.
int idx = 1;
while (true) {
fileName = name + "-" + idx++ + ".json";
file = new File(namespaceDir, fileName);
if (!file.exists()) {
break;
}
}
}
String text;
if (result instanceof String) {
text = result.toString();
} else {
try {
text = KubernetesHelper.toJson(result);
} catch (JsonProcessingException e) {
// Fall back to toString() so the dump file is still written.
LOG.warn("Could not convert " + result + " to JSON: " + e, e);
if (result != null) {
text = result.toString();
} else {
text = "null";
}
}
}
try {
IOHelpers.writeFully(file, text);
Object fileLocation = file;
// Prefer a path relative to basedir in the log message when possible.
if (basedir != null) {
String path = Files.getRelativePath(basedir, file);
if (path != null) {
fileLocation = Strings.stripPrefix(path, "/");
}
}
LOG.info(message + fileLocation);
} catch (IOException e) {
LOG.warn("Failed to write to file " + file + ". " + e, e);
}
// File written and logged; skip the plain log line below.
return;
}
}
LOG.info(message + result);
}
/**
 * Expands the given template client-side, substituting parameter values.
 *
 * @param entity     the template to process
 * @param sourceName human-readable origin, used in error messages
 * @return the processed objects, or null if processing failed (the failure is
 *         reported via {@link #onApplyError})
 */
public Object processTemplate(Template entity, String sourceName) {
    // The original carried a large commented-out block that posted the
    // template to the server instead; it has been removed as dead code —
    // processing is done client-side only.
    try {
        return Templates.processTemplatesLocally(entity, failOnMissingParameterValue);
    } catch (IOException e) {
        onApplyError("Failed to process template " + sourceName + ". " + e + ". " + entity, e);
        return null;
    }
}
/**
 * Logs a one-line summary for the given resource. Templates are expanded and
 * their contained objects summarized recursively; anything convertible to an
 * item list has each item summarized in turn.
 *
 * @param kubeResource the resource (or list of resources) to summarize; may be null
 */
protected void printSummary(Object kubeResource) throws IOException {
if (kubeResource != null) {
LOG.debug(" " + kubeResource.getClass().getSimpleName() + " " + kubeResource);
}
if (kubeResource instanceof Template) {
Template template = (Template) kubeResource;
String id = getName(template);
LOG.info(" Template " + id + " " + summaryText(template));
// Recurse into the template's contained objects, then stop.
printSummary(template.getObjects());
return;
}
List<HasMetadata> list = toItemList(kubeResource);
for (HasMetadata object : list) {
if (object != null) {
// Guard against the list containing itself, which would recurse forever.
if (object == list) {
LOG.warn("Ignoring recursive list " + list);
continue;
} else if (object instanceof List) {
printSummary(object);
} else {
String kind = object.getClass().getSimpleName();
String id = getObjectId(object);
LOG.info(" " + kind + " " + id + " " + summaryText(object));
}
}
}
}
/**
 * Creates the given Route if it does not already exist. Existing routes are
 * never updated by this method.
 *
 * @param entity     the route to apply
 * @param sourceName human-readable origin, used in log/error messages
 */
public void applyRoute(Route entity, String sourceName) {
    OpenShiftClient openShiftClient = toOpenshift(kubernetes);
    String id = getName(entity);
    Objects.notNull(id, "No name for " + entity + " " + sourceName);
    String namespace = KubernetesHelper.getNamespace(entity);
    if (Strings.isNullOrBlank(namespace)) {
        namespace = getNamespace();
    }
    // Early return when the route already exists — create-only semantics.
    if (openShiftClient.routes().inNamespace(namespace).withName(id).getIfExists() != null) {
        return;
    }
    try {
        LOG.info("Creating Route " + namespace + ":" + id + " " + KubernetesHelper.summaryText(entity));
        openShiftClient.routes().inNamespace(namespace).create(entity);
    } catch (Exception e) {
        onApplyError("Failed to create Route from " + sourceName + ". " + e + ". " + entity, e);
    }
}
/**
 * Creates or updates the given BuildConfig, using the entity's own namespace
 * when present and the controller's default namespace otherwise.
 *
 * @param entity     the build config to apply
 * @param sourceName human-readable origin, used in log/error messages
 */
public void applyBuildConfig(BuildConfig entity, String sourceName) {
String id = getName(entity);
OpenShiftClient openShiftClient = toOpenshift(kubernetes);
Objects.notNull(id, "No name for " + entity + " " + sourceName);
String namespace = KubernetesHelper.getNamespace(entity);
if (Strings.isNullOrBlank(namespace)) {
namespace = getNamespace();
}
BuildConfig old = openShiftClient.buildConfigs().inNamespace(namespace).withName(id).getIfExists();
if (isRunning(old)) {
if (UserConfigurationCompare.configEqual(entity, old)) {
LOG.info("BuildConfig hasn't changed so not doing anything");
} else {
if (isRecreateMode()) {
openShiftClient.buildConfigs().inNamespace(namespace).withName(id).delete();
doCreateBuildConfig(entity, namespace, sourceName);
} else {
LOG.info("Updating BuildConfig from " + sourceName);
try {
// Copy the existing resourceVersion onto the entity before updating —
// presumably so the server accepts the update against the current
// revision (optimistic concurrency); TODO confirm against client docs.
String resourceVersion = KubernetesHelper.getResourceVersion(old);
ObjectMeta metadata = KubernetesHelper.getOrCreateMetadata(entity);
metadata.setNamespace(namespace);
metadata.setResourceVersion(resourceVersion);
Object answer = openShiftClient.buildConfigs().inNamespace(namespace).withName(id).update(entity);
logGeneratedEntity("Updated BuildConfig: ", namespace, entity, answer);
} catch (Exception e) {
onApplyError("Failed to update BuildConfig from " + sourceName + ". " + e + ". " + entity, e);
}
}
}
} else {
if (!isAllowCreate()) {
LOG.warn("Creation disabled so not creating BuildConfig from " + sourceName + " namespace " + namespace + " name " + getName(entity));
} else {
doCreateBuildConfig(entity, namespace, sourceName);
}
}
}
public void doCreateBuildConfig(BuildConfig entity, String namespace ,String sourceName) {
try {
toOpenshift(kubernetes).buildConfigs().inNamespace(namespace).create(entity);
} catch (Exception e) {
onApplyError("Failed to create BuildConfig from " + sourceName + ". " + e, e);
}
}
public void applyDeploymentConfig(DeploymentConfig entity, String sourceName) {
try {
toOpenshift(kubernetes).deploymentConfigs().inNamespace(getNamespace()).create(entity);
} catch (Exception e) {
onApplyError("Failed to create DeploymentConfig from " + sourceName + ". " + e, e);
}
}
public void applyImageStream(ImageStream entity, String sourceName) {
try {
toOpenshift(kubernetes).imageStreams().inNamespace(getNamespace()).create(entity);
} catch (Exception e) {
onApplyError("Failed to create BuildConfig from " + sourceName + ". " + e, e);
}
}
public void applyList(KubernetesList list, String sourceName) throws Exception {
List<HasMetadata> entities = list.getItems();
if (entities != null) {
for (Object entity : entities) {
applyEntity(entity, sourceName);
}
}
}
    /**
     * Creates or updates the given Service in the controller's default namespace.
     * Skipped entirely when ignoreServiceMode is on (e.g. to preserve portal IPs).
     *
     * @param service    the Service to apply; must carry a name in its metadata
     * @param sourceName human-readable description of where the entity came from, used in log/error messages
     */
    public void applyService(Service service, String sourceName) throws Exception {
        String namespace = getNamespace();
        String id = getName(service);
        Objects.notNull(id, "No name for " + service + " " + sourceName);
        if (isIgnoreServiceMode()) {
            LOG.debug("Ignoring Service: " + namespace + ":" + id);
            return;
        }
        Service old = kubernetes.services().inNamespace(namespace).withName(id).getIfExists();
        if (isRunning(old)) {
            if (UserConfigurationCompare.configEqual(service, old)) {
                LOG.info("Service hasn't changed so not doing anything");
            } else {
                if (isRecreateMode()) {
                    // Recreate mode: delete then create instead of updating in place.
                    kubernetes.services().inNamespace(namespace).withName(id).delete();
                    doCreateService(service, namespace, sourceName);
                } else {
                    LOG.info("Updating a service from " + sourceName);
                    try {
                        Object answer = kubernetes.services().inNamespace(namespace).withName(id).update(service);
                        logGeneratedEntity("Updated service: ", namespace, service, answer);
                    } catch (Exception e) {
                        onApplyError("Failed to update controller from " + sourceName + ". " + e + ". " + service, e);
                    }
                }
            }
        } else {
            if (!isAllowCreate()) {
                LOG.warn("Creation disabled so not creating a service from " + sourceName + " namespace " + namespace + " name " + getName(service));
            } else {
                doCreateService(service, namespace, sourceName);
            }
        }
    }
protected void doCreateService(Service service, String namespace, String sourceName) {
LOG.info("Creating a service from " + sourceName + " namespace " + namespace + " name " + getName(service));
try {
Object answer;
if (Strings.isNotBlank(namespace)) {
answer = kubernetes.services().inNamespace(namespace).create(service);
} else {
answer = kubernetes.services().inNamespace(getNamespace()).create(service);
}
logGeneratedEntity("Created service: ", namespace, service, answer);
} catch (Exception e) {
onApplyError("Failed to create service from " + sourceName + ". " + e + ". " + service, e);
}
}
public void applyNamespace(String namespaceName) {
Namespace entity = new Namespace();
getOrCreateMetadata(entity).setName(namespaceName);
applyNamespace(entity);
}
    /**
     * Creates the given Namespace on the server if it does not already exist.
     * Existing namespaces are left untouched (no update/recreate path).
     */
    public void applyNamespace(Namespace entity) {
        // Both values below come from the same metadata name; kept separate to
        // mirror the original structure (namespace for logging, name for lookup).
        String namespace = getOrCreateMetadata(entity).getName();
        LOG.info("Creating a namespace " + namespace);
        String name = getName(entity);
        Objects.notNull(name, "No name for " + entity );
        Namespace old = kubernetes.namespaces().withName(name).getIfExists();
        if (!isRunning(old)) {
            try {
                Object answer = kubernetes.namespaces().create(entity);
                logGeneratedEntity("Created namespace: ", namespace, entity, answer);
            } catch (Exception e) {
                onApplyError("Failed to create namespace. " + e + ". " + entity, e);
            }
        }
    }
    /**
     * Creates or updates the given ReplicationController in the controller's
     * default namespace. Skipped when servicesOnlyMode is on.
     *
     * @param replicationController the RC to apply; must carry a name in its metadata
     * @param sourceName            human-readable description of where the entity came from
     */
    public void applyReplicationController(ReplicationController replicationController, String sourceName) throws Exception {
        String namespace = getNamespace();
        String id = getName(replicationController);
        Objects.notNull(id, "No name for " + replicationController + " " + sourceName);
        if (isServicesOnlyMode()) {
            LOG.debug("Only processing Services right now so ignoring ReplicationController: " + namespace + ":" + id);
            return;
        }
        ReplicationController old = kubernetes.replicationControllers().inNamespace(namespace).withName(id).getIfExists();
        if (isRunning(old)) {
            if (UserConfigurationCompare.configEqual(replicationController, old)) {
                LOG.info("ReplicationController hasn't changed so not doing anything");
            } else {
                if (isRecreateMode()) {
                    // Recreate mode: delete then create instead of updating in place.
                    kubernetes.replicationControllers().inNamespace(namespace).withName(id).delete();
                    doCreateReplicationController(replicationController, namespace, sourceName);
                } else {
                    LOG.info("Updating replicationController from " + sourceName + " namespace " + namespace + " name " + getName(replicationController));
                    try {
                        Object answer = kubernetes.replicationControllers().inNamespace(namespace).withName(id).update(replicationController);
                        logGeneratedEntity("Updated replicationController: ", namespace, replicationController, answer);
                        if (deletePodsOnReplicationControllerUpdate) {
                            // NOTE(review): this deletes the ReplicationController itself,
                            // yet the log message below claims pods are being deleted —
                            // confirm whether the intent was to delete the RC's *pods*
                            // so they restart with the new configuration.
                            kubernetes.replicationControllers().inNamespace(namespace).withName(KubernetesHelper.getName(replicationController)).delete();
                            LOG.info("Deleting any pods for the replication controller to ensure they use the new configuration");
                        } else {
                            LOG.info("Warning not deleted any pods so they could well be running with the old configuration!");
                        }
                    } catch (Exception e) {
                        onApplyError("Failed to update replicationController from " + sourceName + ". " + e + ". " + replicationController, e);
                    }
                }
            }
        } else {
            if (!isAllowCreate()) {
                LOG.warn("Creation disabled so not creating a replicationController from " + sourceName + " namespace " + namespace + " name " + getName(replicationController));
            } else {
                doCreateReplicationController(replicationController, namespace, sourceName);
            }
        }
    }
protected void doCreateReplicationController(ReplicationController replicationController, String namespace, String sourceName) {
LOG.info("Creating a replicationController from " + sourceName + " namespace " + namespace + " name " + getName(replicationController));
try {
// lets check that if secrets are required they exist
ReplicationControllerSpec spec = replicationController.getSpec();
if (spec != null) {
PodTemplateSpec template = spec.getTemplate();
if (template != null) {
PodSpec podSpec = template.getSpec();
validatePodSpec(podSpec, namespace);
}
}
Object answer;
if (Strings.isNotBlank(namespace)) {
answer = kubernetes.replicationControllers().inNamespace(namespace).create(replicationController);
} else {
answer = kubernetes.replicationControllers().inNamespace(getNamespace()).create(replicationController);
}
logGeneratedEntity("Created replicationController: ", namespace, replicationController, answer);
} catch (Exception e) {
onApplyError("Failed to create replicationController from " + sourceName + ". " + e + ". " + replicationController, e);
}
}
/**
* Lets verify that any dependencies are available; such as volumes or secrets
*/
protected void validatePodSpec(PodSpec podSpec, String namespace) {
List<Volume> volumes = podSpec.getVolumes();
if (volumes != null) {
for (Volume volume : volumes) {
SecretVolumeSource secret = volume.getSecret();
if (secret != null) {
String secretName = secret.getSecretName();
if (Strings.isNotBlank(secretName)) {
KubernetesHelper.validateSecretExists(kubernetes, namespace, secretName);
}
}
}
}
}
    /**
     * Creates or updates the given Pod in the controller's default namespace.
     * Skipped when servicesOnlyMode is on.
     *
     * @param pod        the Pod to apply; must carry a name in its metadata
     * @param sourceName human-readable description of where the entity came from
     */
    public void applyPod(Pod pod, String sourceName) throws Exception {
        String namespace = getNamespace();
        String id = getName(pod);
        Objects.notNull(id, "No name for " + pod + " " + sourceName);
        if (isServicesOnlyMode()) {
            LOG.debug("Only processing Services right now so ignoring Pod: " + namespace + ":" + id);
            return;
        }
        Pod old = kubernetes.pods().inNamespace(namespace).withName(id).getIfExists();
        if (isRunning(old)) {
            if (UserConfigurationCompare.configEqual(pod, old)) {
                LOG.info("Pod hasn't changed so not doing anything");
            } else {
                if (isRecreateMode()) {
                    // Recreate mode: delete then create instead of updating in place.
                    kubernetes.pods().inNamespace(namespace).withName(id).delete();
                    doCreatePod(pod, namespace, sourceName);
                } else {
                    LOG.info("Updating a pod from " + sourceName + " namespace " + namespace + " name " + getName(pod));
                    try {
                        Object answer = kubernetes.pods().inNamespace(namespace).withName(id).update(pod);
                        LOG.info("Updated pod result: " + answer);
                    } catch (Exception e) {
                        onApplyError("Failed to update pod from " + sourceName + ". " + e + ". " + pod, e);
                    }
                }
            }
        } else {
            if (!isAllowCreate()) {
                LOG.warn("Creation disabled so not creating a pod from " + sourceName + " namespace " + namespace + " name " + getName(pod));
            } else {
                doCreatePod(pod, namespace, sourceName);
            }
        }
    }
protected void doCreatePod(Pod pod, String namespace, String sourceName) {
LOG.info("Creating a pod from " + sourceName + " namespace " + namespace + " name " + getName(pod));
try {
PodSpec podSpec = pod.getSpec();
if (podSpec != null) {
validatePodSpec(podSpec, namespace);
}
Object answer;
if (Strings.isNotBlank(namespace)) {
answer = kubernetes.pods().inNamespace(namespace).create(pod);
} else {
answer = kubernetes.pods().inNamespace(getNamespace()).create(pod);
}
LOG.info("Created pod result: " + answer);
} catch (Exception e) {
onApplyError("Failed to create pod from " + sourceName + ". " + e + ". " + pod, e);
}
}
    /**
     * Returns the default namespace used whenever an entity does not specify one.
     */
    public String getNamespace() {
        // NOTE(review): the backing field is misspelled "namesapce"; it is declared
        // elsewhere in this class, so the typo is flagged here rather than fixed
        // (renaming only these usages would break compilation).
        return namesapce;
    }
    /**
     * Sets the default namespace used whenever an entity does not specify one.
     */
    public void setNamespace(String namespace) {
        this.namesapce = namespace;
    }
    /**
     * Whether {@link #onApplyError(String, Exception)} rethrows failures as a
     * RuntimeException instead of only logging them.
     */
    public boolean isThrowExceptionOnError() {
        return throwExceptionOnError;
    }
    public void setThrowExceptionOnError(boolean throwExceptionOnError) {
        this.throwExceptionOnError = throwExceptionOnError;
    }
    // presumably controls whether templates are expanded locally rather than by
    // the server — not exercised in this chunk; confirm against the full class.
    public boolean isProcessTemplatesLocally() {
        return processTemplatesLocally;
    }
    public void setProcessTemplatesLocally(boolean processTemplatesLocally) {
        this.processTemplatesLocally = processTemplatesLocally;
    }
    /**
     * Flag consulted by {@link #applyReplicationController} after an in-place
     * update; see the NOTE there about what actually gets deleted.
     */
    public boolean isDeletePodsOnReplicationControllerUpdate() {
        return deletePodsOnReplicationControllerUpdate;
    }
    public void setDeletePodsOnReplicationControllerUpdate(boolean deletePodsOnReplicationControllerUpdate) {
        this.deletePodsOnReplicationControllerUpdate = deletePodsOnReplicationControllerUpdate;
    }
    public File getLogJsonDir() {
        return logJsonDir;
    }
    /**
     * Lets you configure the directory where JSON logging files should go
     */
    public void setLogJsonDir(File logJsonDir) {
        this.logJsonDir = logJsonDir;
    }
    public File getBasedir() {
        return basedir;
    }
    public void setBasedir(File basedir) {
        this.basedir = basedir;
    }
    /**
     * Returns true if the entity exists on the server, i.e. the lookup returned
     * a non-null object. Note: this only checks existence, not any actual
     * "running" state.
     */
    protected boolean isRunning(HasMetadata entity) {
        return entity != null;
    }
    /**
     * Logs an error applying some JSON to Kubernetes and optionally throws an exception
     * (when {@link #isThrowExceptionOnError()} is enabled).
     */
    protected void onApplyError(String message, Exception e) {
        LOG.error(message, e);
        if (throwExceptionOnError) {
            // Fail fast: propagate to the caller with the original cause attached.
            throw new RuntimeException(message, e);
        }
    }
    /**
     * Returns true if this controller allows new resources to be created in the given namespace
     */
    public boolean isAllowCreate() {
        return allowCreate;
    }
    public void setAllowCreate(boolean allowCreate) {
        this.allowCreate = allowCreate;
    }
    /**
     * If enabled then updates are performed by deleting the resource first then creating it
     */
    public boolean isRecreateMode() {
        return recreateMode;
    }
    public void setRecreateMode(boolean recreateMode) {
        this.recreateMode = recreateMode;
    }
    public void setServicesOnlyMode(boolean servicesOnlyMode) {
        this.servicesOnlyMode = servicesOnlyMode;
    }
    /**
     * If enabled then only services are created/updated to allow services to be created/updated across
     * a number of apps before any pods/replication controllers are updated
     */
    public boolean isServicesOnlyMode() {
        return servicesOnlyMode;
    }
    /**
     * If enabled then all services are ignored to avoid them being recreated. This is useful if you want to
     * recreate ReplicationControllers and Pods but leave Services as they are to avoid the portalIP addresses
     * changing
     */
    public boolean isIgnoreServiceMode() {
        return ignoreServiceMode;
    }
    public void setIgnoreServiceMode(boolean ignoreServiceMode) {
        this.ignoreServiceMode = ignoreServiceMode;
    }
    // presumably skips re-applying OAuth clients that already exist — not
    // exercised in this chunk; confirm against the full class.
    public boolean isIgnoreRunningOAuthClients() {
        return ignoreRunningOAuthClients;
    }
    public void setIgnoreRunningOAuthClients(boolean ignoreRunningOAuthClients) {
        this.ignoreRunningOAuthClients = ignoreRunningOAuthClients;
    }
    public boolean isFailOnMissingParameterValue() {
        return failOnMissingParameterValue;
    }
    public void setFailOnMissingParameterValue(boolean failOnMissingParameterValue) {
        this.failOnMissingParameterValue = failOnMissingParameterValue;
    }
    public boolean isSupportOAuthClients() {
        return supportOAuthClients;
    }
    public void setSupportOAuthClients(boolean supportOAuthClients) {
        this.supportOAuthClients = supportOAuthClients;
    }
}
| apache-2.0 |
igitras-java/custom-boot | custom-boot-core/src/main/java/com/igitras/cbframework/exception/internal/config/InvalidConfigItemException.java | 739 | package com.igitras.cbframework.exception.internal.config;
import static com.igitras.cbframework.exception.ErrorMessageBuilder.builder;
import static java.lang.String.format;
/**
* Invalid Configuration File Path exception. Such as file required but given folder, or something like this. Or the
* format of content is incorrect.
*
* @author mason
*/
public final class InvalidConfigItemException extends ConfigurationException {
    private static final long serialVersionUID = -6139249101300386600L;
    /**
     * Creates the exception for the named invalid configuration item.
     *
     * @param itemName name of the offending configuration item; interpolated
     *                 into the message and recorded as an error-message argument
     */
    public InvalidConfigItemException(String itemName) {
        // @formatter:off
        super(format("Invalid configuration Item: [%s].", itemName), builder().addArguments(itemName).build());
        // @formatter:on
    }
}
| apache-2.0 |
Im-dex/xray-162 | code/engine/xrGame/WeaponUSP45.cpp | 242 | #include "pch_script.h"
#include "weaponusp45.h"
using namespace luabind;
#pragma optimize("s", on)
// Registers CWeaponUSP45 with the Lua scripting engine via luabind, exposing it
// under the name "CWeaponUSP45" as a subclass of CGameObject with a default
// constructor. Called once per script VM during class registration.
void CWeaponUSP45::script_register(lua_State* L) {
    module(L)[class_<CWeaponUSP45, CGameObject>("CWeaponUSP45").def(constructor<>())];
}
| apache-2.0 |
santhosh-tekuri/jlibs | wadl/src/main/java/jlibs/wadl/cli/model/WADLReader.java | 5207 | /**
* Copyright 2015 Santhosh Kumar Tekuri
*
* The JLibs authors license this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package jlibs.wadl.cli.model;
import jlibs.core.net.URLUtil;
import jlibs.wadl.cli.Util;
import jlibs.wadl.model.*;
import javax.xml.bind.JAXBContext;
import java.util.HashSet;
import java.util.List;
/**
* @author Santhosh Kumar T
*/
public class WADLReader{
    // The WADL application currently being read/inlined.
    private Application app;
    // Tracks objects already inlined, preventing repeated work and infinite
    // recursion when resources/resource-types reference each other.
    private HashSet<Object> inlined = new HashSet<Object>();

    /**
     * Unmarshals the WADL document at the given location and resolves all
     * href-based references (methods, representations, resource types) in place.
     *
     * @param systemID location of the WADL document; a leading "~/" is expanded
     *                 to an absolute file path
     * @return the fully inlined {@link Application}
     */
    public Application read(String systemID) throws Exception{
        if(systemID.startsWith("~/"))
            systemID = Util.toFile(systemID).getAbsolutePath();
        JAXBContext jc = JAXBContext.newInstance(Application.class.getPackage().getName());
        app = (Application)jc.createUnmarshaller().unmarshal(URLUtil.toURL(systemID));
        inline();
        return app;
    }

    // Inlines the whole application: top-level methods first, then resource
    // types, then every resource tree. Order matters: methods must be resolved
    // before resource types/resources that reference them by href.
    private void inline(){
        for(Object item: app.getResourceTypeOrMethodOrRepresentation()){
            if(item instanceof Method)
                inline((Method)item);
        }
        for(Object item: app.getResourceTypeOrMethodOrRepresentation()){
            if(item instanceof ResourceType)
                inline((ResourceType)item);
        }
        for(Resources resources: app.getResources()){
            for(Resource resource: resources.getResource())
                inline(resource);
        }
    }

    // Replaces each href-only Representation in the list with the full
    // top-level Representation it points at.
    public void inline(List<Representation> representations){
        for(int i=0; i<representations.size(); i++){
            Representation representation = representations.get(i);
            if(representation.getHref()!=null)
                representations.set(i, getRepresentation(representation.getHref()));
        }
    }

    // Inlines a method's request and response representations (at most once).
    public void inline(Method method){
        if(!inlined.add(method))
            return;
        if(method.getRequest()!=null)
            inline(method.getRequest().getRepresentation());
        for(Response response: method.getResponse())
            inline(response.getRepresentation());
    }

    // Inlines a resource type (at most once): href methods are replaced with
    // their full definitions and nested resources are inlined recursively.
    public void inline(ResourceType rt){
        if(!inlined.add(rt))
            return;
        for(int i=0; i<rt.getMethodOrResource().size(); i++){
            Object item = rt.getMethodOrResource().get(i);
            if(item instanceof Method){
                Method method = (Method)item;
                if(method.getHref()!=null)
                    rt.getMethodOrResource().set(i, getMethod(method.getHref()));
            }else if(item instanceof Resource)
                inline((Resource)item);
        }
    }

    // Inlines a resource (at most once): merges in methods/resources from its
    // declared resource types, then resolves href methods and recurses into
    // child resources.
    public void inline(Resource resource){
        if(!inlined.add(resource))
            return;
        for(String type: resource.getType()){
            ResourceType rt = getResourceType(type);
            // NOTE(review): getResourceType throws when the id is not found, so
            // this null check appears unreachable — confirm intent.
            if(rt!=null){
                inline(rt);
                resource.getMethodOrResource().addAll(rt.getMethodOrResource());
            }
        }
        for(int i=0; i<resource.getMethodOrResource().size(); i++){
            Object item = resource.getMethodOrResource().get(i);
            if(item instanceof Method){
                Method method = (Method)item;
                if(method.getHref()!=null)
                    resource.getMethodOrResource().set(i, getMethod(method.getHref()));
            }else if(item instanceof Resource)
                inline((Resource)item);
        }
    }

    // Looks up a top-level ResourceType by reference.
    // Assumes the ref starts with a single '#' fragment prefix (stripped below)
    // — TODO confirm for refs pointing at external documents.
    private ResourceType getResourceType(String ref){
        ref = ref.substring(1);
        for(Object item: app.getResourceTypeOrMethodOrRepresentation()){
            if(item instanceof ResourceType){
                ResourceType rt = (ResourceType)item;
                if(rt.getId().equals(ref))
                    return rt;
            }
        }
        throw new RuntimeException("cannot find resourceType with id: "+ref);
    }

    // Looks up a top-level Method by reference (same '#'-prefix assumption).
    private Method getMethod(String ref){
        ref = ref.substring(1);
        for(Object item: app.getResourceTypeOrMethodOrRepresentation()){
            if(item instanceof Method){
                Method method = (Method)item;
                if(method.getId().equals(ref))
                    return method;
            }
        }
        throw new RuntimeException("cannot find method with id: "+ref);
    }

    // Looks up a top-level Representation by reference (same '#'-prefix assumption).
    private Representation getRepresentation(String ref){
        ref = ref.substring(1);
        for(Object item: app.getResourceTypeOrMethodOrRepresentation()){
            if(item instanceof Representation){
                Representation rep = (Representation)item;
                if(rep.getId().equals(ref))
                    return rep;
            }
        }
        throw new RuntimeException("cannot find representation with id: "+ref);
    }
}
| apache-2.0 |
jonnyzzz/TeamCity.Virtual | common/src/com/jonnyzzz/teamcity/virtual/VMConstants.java | 1923 | /*
* Copyright 2000-2014 Eugene Petrenko
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jonnyzzz.teamcity.virtual;
/**
* @author Eugene Petrenko (eugene.petrenko@gmail.com)
*/
public class VMConstants {
    // TeamCity runner type id for this plugin's build step.
    public static final String RUN_TYPE = "jonnyzzz.vm";

    // Agent capability property names.
    public static final String VAGRANT_PROPERTY = "vagrant";
    public static final String DOCKER_PROPERTY = "docker";

    // Runner parameter selecting the virtualization backend, and its two values.
    public static final String PARAMETER_VM = "vm";
    public static final String VM_DOCKER = "docker";
    public static final String VM_VAGRANT = "vagrant";

    // Parameters common to both backends.
    public static final String PARAMETER_SCRIPT = "script";
    public static final String PARAMETER_CHECKOUT_MOUNT_POINT = "checkout-mount-point";
    public static final String PARAMETER_SHELL = "default-shell-location";

    // Docker-specific parameters.
    public static final String DOCKER_MOUNT_MODE = "docker-mount-mode";
    public static final String PARAMETER_DOCKER_IMAGE_NAME = "docker-image-name";
    public static final String PARAMETER_DOCKER_CUSTOM_COMMANDLINE = "docker-commandline";

    // Vagrant-specific parameters.
    public static final String PARAMETER_VAGRANT_FILE = "vagrant-file";
    public static final String PARAMETER_VAGRANT_CUSTOM_COMMANDLINE = "vagrant-commandline";
    public static final String PARAMETER_VAGRANTFILE_CUSTOM_CONTENT = "vagrantfile-content";
    public static final String PARAMETER_VAGRANTFILE_DO_OVERRIDE = "vagrantfile-do-override";
    // Conventional Vagrant configuration file name.
    public static final String VAGRANT_FILE = "Vagrantfile";
}
| apache-2.0 |
czesiu/Luma.SimpleEntity | Src/Tests/Luma.SimpleEntity.Tests/CodeGenCustomAttributeGeneratorTests.cs | 23406 | using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using Luma.SimpleEntity.MetadataPipeline;
using Luma.SimpleEntity.Tests.Utilities;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using TestNamespace;
namespace Luma.SimpleEntity.Tests
{
/// <summary>
/// Tests CustomAttributeGenerator
/// </summary>
[TestClass]
public class CodeGenCustomAttributeGeneratorTests
{
        [TestMethod]
        [Description("CustomAttributeGenerator emits valid code when attribute is shared")]
        public void CodeGen_CustomAttrGen_AttributeType_Shared()
        {
            // Create a shared type service that says the entity's attribute is "shared" when asked whether it is shared
            var mockSts = new MockSharedCodeService(
                new[] { typeof(Mock_CG_Attr_Gen_Type), typeof(Mock_CG_Attr_Gen_TestAttribute) },
                new MethodBase[0],
                new string[0]);
            string generatedCode = TestHelper.GenerateCodeAssertSuccess("C#", new[] { typeof(Mock_CG_Attr_Gen_Entity) }, mockSts);
            // Because the attribute type is shared, it must appear verbatim
            // (short name, short typeof argument) on the generated client entity.
            TestHelper.AssertGeneratedCodeContains(generatedCode, "[Mock_CG_Attr_Gen_Test(typeof(Mock_CG_Attr_Gen_Type))]");
        }
        [TestMethod]
        [Description("CustomAttributeGenerator emits valid code for BindableAttribute using short and full type names")]
        public void CodeGen_CustomAttrGen_BindableAttribute()
        {
            // Create a shared type service that says the entity's attribute is "shared" when asked whether it is shared
            MockSharedCodeService mockSts = new MockSharedCodeService(
                    new[] { typeof(System.ComponentModel.BindableAttribute) },
                    new MethodBase[0],
                    new string[0]);
            // First pass: short type names expected in the emitted attribute.
            string generatedCode = TestHelper.GenerateCodeAssertSuccess("C#", new Type[] { typeof(Mock_CG_Attr_Entity_Bindable) }, new ConsoleLogger(), mockSts, /*useFullNames*/ false);
            TestHelper.AssertGeneratedCodeContains(generatedCode, "[Bindable(true, BindingDirection.TwoWay)]");
            // Second pass: fully-qualified, global::-prefixed names expected.
            generatedCode = TestHelper.GenerateCodeAssertSuccess("C#", new Type[] { typeof(Mock_CG_Attr_Entity_Bindable) }, new ConsoleLogger(), mockSts, true);
            TestHelper.AssertGeneratedCodeContains(generatedCode, "[global::System.ComponentModel.BindableAttribute(true, global::System.ComponentModel.BindingDirection.TwoWay)]");
        }
        [TestMethod]
        [Description("CustomAttributeGenerator emits error in generated code when attribute type is not shared")]
        public void CodeGen_CustomAttrGen_AttributeType_NotShared()
        {
            ConsoleLogger logger = new ConsoleLogger();
            // Create a shared type service that says the entity's attribute is "unshared" when asked whether it is shared
            MockSharedCodeService mockSts = new MockSharedCodeService(
                    new Type[] { typeof(Mock_CG_Attr_Gen_Type) },
                    new MethodBase[0],
                    new string[0]);
            mockSts.AddUnsharedType(typeof(Mock_CG_Attr_Gen_TestAttribute));
            string generatedCode = TestHelper.GenerateCode("C#", new Type[] { typeof(Mock_CG_Attr_Gen_Entity) }, logger, mockSts);
            // A build warning must be raised about the unshared attribute type...
            string expectedWarning = string.Format(
                CultureInfo.CurrentCulture,
                Resource.ClientCodeGen_Attribute_RequiresDataAnnotations,
                typeof(Mock_CG_Attr_Gen_TestAttribute),
                "MockProject");
            TestHelper.AssertContainsWarnings(logger, expectedWarning);
            // ...and an explanatory comment must be embedded in the generated code.
            string warningComment = string.Format(
                CultureInfo.CurrentCulture,
                Resource.ClientCodeGen_Attribute_RequiresShared,
                typeof(Mock_CG_Attr_Gen_TestAttribute),
                "MockProject");
            TestHelper.AssertGeneratedCodeContains(generatedCode, warningComment);
        }
        [TestMethod]
        [Description("CustomAttributeGenerator emits error in generated code when cannot determine attribute is shared")]
        public void CodeGen_CustomAttrGen_AttributeType_Shared_Unknowable()
        {
            var logger = new ConsoleLogger();
            // Create a shared type service that says the entity's attribute is "unknowable" when asked whether it is shared
            var mockSts = new MockSharedCodeService(
                    new Type[] { typeof(Mock_CG_Attr_Gen_Type) },
                    new MethodBase[0],
                    new string[0]);
            mockSts.AddUnknowableType(typeof(Mock_CG_Attr_Gen_TestAttribute));
            var generatedCode = TestHelper.GenerateCode("C#", new[] { typeof(Mock_CG_Attr_Gen_Entity) }, logger, mockSts);
            // "Unknowable" should produce only a comment in the generated code —
            // no build errors or warnings.
            TestHelper.AssertNoErrorsOrWarnings(logger);
            var warningComment = string.Format(
                CultureInfo.CurrentCulture,
                Resource.ClientCodeGen_Attribute_RequiresShared_NoPDB,
                typeof(Mock_CG_Attr_Gen_TestAttribute),
                typeof(Mock_CG_Attr_Gen_TestAttribute).Assembly.GetName().Name,
                "MockProject");
            // CodeDom injects comments after line breaks
            warningComment = warningComment.Replace("\r\n ", "\r\n        // ");
            TestHelper.AssertGeneratedCodeContains(generatedCode, warningComment);
        }
[TestMethod]
[Description("CustomAttributeGenerator emits warning when attribute contains typeof of an unshared type")]
public void CodeGen_CustomAttrGen_Attribute_References_Type_NotShared()
{
ConsoleLogger logger = new ConsoleLogger();
// Create a shared type service that says the entity's attribute is "shared" when asked whether it is shared
MockSharedCodeService mockSts = new MockSharedCodeService(
new Type[] { typeof(Mock_CG_Attr_Gen_TestAttribute) },
new MethodBase[0],
new string[0]);
// Explicitly make the typeof() ref in the attribute say it is unshared
mockSts.AddUnsharedType(typeof(Mock_CG_Attr_Gen_Type));
string generatedCode = TestHelper.GenerateCode("C#", new Type[] { typeof(Mock_CG_Attr_Gen_Entity) }, logger, mockSts);
TestHelper.AssertNoErrorsOrWarnings(logger);
string warningComment = string.Format(
CultureInfo.CurrentCulture,
Resource.ClientCodeGen_Attribute_RequiresShared,
typeof(Mock_CG_Attr_Gen_TestAttribute),
"MockProject");
TestHelper.AssertGeneratedCodeContains(generatedCode, "[Mock_CG_Attr_Gen_Test(typeof(global::Luma.SimpleEntity.Tests.Mock_CG_Attr_Gen_Type))]");
}
[TestMethod]
[Description("CustomAttributeGenerator emits warning when attribute contains typeof with type we cannot determine is shared")]
public void CodeGen_CustomAttrGen_Attribute_References_Type_NotKnowable()
{
var logger = new ConsoleLogger();
// Create a shared type service that says the entity's attribute is "shared" when asked whether it is shared
MockSharedCodeService mockSts = new MockSharedCodeService(
new[] { typeof(Mock_CG_Attr_Gen_TestAttribute) },
new MethodBase[0],
new string[0]);
// Explicitly make the typeof() ref in the attribute say it is unshared
mockSts.AddUnknowableType(typeof(Mock_CG_Attr_Gen_Type));
string generatedCode = TestHelper.GenerateCodeAssertSuccess("C#", new[] { typeof(Mock_CG_Attr_Gen_Entity) }, logger, mockSts);
string warningComment = string.Format(
CultureInfo.CurrentCulture,
Resource.ClientCodeGen_Attribute_RequiresShared,
typeof(Mock_CG_Attr_Gen_TestAttribute),
"MockProject");
TestHelper.AssertGeneratedCodeContains(generatedCode, "[Mock_CG_Attr_Gen_Test(typeof(global::Luma.SimpleEntity.Tests.Mock_CG_Attr_Gen_Type))]");
}
[TestMethod]
[Description("CustomAttributeGenerator emits error in generated code when an entity attribute throws an exception")]
public void CodeGen_CustomAttrGen_EntityAttributeThrows()
{
ConsoleLogger logger = new ConsoleLogger();
string generatedCode = TestHelper.GenerateCode("C#", typeof(AttributeThrowingEntity), logger);
Assert.IsFalse(string.IsNullOrEmpty(generatedCode), "Code should have been generated");
AttributeBuilderException expectedException = new AttributeBuilderException(
new ThrowingEntityAttributeException(ThrowingEntityAttribute.ExceptionMessage),
typeof(ThrowingEntityAttribute),
ThrowingEntityAttribute.ThrowingPropertyName);
string expectedBuildWarning = string.Format(
CultureInfo.CurrentCulture,
Resource.ClientCodeGen_Attribute_ThrewException_CodeType,
expectedException.Message,
typeof(AttributeThrowingEntity).Name,
expectedException.InnerException.Message);
TestHelper.AssertGeneratedCodeContains(generatedCode, expectedException.Message);
TestHelper.AssertContainsWarnings(logger, expectedBuildWarning);
}
[TestMethod]
[Description("CustomAttributeGenerator emits error in generated code when an entity property attribute throws an exception")]
public void CodeGen_CustomAttrGen_EntityPropertyAttributeThrows()
{
ConsoleLogger logger = new ConsoleLogger();
string generatedCode = TestHelper.GenerateCode("C#", typeof(AttributeThrowingEntity), logger);
Assert.IsFalse(string.IsNullOrEmpty(generatedCode), "Code should have been generated");
AttributeBuilderException expectedException = new AttributeBuilderException(
new ThrowingEntityPropertyAttributeException(ThrowingEntityPropertyAttribute.ExceptionMessage),
typeof(ThrowingEntityPropertyAttribute),
ThrowingEntityPropertyAttribute.ThrowingPropertyName);
string expectedBuildWarning = string.Format(
CultureInfo.CurrentCulture,
Resource.ClientCodeGen_Attribute_ThrewException_CodeTypeMember,
expectedException.Message,
AttributeThrowingEntity.ThrowingPropertyName,
typeof(AttributeThrowingEntity).Name,
expectedException.InnerException.Message);
TestHelper.AssertGeneratedCodeContains(generatedCode, expectedException.Message);
TestHelper.AssertContainsWarnings(logger, expectedBuildWarning);
}
[TestMethod]
[Description("CustomAttributeGenerator emits error in generated code when an entity association attribute throws an exception")]
public void CodeGen_CustomAttrGen_EntityAssociationAttributeThrows()
{
ConsoleLogger logger = new ConsoleLogger();
string generatedCode = TestHelper.GenerateCode("C#", typeof(AttributeThrowingEntity), logger);
Assert.IsFalse(string.IsNullOrEmpty(generatedCode), "Code should have been generated");
AttributeBuilderException expectedException = new AttributeBuilderException(
new ThrowingEntityAssociationAttributeException(ThrowingEntityAssociationAttribute.ExceptionMessage),
typeof(ThrowingEntityAssociationAttribute),
ThrowingEntityAssociationAttribute.ThrowingPropertyName);
string expectedBuildWarning = string.Format(
CultureInfo.CurrentCulture,
Resource.ClientCodeGen_Attribute_ThrewException_CodeTypeMember,
expectedException.Message,
AttributeThrowingEntity.ThrowingAssociationProperty,
typeof(AttributeThrowingEntity).Name,
expectedException.InnerException.Message);
TestHelper.AssertGeneratedCodeContains(generatedCode, expectedException.Message);
TestHelper.AssertContainsWarnings(logger, expectedBuildWarning);
}
[TestMethod]
[Description("CustomAttributeGenerator emits error in generated code when an entity association collection attribute throws an exception")]
public void CodeGen_CustomAttrGen_EntityAssociationCollectionAttributeThrows()
{
ConsoleLogger logger = new ConsoleLogger();
string generatedCode = TestHelper.GenerateCode("C#", typeof(AttributeThrowingEntity), logger);
Assert.IsFalse(string.IsNullOrEmpty(generatedCode), "Code should have been generated");
AttributeBuilderException expectedException = new AttributeBuilderException(
new ThrowingEntityAssociationCollectionAttributeException(ThrowingEntityAssociationCollectionAttribute.ExceptionMessage),
typeof(ThrowingEntityAssociationCollectionAttribute),
ThrowingEntityAssociationCollectionAttribute.ThrowingPropertyName);
string expectedBuildWarning = string.Format(
CultureInfo.CurrentCulture,
Resource.ClientCodeGen_Attribute_ThrewException_CodeTypeMember,
expectedException.Message,
AttributeThrowingEntity.ThrowingAssociationCollectionProperty,
typeof(AttributeThrowingEntity).Name,
expectedException.InnerException.Message);
TestHelper.AssertGeneratedCodeContains(generatedCode, expectedException.Message);
TestHelper.AssertContainsWarnings(logger, expectedBuildWarning);
}
[TestMethod]
[Description("Checks which constructor gets selected for an attribute when the actual default value is passed as parameter")]
[DeploymentItem(@"Luma.SimpleEntity.Tests\ProjectPath.txt", "CG_CUST_ATTR_CS")]
public void CodeGen_CustomAttrGen_CtrSelectorTest()
{
    // Compile the generated client code for DummyEntityForAttribTest and then
    // reflect over the resulting assembly to see which attribute constructor
    // the generator chose for each property.
    using(var asmGen = new AssemblyGenerator("CG_CUST_ATTR_CS", /*isCSharp*/ true, /*useFullTypeNames*/ false, new[] { typeof(DummyEntityForAttribTest) }))
    {
        // Force the Attribute types to be shared
        asmGen.MockSharedCodeService.AddSharedType(typeof(Mock_CG_Attr_Gen_TestAttrib1));
        asmGen.MockSharedCodeService.AddSharedType(typeof(Mock_CG_Attr_Gen_TestAttrib2));
        asmGen.MockSharedCodeService.AddSharedType(typeof(Mock_CG_Attr_Gen_TestAttrib3));
        asmGen.MockSharedCodeService.AddSharedType(typeof(Mock_CG_Attr_Gen_TestAttrib4));

        asmGen.ReferenceAssemblies.Add(Assembly.GetExecutingAssembly().ManifestModule.FullyQualifiedName);

        string generatedCode = asmGen.GeneratedCode;
        Assert.IsFalse(string.IsNullOrEmpty(generatedCode), "Failed to generate code:\r\n" + asmGen.ConsoleLogger.Errors);

        Assembly assy = asmGen.GeneratedAssembly;
        Assert.IsNotNull(assy, "Assembly failed to build: " + asmGen.ConsoleLogger.Errors);

        Type clientEntityType = asmGen.GetGeneratedType(typeof(DummyEntityForAttribTest).FullName);

        // Prop1: attribute has a parameterless ctor and an (int) ctor;
        // passing the default value 0 should select the default ctor.
        MemberInfo[] prop1 = clientEntityType.GetMember("Prop1");
        IList<CustomAttributeData> cads1 = AssemblyGenerator.GetCustomAttributeData(prop1[0], typeof(Mock_CG_Attr_Gen_TestAttrib1));
        Assert.AreEqual(1, cads1.Count, "Expected Mock_CG_Attr_Gen_TestAttrib1 on " + clientEntityType + ".Prop1");
        //Check if the default constructor was used
        CustomAttributeData cad = cads1[0];
        IList<CustomAttributeTypedArgument> ctr1args = cad.ConstructorArguments;
        Assert.AreEqual(ctr1args.Count, 0);

        // Prop2: no parameterless ctor available, so the single-int ctor
        // must be emitted with the explicit 0 argument.
        MemberInfo[] prop2 = clientEntityType.GetMember("Prop2");
        IList<CustomAttributeData> cads2 = AssemblyGenerator.GetCustomAttributeData(prop2[0], typeof(System.ComponentModel.DataAnnotations.Mock_CG_Attr_Gen_TestAttrib2));
        Assert.AreEqual(1, cads2.Count, "Expected Mock_CG_Attr_Gen_TestAttrib2 on " + clientEntityType + ".Prop2");
        cad = cads2[0];
        //Check if the constructor with one int param was used
        IList<CustomAttributeTypedArgument> ctr2args = cad.ConstructorArguments;
        Assert.AreEqual(ctr2args.Count, 1);
        Assert.AreEqual(ctr2args[0].ArgumentType, typeof(int));
        Assert.AreEqual(ctr2args[0].Value, 0);

        // Prop3: attribute was applied with (null), so the (string) ctor
        // should win over the (int) ctor.
        MemberInfo[] prop3 = clientEntityType.GetMember("Prop3");
        IList<CustomAttributeData> cads3 = AssemblyGenerator.GetCustomAttributeData(prop3[0], typeof(System.ComponentModel.DataAnnotations.Mock_CG_Attr_Gen_TestAttrib3));
        Assert.AreEqual(1, cads3.Count, "Expected Mock_CG_Attr_Gen_TestAttrib3 on " + clientEntityType + ".Prop3");
        cad = cads3[0];
        // Check if the ctor with one string param was used
        IList<CustomAttributeTypedArgument> ctr3args = cad.ConstructorArguments;
        Assert.AreEqual(ctr3args.Count, 1);
        Assert.AreEqual(ctr3args[0].ArgumentType, typeof(string));
        Assert.AreEqual(ctr3args[0].Value, null);

        // Prop4: both IntProp and StrProp end up at their defaults; the
        // generator should fall back to the first single-arg ctor (int).
        MemberInfo[] prop4 = clientEntityType.GetMember("Prop4");
        IList<CustomAttributeData> cads4 = AssemblyGenerator.GetCustomAttributeData(prop4[0], typeof(System.ComponentModel.DataAnnotations.Mock_CG_Attr_Gen_TestAttrib4));
        Assert.AreEqual(1, cads4.Count, "Expected Mock_CG_Attr_Gen_TestAttrib4 on " + clientEntityType + ".Prop4");
        cad = cads4[0];
        // Check if the first ctor was used
        IList<CustomAttributeTypedArgument> ctr4args = cad.ConstructorArguments;
        Assert.AreEqual(ctr4args.Count, 1);
        Assert.AreEqual(ctr4args[0].ArgumentType, typeof(int));
        Assert.AreEqual(ctr4args[0].Value, 0);
    }
}
//[TestMethod]
//[Description("CustomAttributeGenerator generates full names correctly for attributes in VB.")]
//public void CodeGen_ServiceKnownTypeAttrGen_VB_FullNames()
//{
// string generatedCode = TestHelper.GenerateCodeAssertSuccess("VB", new Type[] { typeof(DSWithCustomMethod) }, null, null, /* generateFullNames */ true);
// TestHelper.AssertGeneratedCodeContains(generatedCode, "Global.System.ServiceModel.ServiceKnownTypeAttribute(GetType(Global.TestNamespace.Address))");
//}
//[TestMethod]
//[Description("CustomAttributeGenerator generates full names correctly for types in attributes not shared on the client in VB.")]
//public void CodeGen_ServiceKnownTypeAttrGen_VB_NoFullNames()
//{
// string generatedCode = TestHelper.GenerateCodeAssertSuccess("VB", new Type[] { typeof(DSWithCustomMethod) }, null, null, /* generateFullNames */ false);
// TestHelper.AssertGeneratedCodeContains(generatedCode, "ServiceKnownType(GetType(Global.TestNamespace.Address))");
//}
}
// Minimal entity shape (a single [Key] member) used by the custom-method
// code-gen tests above.
public class DummyEntityForCustomMethodTest
{
    [Key]
    public int Key { get; set; }
}
// Dummy entity type for the attribute-constructor-selection test. Each
// property applies one of the Mock_CG_Attr_Gen_TestAttribN attributes with
// the attribute's default value, so code gen must pick a ctor overload.
public class DummyEntityForAttribTest
{
    [Key]
    public int ID { get; set; }

    // Attrib1 has a parameterless ctor; expect it to be selected.
    [Mock_CG_Attr_Gen_TestAttrib1(0)]
    public int Prop1 { get; set; }

    // Attrib2 has no parameterless ctor; expect the (int) ctor.
    [Mock_CG_Attr_Gen_TestAttrib2(0)]
    public int Prop2 { get; set; }

    // Attrib3 applied with null; expect the (string) ctor.
    [Mock_CG_Attr_Gen_TestAttrib3(null)]
    public int Prop3 { get; set; }

    // Attrib4 applied with null; expect the first single-arg ctor (int).
    [Mock_CG_Attr_Gen_TestAttrib4(null)]
    public int Prop4 { get; set; }
}
// Arbitrary type we mention in the attribute; it carries no members and only
// exists so an attribute can reference a non-shared type.
public class Mock_CG_Attr_Gen_Type
{
}
// Entity whose key property carries an attribute referencing
// Mock_CG_Attr_Gen_Type, used to exercise attribute generation for
// type-valued attribute arguments.
public class Mock_CG_Attr_Gen_Entity
{
    public Mock_CG_Attr_Gen_Entity() { }

    [Key]
    [Mock_CG_Attr_Gen_Test(typeof(Mock_CG_Attr_Gen_Type))]
    public string StringProperty { get; set; }
}
// Entity whose key carries [Bindable] with both ctor arguments supplied,
// used to verify generation of attributes with enum-typed ctor parameters.
public class Mock_CG_Attr_Entity_Bindable
{
    [Key]
    [System.ComponentModel.Bindable(true, System.ComponentModel.BindingDirection.TwoWay)]
    public int K { get; set; }
}
}
// The code generator only emits errors for attributes in the System.ComponentModel.DataAnnotations namespace to
// prevent attributes that are server-only by design from generating build warnings or comments in the code gen.
// For instance: [EnableClientAccess], [Query], [Invoke], and DAL-specific attributes.
namespace System.ComponentModel.DataAnnotations
{
// Attribute we will declare as "unknowable" or "unshared" in tests above.
// Captures a Type argument and exposes it through the read-only Type property.
public class Mock_CG_Attr_Gen_TestAttribute : Attribute
{
    public Mock_CG_Attr_Gen_TestAttribute(Type type)
    {
        this.Type = type;
    }

    // The type supplied to the constructor.
    public Type Type { get; private set; }
}
// Test attribute with both a parameterless and an (int) constructor; the
// ctor-selection test expects the parameterless one to be chosen when the
// default value is used. The parameterless ctor simply delegates.
public class Mock_CG_Attr_Gen_TestAttrib1 : Attribute
{
    public Mock_CG_Attr_Gen_TestAttrib1()
        : this(0)
    {
    }

    public Mock_CG_Attr_Gen_TestAttrib1(int intProp)
    {
        this.IntProp = intProp;
        this.StrProp = null;
    }

    public int IntProp { get; set; }
    public string StrProp { get; set; }
}
// Test attribute with no parameterless constructor. Note the (string) ctor
// deliberately ignores its argument and the (int, string) ctor only stores
// the int: the ctor-selection test cares about which *signature* the code
// generator emits, not about the stored values.
public class Mock_CG_Attr_Gen_TestAttrib2 : Attribute
{
    public Mock_CG_Attr_Gen_TestAttrib2(string str)
    {
    }

    public Mock_CG_Attr_Gen_TestAttrib2(int intProp, string str)
    {
        IntProp = intProp;
    }

    public Mock_CG_Attr_Gen_TestAttrib2(int intProp)
    {
        IntProp = intProp;
    }

    public int IntProp { get; set; }
}
// Test attribute exposing only StrProp. Applied with (null) in the entity
// above; the ctor-selection test expects the (string) overload to be chosen
// over the (int) one. The (int) ctor intentionally ignores its argument.
public class Mock_CG_Attr_Gen_TestAttrib3 : Attribute
{
    public Mock_CG_Attr_Gen_TestAttrib3(int intProp)
    {
        StrProp = null;
    }

    public Mock_CG_Attr_Gen_TestAttrib3(string str)
    {
        StrProp = str;
    }

    public Mock_CG_Attr_Gen_TestAttrib3(int intProp, string str)
    {
        StrProp = str;
    }

    public string StrProp { get; set; }
}
// Test attribute with three ctor overloads, all equivalent to calling the
// (int, string) overload with defaults for the missing argument; the
// single-arg ctors delegate to it. The ctor-selection test expects the
// first single-arg ctor (int) to be emitted when both properties are at
// their defaults.
public class Mock_CG_Attr_Gen_TestAttrib4 : Attribute
{
    public Mock_CG_Attr_Gen_TestAttrib4(int intProp)
        : this(intProp, null)
    {
    }

    public Mock_CG_Attr_Gen_TestAttrib4(string str)
        : this(0, str)
    {
    }

    public Mock_CG_Attr_Gen_TestAttrib4(int intProp, string str)
    {
        this.StrProp = str;
        this.IntProp = intProp;
    }

    public int IntProp { get; set; }
    public string StrProp { get; set; }
}
}
| apache-2.0 |
tlcowling/confmake | lib/confmake.rb | 154 | require 'confmake/env_reader'
require 'confmake/conf_reader'
require 'confmake/property_reader'
require 'confmake/bash_parser'
require 'confmake/version'
| apache-2.0 |
sslavic/kafka | connect/runtime/src/main/java/org/apache/kafka/connect/runtime/distributed/WorkerGroupMember.java | 10119 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.connect.runtime.distributed;
import org.apache.kafka.clients.ApiVersions;
import org.apache.kafka.clients.ClientDnsLookup;
import org.apache.kafka.clients.ClientUtils;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.Metadata;
import org.apache.kafka.clients.NetworkClient;
import org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient;
import org.apache.kafka.clients.GroupRebalanceConfig;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.internals.ClusterResourceListeners;
import org.apache.kafka.common.metrics.JmxReporter;
import org.apache.kafka.common.metrics.MetricConfig;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.MetricsReporter;
import org.apache.kafka.common.network.ChannelBuilder;
import org.apache.kafka.common.network.Selector;
import org.apache.kafka.common.utils.AppInfoParser;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.connect.storage.ConfigBackingStore;
import org.apache.kafka.connect.util.ConnectorTaskId;
import org.slf4j.Logger;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
/**
 * This class manages the coordination process with brokers for the Connect cluster group membership. It ties together
 * the Coordinator, which implements the group member protocol, with all the other pieces needed to drive the connection
 * to the group coordinator broker. This isolates all the networking to a single thread managed by this class, with
 * higher level operations in response to group membership events being handled by the herder.
 */
public class WorkerGroupMember {
    private static final String JMX_PREFIX = "kafka.connect";

    private final Logger log;
    private final Time time;
    private final String clientId;
    private final ConsumerNetworkClient client;
    private final Metrics metrics;
    private final Metadata metadata;
    private final long retryBackoffMs;
    private final WorkerCoordinator coordinator;

    // Guards against double-shutdown from stop(); not thread-safe, callers
    // are expected to invoke lifecycle methods from a single thread.
    private boolean stopped = false;

    /**
     * Builds the full networking stack (metrics, metadata, network client) and the
     * {@link WorkerCoordinator} that drives group membership for this worker.
     *
     * @param config        distributed herder configuration
     * @param restUrl       advertised REST URL of this worker, shared with the group
     * @param configStorage backing store for connector configurations
     * @param listener      callback invoked on rebalance events
     * @param time          time source (injectable for tests)
     * @param clientId      client id used for metrics and logging
     * @param logContext    logging context carrying the worker's log prefix
     * @throws KafkaException if any part of construction fails; partially constructed
     *         resources are closed before the exception propagates
     */
    public WorkerGroupMember(DistributedConfig config,
                             String restUrl,
                             ConfigBackingStore configStorage,
                             WorkerRebalanceListener listener,
                             Time time,
                             String clientId,
                             LogContext logContext) {
        try {
            this.time = time;
            this.clientId = clientId;
            this.log = logContext.logger(WorkerGroupMember.class);

            Map<String, String> metricsTags = new LinkedHashMap<>();
            metricsTags.put("client-id", clientId);
            MetricConfig metricConfig = new MetricConfig().samples(config.getInt(CommonClientConfigs.METRICS_NUM_SAMPLES_CONFIG))
                    .timeWindow(config.getLong(CommonClientConfigs.METRICS_SAMPLE_WINDOW_MS_CONFIG), TimeUnit.MILLISECONDS)
                    .tags(metricsTags);
            List<MetricsReporter> reporters = config.getConfiguredInstances(CommonClientConfigs.METRIC_REPORTER_CLASSES_CONFIG,
                    MetricsReporter.class,
                    Collections.singletonMap(CommonClientConfigs.CLIENT_ID_CONFIG, clientId));
            JmxReporter jmxReporter = new JmxReporter(JMX_PREFIX);
            jmxReporter.configure(config.originals());
            reporters.add(jmxReporter);
            this.metrics = new Metrics(metricConfig, reporters, time);
            this.retryBackoffMs = config.getLong(CommonClientConfigs.RETRY_BACKOFF_MS_CONFIG);
            this.metadata = new Metadata(retryBackoffMs, config.getLong(CommonClientConfigs.METADATA_MAX_AGE_CONFIG),
                    logContext, new ClusterResourceListeners());
            List<InetSocketAddress> addresses = ClientUtils.parseAndValidateAddresses(
                    config.getList(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG),
                    config.getString(CommonClientConfigs.CLIENT_DNS_LOOKUP_CONFIG));
            this.metadata.bootstrap(addresses);
            String metricGrpPrefix = "connect";
            ChannelBuilder channelBuilder = ClientUtils.createChannelBuilder(config, time, logContext);
            NetworkClient netClient = new NetworkClient(
                    new Selector(config.getLong(CommonClientConfigs.CONNECTIONS_MAX_IDLE_MS_CONFIG), metrics, time, metricGrpPrefix, channelBuilder, logContext),
                    this.metadata,
                    clientId,
                    100, // a fixed large enough value will suffice
                    config.getLong(CommonClientConfigs.RECONNECT_BACKOFF_MS_CONFIG),
                    config.getLong(CommonClientConfigs.RECONNECT_BACKOFF_MAX_MS_CONFIG),
                    config.getInt(CommonClientConfigs.SEND_BUFFER_CONFIG),
                    config.getInt(CommonClientConfigs.RECEIVE_BUFFER_CONFIG),
                    config.getInt(CommonClientConfigs.REQUEST_TIMEOUT_MS_CONFIG),
                    ClientDnsLookup.forConfig(config.getString(CommonClientConfigs.CLIENT_DNS_LOOKUP_CONFIG)),
                    time,
                    true,
                    new ApiVersions(),
                    logContext);
            this.client = new ConsumerNetworkClient(
                    logContext,
                    netClient,
                    metadata,
                    time,
                    retryBackoffMs,
                    config.getInt(CommonClientConfigs.REQUEST_TIMEOUT_MS_CONFIG),
                    Integer.MAX_VALUE);
            this.coordinator = new WorkerCoordinator(
                    new GroupRebalanceConfig(config, GroupRebalanceConfig.ProtocolType.CONNECT),
                    logContext,
                    this.client,
                    metrics,
                    metricGrpPrefix,
                    this.time,
                    restUrl,
                    configStorage,
                    listener,
                    ConnectProtocolCompatibility.compatibility(config.getString(DistributedConfig.CONNECT_PROTOCOL_CONFIG)),
                    config.getInt(DistributedConfig.SCHEDULED_REBALANCE_MAX_DELAY_MS_CONFIG));

            AppInfoParser.registerAppInfo(JMX_PREFIX, clientId, metrics, time.milliseconds());
            log.debug("Connect group member created");
        } catch (Throwable t) {
            // call close methods if internal objects are already constructed
            // this is to prevent resource leak. see KAFKA-2121
            stop(true);
            // now propagate the exception.
            // Note: message fixed from "Failed to construct kafka consumer",
            // which was a copy-paste from KafkaConsumer and misleading here.
            throw new KafkaException("Failed to construct the Connect worker group member", t);
        }
    }

    /**
     * Stops this group member, releasing all network and metrics resources.
     * Safe to call more than once; subsequent calls are no-ops.
     */
    public void stop() {
        if (stopped) return;
        stop(false);
    }

    /**
     * Ensure that the connection to the broker coordinator is up and that the worker is an
     * active member of the group.
     */
    public void ensureActive() {
        coordinator.poll(0);
    }

    /**
     * Poll for coordinator events, blocking for up to the given timeout.
     *
     * @param timeout maximum time in milliseconds to block; must be non-negative
     * @throws IllegalArgumentException if {@code timeout} is negative
     */
    public void poll(long timeout) {
        if (timeout < 0)
            throw new IllegalArgumentException("Timeout must not be negative");
        coordinator.poll(timeout);
    }

    /**
     * Interrupt any running poll() calls, causing a WakeupException to be thrown in the thread invoking that method.
     */
    public void wakeup() {
        this.client.wakeup();
    }

    /**
     * Get the member ID of this worker in the group of workers.
     *
     * This ID is the unique member ID automatically generated.
     *
     * @return the member ID
     */
    public String memberId() {
        return coordinator.memberId();
    }

    /** Request that this member rejoin the group on the next poll. */
    public void requestRejoin() {
        coordinator.requestRejoin();
    }

    /**
     * Leave the group if currently part of one.
     *
     * @param leaveReason human-readable reason included in the leave request
     */
    public void maybeLeaveGroup(String leaveReason) {
        coordinator.maybeLeaveGroup(leaveReason);
    }

    /**
     * @param connector name of a connector
     * @return the REST URL of the worker that owns the given connector
     */
    public String ownerUrl(String connector) {
        return coordinator.ownerUrl(connector);
    }

    /**
     * @param task id of a connector task
     * @return the REST URL of the worker that owns the given task
     */
    public String ownerUrl(ConnectorTaskId task) {
        return coordinator.ownerUrl(task);
    }

    /**
     * Get the version of the connect protocol that is currently active in the group of workers.
     *
     * @return the current connect protocol version
     */
    public short currentProtocolVersion() {
        return coordinator.currentProtocolVersion();
    }

    /**
     * Closes all internal resources, collecting the first failure.
     *
     * @param swallowException if true (used during failed construction), suppress any
     *        close-time exception instead of rethrowing it
     */
    private void stop(boolean swallowException) {
        log.trace("Stopping the Connect group member.");
        AtomicReference<Throwable> firstException = new AtomicReference<>();
        this.stopped = true;
        Utils.closeQuietly(coordinator, "coordinator", firstException);
        // Labels fixed from "consumer metrics"/"consumer network client":
        // these belong to the worker group member, not a consumer.
        Utils.closeQuietly(metrics, "worker group member metrics", firstException);
        Utils.closeQuietly(client, "worker group member network client", firstException);
        AppInfoParser.unregisterAppInfo(JMX_PREFIX, clientId, metrics);
        if (firstException.get() != null && !swallowException)
            throw new KafkaException("Failed to stop the Connect group member", firstException.get());
        else
            log.debug("The Connect group member has stopped.");
    }
}
| apache-2.0 |
wecatch/app-turbo | demos/helpers/user/user.py | 311 | #-*- coding:utf-8 -*-
from datetime import datetime, timedelta
from pymongo import DESCENDING, ASCENDING
from models.user import model as user_model
from helpers import settings
# Names this helper module exposes to turbo's helper loader.
MODEL_SLOTS = ['User']


class User(user_model.User):
    # Demo helper extending the user model with convenience methods.

    def hello_user(self):
        # Fetch one user document through the shared model instance registered
        # as 'user.User'. The result is discarded — presumably a demo/smoke
        # call; confirm against callers before relying on a return value.
        self.instance('user.User').find_one()
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-osconfig/v1alpha/1.31.0/com/google/api/services/osconfig/v1alpha/model/InstanceOSPoliciesComplianceOSPolicyCompliance.java | 4932 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.osconfig.v1alpha.model;
/**
* Compliance data for an OS policy
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the OS Config API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class InstanceOSPoliciesComplianceOSPolicyCompliance extends com.google.api.client.json.GenericJson {

  // NOTE: this class is auto-generated from the OS Config API discovery
  // document; each field maps 1:1 to a JSON key of the same name.

  /**
   * Reference to the `OSPolicyAssignment` API resource that the `OSPolicy` belongs to. Format: `pro
   * jects/{project_number}/locations/{location}/osPolicyAssignments/{os_policy_assignment_id@revisi
   * on_id}`
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String osPolicyAssignment;

  /**
   * The OS policy id
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String osPolicyId;

  /**
   * Compliance data for each `OSPolicyResource` that is applied to the VM.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<OSPolicyResourceCompliance> osPolicyResourceCompliances;

  /**
   * Compliance state of the OS policy.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String state;

  /**
   * Reference to the `OSPolicyAssignment` API resource that the `OSPolicy` belongs to. Format: `pro
   * jects/{project_number}/locations/{location}/osPolicyAssignments/{os_policy_assignment_id@revisi
   * on_id}`
   * @return value or {@code null} for none
   */
  public java.lang.String getOsPolicyAssignment() {
    return osPolicyAssignment;
  }

  /**
   * Reference to the `OSPolicyAssignment` API resource that the `OSPolicy` belongs to. Format: `pro
   * jects/{project_number}/locations/{location}/osPolicyAssignments/{os_policy_assignment_id@revisi
   * on_id}`
   * @param osPolicyAssignment osPolicyAssignment or {@code null} for none
   */
  public InstanceOSPoliciesComplianceOSPolicyCompliance setOsPolicyAssignment(java.lang.String osPolicyAssignment) {
    this.osPolicyAssignment = osPolicyAssignment;
    return this;
  }

  /**
   * The OS policy id
   * @return value or {@code null} for none
   */
  public java.lang.String getOsPolicyId() {
    return osPolicyId;
  }

  /**
   * The OS policy id
   * @param osPolicyId osPolicyId or {@code null} for none
   */
  public InstanceOSPoliciesComplianceOSPolicyCompliance setOsPolicyId(java.lang.String osPolicyId) {
    this.osPolicyId = osPolicyId;
    return this;
  }

  /**
   * Compliance data for each `OSPolicyResource` that is applied to the VM.
   * @return value or {@code null} for none
   */
  public java.util.List<OSPolicyResourceCompliance> getOsPolicyResourceCompliances() {
    return osPolicyResourceCompliances;
  }

  /**
   * Compliance data for each `OSPolicyResource` that is applied to the VM.
   * @param osPolicyResourceCompliances osPolicyResourceCompliances or {@code null} for none
   */
  public InstanceOSPoliciesComplianceOSPolicyCompliance setOsPolicyResourceCompliances(java.util.List<OSPolicyResourceCompliance> osPolicyResourceCompliances) {
    this.osPolicyResourceCompliances = osPolicyResourceCompliances;
    return this;
  }

  /**
   * Compliance state of the OS policy.
   * @return value or {@code null} for none
   */
  public java.lang.String getState() {
    return state;
  }

  /**
   * Compliance state of the OS policy.
   * @param state state or {@code null} for none
   */
  public InstanceOSPoliciesComplianceOSPolicyCompliance setState(java.lang.String state) {
    this.state = state;
    return this;
  }

  // Overrides narrow the return types of GenericJson for fluent use.
  @Override
  public InstanceOSPoliciesComplianceOSPolicyCompliance set(String fieldName, Object value) {
    return (InstanceOSPoliciesComplianceOSPolicyCompliance) super.set(fieldName, value);
  }

  @Override
  public InstanceOSPoliciesComplianceOSPolicyCompliance clone() {
    return (InstanceOSPoliciesComplianceOSPolicyCompliance) super.clone();
  }

}
| apache-2.0 |
russbishop/swift | lib/AST/GenericSignature.cpp | 21139 | //===--- GenericSignature.cpp - Generic Signature AST ---------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See http://swift.org/LICENSE.txt for license information
// See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
//
// This file implements the GenericSignature class.
//
//===----------------------------------------------------------------------===//
#include "swift/AST/GenericSignature.h"
#include "swift/AST/ASTContext.h"
#include "swift/AST/Decl.h"
#include "swift/AST/Module.h"
#include "swift/AST/Types.h"
using namespace swift;
GenericSignature::GenericSignature(ArrayRef<GenericTypeParamType *> params,
                                   ArrayRef<Requirement> requirements,
                                   bool isKnownCanonical)
  : NumGenericParams(params.size()), NumRequirements(requirements.size()),
    CanonicalSignatureOrASTContext()
{
  // Copy the generic parameters and requirements into the signature's own
  // storage buffers.
  auto paramsBuffer = getGenericParamsBuffer();
  for (unsigned i = 0; i < NumGenericParams; ++i) {
    paramsBuffer[i] = params[i];
  }

  auto reqtsBuffer = getRequirementsBuffer();
  for (unsigned i = 0; i < NumRequirements; ++i) {
    reqtsBuffer[i] = requirements[i];
  }

  // For a signature known to be canonical, cache the ASTContext directly;
  // storing an ASTContext* doubles as the "is canonical" marker
  // (see isCanonical()).
  if (isKnownCanonical)
    CanonicalSignatureOrASTContext = &getASTContext(params, requirements);
}
/// Return the suffix of the generic parameter list belonging to the
/// innermost generic context, i.e. all trailing parameters that share the
/// depth of the last parameter.
ArrayRef<GenericTypeParamType *>
GenericSignature::getInnermostGenericParams() const {
  auto params = getGenericParams();

  // Scan backwards past every parameter at the innermost depth; 'start'
  // ends up at the first parameter of that depth.
  const unsigned innermostDepth = params.back()->getDepth();
  unsigned start = params.size();
  while (start > 0 && params[start - 1]->getDepth() == innermostDepth)
    --start;

  // If start == 0, all parameters share one depth and the whole list is
  // returned.
  return params.slice(start);
}
/// Retrieve the ASTContext from either the parameter list or, failing that,
/// the requirement list. The two lists are never both empty.
ASTContext &GenericSignature::getASTContext(
                                ArrayRef<swift::GenericTypeParamType *> params,
                                ArrayRef<swift::Requirement> requirements) {
  if (params.empty())
    return requirements.front().getFirstType()->getASTContext();
  return params.front()->getASTContext();
}
/// Retrieve (or lazily create) the archetype builder for this signature in
/// the given module.
ArchetypeBuilder *GenericSignature::getArchetypeBuilder(ModuleDecl &mod) {
  // The archetype builder is associated with the canonical signature.
  if (!isCanonical())
    return getCanonicalSignature()->getArchetypeBuilder(mod);

  // Archetype builders are stored on the ASTContext.
  return getASTContext().getOrCreateArchetypeBuilder(CanGenericSignature(this),
                                                     &mod);
}
/// Whether this signature is already in canonical form. A stored ASTContext*
/// marks a known-canonical signature; otherwise compare against the computed
/// canonical signature.
bool GenericSignature::isCanonical() const {
  if (CanonicalSignatureOrASTContext.is<ASTContext*>()) return true;

  return getCanonicalSignature() == this;
}
/// Build a canonical signature from the given parameters and requirements by
/// canonicalizing every component type, then uniquing the result.
CanGenericSignature GenericSignature::getCanonical(
                                        ArrayRef<GenericTypeParamType *> params,
                                        ArrayRef<Requirement> requirements) {
  // Canonicalize the parameters and requirements.
  SmallVector<GenericTypeParamType*, 8> canonicalParams;
  canonicalParams.reserve(params.size());
  for (auto param : params) {
    canonicalParams.push_back(cast<GenericTypeParamType>(param->getCanonicalType()));
  }

  SmallVector<Requirement, 8> canonicalRequirements;
  canonicalRequirements.reserve(requirements.size());
  for (auto &reqt : requirements) {
    // Second type may be null (e.g. witness-marker requirements), hence
    // getCanonicalTypeOrNull.
    canonicalRequirements.push_back(Requirement(reqt.getKind(),
                              reqt.getFirstType()->getCanonicalType(),
                              reqt.getSecondType().getCanonicalTypeOrNull()));
  }
  auto canSig = get(canonicalParams, canonicalRequirements,
                    /*isKnownCanonical=*/true);
  return CanGenericSignature(canSig);
}
/// Retrieve the canonical form of this signature, computing and caching it
/// on first use.
CanGenericSignature
GenericSignature::getCanonicalSignature() const {
  // If we haven't computed the canonical signature yet, do so now.
  if (CanonicalSignatureOrASTContext.isNull()) {
    // Compute the canonical signature.
    CanGenericSignature canSig = getCanonical(getGenericParams(),
                                              getRequirements());

    // Record either the canonical signature or an indication that
    // this is the canonical signature.
    if (canSig != this)
      CanonicalSignatureOrASTContext = canSig;
    else
      CanonicalSignatureOrASTContext = &getGenericParams()[0]->getASTContext();

    // Return the canonical signature.
    return canSig;
  }

  // A stored ASTContext indicates that this is the canonical
  // signature.
  if (CanonicalSignatureOrASTContext.is<ASTContext*>())
    // TODO: CanGenericSignature should be const-correct.
    return CanGenericSignature(const_cast<GenericSignature*>(this));

  // Otherwise, return the stored canonical signature.
  return CanGenericSignature(
           CanonicalSignatureOrASTContext.get<GenericSignature*>());
}
/// Canonical ordering for dependent types in generic signatures.
///
/// Returns <0, 0, or >0 in the usual comparator convention. The order is:
/// generic parameters first (by depth, then index), then dependent member
/// types (by base, then protocol, then associated-type name), then all other
/// types (mutually unordered).
static int compareDependentTypes(const CanType *pa, const CanType *pb) {
  auto a = *pa, b = *pb;

  // Fast-path check for equality.
  if (a == b)
    return 0;

  // Ordering is as follows:
  // - Generic params
  if (auto gpa = dyn_cast<GenericTypeParamType>(a)) {
    if (auto gpb = dyn_cast<GenericTypeParamType>(b)) {
      // - by depth, so t_0_n < t_1_m
      if (int compareDepth = gpa->getDepth() - gpb->getDepth())
        return compareDepth;
      // - by index, so t_n_0 < t_n_1
      return gpa->getIndex() - gpb->getIndex();
    }
    // A generic parameter precedes any non-parameter type.
    return -1;
  }

  // - Dependent members
  if (auto dma = dyn_cast<DependentMemberType>(a)) {
    if (isa<GenericTypeParamType>(b))
      return +1;
    if (auto dmb = dyn_cast<DependentMemberType>(b)) {
      // - by base, so t_0_n.`P.T` < t_1_m.`P.T`
      auto abase = dma.getBase();
      auto bbase = dmb.getBase();
      if (int compareBases = compareDependentTypes(&abase, &bbase))
        return compareBases;

      // - by protocol, so t_n_m.`P.T` < t_n_m.`Q.T` (given P < Q)
      auto protoa = dma->getAssocType()->getProtocol();
      auto protob = dmb->getAssocType()->getProtocol();
      if (int compareProtocols
            = ProtocolType::compareProtocols(&protoa, &protob))
        return compareProtocols;

      // - by name, so t_n_m.`P.T` < t_n_m.`P.U`
      return dma->getAssocType()->getName().str().compare(
                                          dmb->getAssocType()->getName().str());
    }
    return -1;
  }

  // - Other types.
  //
  // There should only ever be one of these in a set of constraints related to
  // a dependent type, so the ordering among other types does not matter.
  if (isa<GenericTypeParamType>(b) || isa<DependentMemberType>(b))
    return +1;
  return 0;
}
/// Compute (and cache) the minimized canonical signature used for mangling:
/// redundant and inferred requirements are dropped, and the remaining
/// requirements are emitted in a deterministic canonical order.
CanGenericSignature
GenericSignature::getCanonicalManglingSignature(ModuleDecl &M) const {
  // Start from the elementwise-canonical signature.
  auto canonical = getCanonicalSignature();
  auto &Context = canonical->getASTContext();

  // See if we cached the mangling signature.
  auto cached = Context.ManglingSignatures.find({canonical, &M});
  if (cached != Context.ManglingSignatures.end()) {
    return cached->second;
  }

  // Otherwise, we need to compute it.
  // Dump the generic signature into an ArchetypeBuilder that will figure out
  // the minimal set of requirements.
  std::unique_ptr<ArchetypeBuilder> builder(new ArchetypeBuilder(M,
                                                               Context.Diags));
  builder->addGenericSignature(canonical, /*adoptArchetypes*/ false,
                               /*treatRequirementsAsExplicit*/ true);

  // Sort out the requirements.
  struct DependentConstraints {
    CanType baseClass;
    SmallVector<CanType, 2> protocols;
  };

  // depTypes records every dependent type (in witness-marker order);
  // constraints/sameTypes bucket the surviving requirements by subject type.
  SmallVector<CanType, 2> depTypes;
  llvm::DenseMap<CanType, DependentConstraints> constraints;
  llvm::DenseMap<CanType, SmallVector<CanType, 2>> sameTypes;

  builder->enumerateRequirements([&](RequirementKind kind,
          ArchetypeBuilder::PotentialArchetype *archetype,
          llvm::PointerUnion<Type, ArchetypeBuilder::PotentialArchetype *> type,
          RequirementSource source) {
    CanType depTy
      = archetype->getDependentType(*builder, false)->getCanonicalType();

    // Filter out redundant requirements.
    switch (source.getKind()) {
    case RequirementSource::Explicit:
      // The requirement was explicit and required, keep it.
      break;

    case RequirementSource::Protocol:
      // Keep witness markers.
      if (kind == RequirementKind::WitnessMarker)
        break;
      return;

    case RequirementSource::Redundant:
    case RequirementSource::Inferred:
      // The requirement was inferred or redundant, drop it.
      return;

    case RequirementSource::OuterScope:
      llvm_unreachable("shouldn't have an outer scope!");
    }

    switch (kind) {
    case RequirementKind::WitnessMarker: {
      // Introduce the dependent type into the constraint set, to ensure we
      // have a record for every dependent type.
      depTypes.push_back(depTy);
      return;
    }

    case RequirementKind::Superclass: {
      assert(std::find(depTypes.begin(), depTypes.end(),
                       depTy) != depTypes.end()
             && "didn't see witness marker first?");
      // Organize conformance constraints, sifting out the base class
      // requirement.
      auto &depConstraints = constraints[depTy];

      auto constraintType = type.get<Type>()->getCanonicalType();
      assert(depConstraints.baseClass.isNull()
             && "multiple base class constraints?!");
      depConstraints.baseClass = constraintType;
      return;
    }

    case RequirementKind::Conformance: {
      assert(std::find(depTypes.begin(), depTypes.end(),
                       depTy) != depTypes.end()
             && "didn't see witness marker first?");
      // Organize conformance constraints, sifting out the base class
      // requirement.
      auto &depConstraints = constraints[depTy];

      auto constraintType = type.get<Type>()->getCanonicalType();
      assert(constraintType->isExistentialType());
      depConstraints.protocols.push_back(constraintType);
      return;
    }

    case RequirementKind::SameType:
      // Collect the same-type constraints by their representative.
      CanType repTy;
      if (auto concreteTy = type.dyn_cast<Type>()) {
        // Maybe we were equated to a concrete type...
        repTy = concreteTy->getCanonicalType();
      } else {
        // ...or to a representative dependent type that was in turn equated
        // to a concrete type.
        auto representative
          = type.get<ArchetypeBuilder::PotentialArchetype *>();

        if (representative->isConcreteType())
          repTy = representative->getConcreteType()->getCanonicalType();
        else
          repTy = representative->getDependentType(*builder, false)
                    ->getCanonicalType();
      }

      sameTypes[repTy].push_back(depTy);
      return;
    }
  });

  // Order the dependent types canonically.
  llvm::array_pod_sort(depTypes.begin(), depTypes.end(), compareDependentTypes);

  // Build a new set of minimized requirements.
  // Emit the conformance constraints.
  SmallVector<Requirement, 4> minimalRequirements;
  for (auto depTy : depTypes) {
    // Witness marker first, then superclass, then protocol conformances.
    minimalRequirements.push_back(Requirement(RequirementKind::WitnessMarker,
                                              depTy, Type()));

    auto foundConstraints = constraints.find(depTy);
    if (foundConstraints != constraints.end()) {
      const auto &depConstraints = foundConstraints->second;

      if (depConstraints.baseClass)
        minimalRequirements.push_back(Requirement(RequirementKind::Superclass,
                                                  depTy,
                                                  depConstraints.baseClass));

      for (auto protocol : depConstraints.protocols)
        minimalRequirements.push_back(Requirement(RequirementKind::Conformance,
                                                  depTy, protocol));
    }
  }

  // Collect the same type constraints.
  unsigned sameTypeBegin = minimalRequirements.size();
  for (auto &group : sameTypes) {
    // Sort the types in the set.
    auto types = std::move(group.second);
    types.push_back(group.first);
    llvm::array_pod_sort(types.begin(), types.end(), compareDependentTypes);

    // Form constraints with the greater type on the right (which will be the
    // concrete type, if one).
    auto rhsType = types.pop_back_val();
    for (auto lhsType : types)
      minimalRequirements.push_back(Requirement(RequirementKind::SameType,
                                                lhsType, rhsType));
  }

  // Sort the same-types by LHS, then by RHS.
  std::sort(minimalRequirements.begin() + sameTypeBegin, minimalRequirements.end(),
            [](const Requirement &a, const Requirement &b) -> bool {
    assert(a.getKind() == b.getKind()
           && a.getKind() == RequirementKind::SameType
           && "not same type constraints");
    CanType aLHS(a.getFirstType()), bLHS(b.getFirstType());
    if (int compareLHS = compareDependentTypes(&aLHS, &bLHS))
      return compareLHS < 0;
    CanType aRHS(a.getSecondType()), bRHS(b.getSecondType());
    return compareDependentTypes(&aRHS, &bRHS);
  });

  // Build the minimized signature.
  auto manglingSig = GenericSignature::get(canonical->getGenericParams(),
                                           minimalRequirements,
                                           /*isKnownCanonical=*/true);

  CanGenericSignature canSig(manglingSig);

  // Cache the result.
  Context.ManglingSignatures.insert({{canonical, &M}, canSig});
  Context.setArchetypeBuilder(canSig, &M, std::move(builder));

  return canSig;
}
ASTContext &GenericSignature::getASTContext() const {
  // A canonical signature caches the ASTContext directly; use it if present.
  auto *directCtx = CanonicalSignatureOrASTContext.dyn_cast<ASTContext *>();
  if (directCtx)
    return *directCtx;

  // Otherwise, derive the context from the generic parameters/requirements.
  return getASTContext(getGenericParams(), getRequirements());
}
TypeSubstitutionMap
GenericSignature::getSubstitutionMap(ArrayRef<Substitution> args) const {
  TypeSubstitutionMap subs;

  // With no generic parameters there is nothing to substitute.
  if (getGenericParams().empty()) {
    assert(args.empty() && "substitutions but no generic params?!");
    return subs;
  }

  // Walk the dependent types in order, consuming one substitution apiece and
  // keying the map by the canonical form of each dependent type.
  for (auto depTy : getAllDependentTypes()) {
    auto replacement = args.front().getReplacement();
    args = args.slice(1);

    if (auto subTy = depTy->getAs<SubstitutableType>())
      subs[subTy->getCanonicalType().getPointer()] = replacement;
    else if (auto memberTy = depTy->getAs<DependentMemberType>())
      subs[memberTy->getCanonicalType().getPointer()] = replacement;
  }

  assert(args.empty() && "did not use all substitutions?!");
  return subs;
}
bool GenericSignature::requiresClass(Type type, ModuleDecl &mod) {
  if (!type->isTypeParameter()) return false;

  auto &builder = *getArchetypeBuilder(mod);
  auto potential = builder.resolveArchetype(type);
  if (!potential) return false;

  potential = potential->getRepresentative();

  // A parameter equated to a concrete type carries no class requirement.
  if (potential->isConcreteType()) return false;

  // A superclass bound obviously forces it to be a class...
  if (potential->getSuperclass()) return true;

  // ...as does conformance to any class-bound protocol.
  for (const auto &conf : potential->getConformsTo())
    if (conf.first->requiresClass()) return true;

  return false;
}
/// Determine the superclass bound on the given dependent type, or the null
/// Type if there is none.
Type GenericSignature::getSuperclassBound(Type type, ModuleDecl &mod) {
  if (!type->isTypeParameter()) return nullptr;

  auto &builder = *getArchetypeBuilder(mod);
  auto potential = builder.resolveArchetype(type);
  if (!potential) return nullptr;

  potential = potential->getRepresentative();

  // A parameter equated to a concrete type has no superclass requirement.
  if (potential->isConcreteType()) return nullptr;

  return potential->getSuperclass();
}
/// Determine the set of protocols to which the given dependent type
/// must conform.
SmallVector<ProtocolDecl *, 2> GenericSignature::getConformsTo(Type type,
                                                               ModuleDecl &mod) {
  SmallVector<ProtocolDecl *, 2> result;
  if (!type->isTypeParameter()) return result;

  auto &builder = *getArchetypeBuilder(mod);
  auto potential = builder.resolveArchetype(type);
  if (!potential) return result;

  potential = potential->getRepresentative();

  // A parameter equated to a concrete type has no conformance requirements
  // of its own.
  if (potential->isConcreteType()) return result;

  // Gather the conformances recorded on the representative.
  for (const auto &conf : potential->getConformsTo())
    result.push_back(conf.first);

  // Canonicalize the resulting set of protocols.
  ProtocolType::canonicalizeProtocols(result);
  return result;
}
/// Determine whether the given dependent type is equal to a concrete type.
bool GenericSignature::isConcreteType(Type type, ModuleDecl &mod) {
  // Defined in terms of getConcreteType: a non-null result means "concrete".
  return static_cast<bool>(getConcreteType(type, mod));
}
/// Return the concrete type that the given dependent type is constrained to,
/// or the null Type if it is not the subject of a concrete same-type
/// constraint.
Type GenericSignature::getConcreteType(Type type, ModuleDecl &mod) {
  if (!type->isTypeParameter()) return Type();

  auto &builder = *getArchetypeBuilder(mod);
  auto potential = builder.resolveArchetype(type);
  if (!potential) return Type();

  potential = potential->getRepresentative();

  // Only representatives fixed to a concrete type yield a non-null result.
  return potential->isConcreteType() ? potential->getConcreteType() : Type();
}
/// Return the canonical representative for the given type parameter: the
/// concrete type it is equated to, the dependent type of its representative
/// potential archetype, or the type itself when it is its own representative.
Type GenericSignature::getRepresentative(Type type, ModuleDecl &mod) {
  assert(type->isTypeParameter());
  auto &builder = *getArchetypeBuilder(mod);
  auto pa = builder.resolveArchetype(type);
  assert(pa && "not a valid dependent type of this signature?");
  auto rep = pa->getRepresentative();
  // If the equivalence class was fixed to a concrete type, return that.
  if (rep->isConcreteType()) return rep->getConcreteType();
  if (pa == rep) {
    // The type is its own representative; sanity-check that round-tripping
    // through the builder reproduces the same canonical type.
    assert(rep->getDependentType(builder, /*allowUnresolved*/ false)
             ->getCanonicalType() == type->getCanonicalType());
    return type;
  }
  return rep->getDependentType(builder, /*allowUnresolved*/ false);
}
bool GenericSignature::areSameTypeParameterInContext(Type type1, Type type2,
                                                     ModuleDecl &mod) {
  assert(type1->isTypeParameter());
  assert(type2->isTypeParameter());

  // Identical pointers are trivially the same parameter.
  if (type1.getPointer() == type2.getPointer())
    return true;

  // Otherwise, the two parameters are the same in this context iff they
  // resolve to the same representative potential archetype.
  auto &builder = *getArchetypeBuilder(mod);

  auto rep1 = builder.resolveArchetype(type1);
  assert(rep1 && "not a valid dependent type of this signature?");
  rep1 = rep1->getRepresentative();
  assert(!rep1->isConcreteType());

  auto rep2 = builder.resolveArchetype(type2);
  assert(rep2 && "not a valid dependent type of this signature?");
  rep2 = rep2->getRepresentative();
  assert(!rep2->isConcreteType());

  return rep1 == rep2;
}
bool GenericSignature::isCanonicalTypeInContext(Type type, ModuleDecl &mod) {
  // A type that is not independently canonical certainly is not canonical
  // in this context either.
  if (!type->isCanonical())
    return false;

  // Contextual canonicality only concerns type parameters; a type without
  // any is already canonical.
  if (!type->hasTypeParameter())
    return true;

  auto &builder = *getArchetypeBuilder(mod);

  // The type is canonical unless it contains a non-canonical type parameter:
  // one whose anchor is concrete, or one that is not its own anchor.
  auto isNonCanonicalParam = [&](Type component) -> bool {
    if (!component->isTypeParameter()) return false;
    auto pa = builder.resolveArchetype(component);
    if (!pa) return false;
    auto anchor = pa->getArchetypeAnchor();
    return anchor->isConcreteType() || pa != anchor;
  };
  return !type.findIf(isNonCanonicalParam);
}
/// Canonicalize the given type with respect to this signature: every type
/// parameter is replaced by its anchor's dependent type, or by the (also
/// canonicalized) concrete type it is equated to.
CanType GenericSignature::getCanonicalTypeInContext(Type type, ModuleDecl &mod) {
  type = type->getCanonicalType();
  // All the contextual canonicality rules apply to type parameters, so if the
  // type doesn't involve any type parameters, it's already canonical.
  if (!type->hasTypeParameter())
    return CanType(type);
  auto &builder = *getArchetypeBuilder(mod);
  // Replace non-canonical type parameters.
  type = type.transform([&](Type component) -> Type {
    if (!component->isTypeParameter()) return component;
    // Resolve the potential archetype.  This can be null in nested generic
    // types, which we can't immediately canonicalize.
    auto pa = builder.resolveArchetype(component);
    if (!pa) return component;
    auto rep = pa->getArchetypeAnchor();
    if (rep->isConcreteType()) {
      // Recurse: the concrete type may itself mention type parameters.
      return getCanonicalTypeInContext(rep->getConcreteType(), mod);
    } else {
      return rep->getDependentType(builder, /*allowUnresolved*/ false);
    }
  });
  return type->getCanonicalType();
}
| apache-2.0 |
jekinchen/flatjni | tests/test.cpp | 66867 | /*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "flatbuffers/flatbuffers.h"
#include "flatbuffers/idl.h"
#include "flatbuffers/util.h"
#include "flatbuffers/registry.h"
#include "monster_test_generated.h"
#include "namespace_test/namespace_test1_generated.h"
#include "namespace_test/namespace_test2_generated.h"
#include "union_vector/union_vector_generated.h"
#ifndef FLATBUFFERS_CPP98_STL
#include <random>
#endif
#include "flatbuffers/flexbuffers.h"
using namespace MyGame::Example;
#ifdef __ANDROID__
#include <android/log.h>
#define TEST_OUTPUT_LINE(...) \
__android_log_print(ANDROID_LOG_INFO, "FlatBuffers", __VA_ARGS__)
#define FLATBUFFERS_NO_FILE_TESTS
#else
#define TEST_OUTPUT_LINE(...) \
{ printf(__VA_ARGS__); printf("\n"); }
#endif
int testing_fails = 0;
// Record a test failure: print "expected (got) != expression" with its
// source location, bump the global failure counter, then trip an assert so
// debug builds stop at the failure site.
void TestFail(const char *expval, const char *val, const char *exp,
              const char *file, int line) {
  TEST_OUTPUT_LINE("TEST FAILED: %s:%d, %s (%s) != %s", file, line,
                   exp, expval, val);
  // Count the failure BEFORE asserting: with assert(0) first, the increment
  // was unreachable in debug builds (assert aborts the process), so
  // testing_fails never reflected the failure.
  testing_fails++;
  assert(0);
}
// Compare two C strings for equality; report a test failure on mismatch.
void TestEqStr(const char *expval, const char *val, const char *exp,
               const char *file, int line) {
  // Equal C strings compare as zero under strcmp.
  if (strcmp(expval, val) == 0) return;
  TestFail(expval, val, exp, file, line);
}
// Compare an expected value against an actual one; report a failure on
// mismatch.  The expected value is converted to the actual's type U first,
// so e.g. integer literals compare cleanly against unsigned actuals.
template<typename T, typename U>
void TestEq(T expval, U val, const char *exp, const char *file, int line) {
  if (U(expval) == val) return;
  TestFail(flatbuffers::NumToString(expval).c_str(),
           flatbuffers::NumToString(val).c_str(), exp, file, line);
}
#define TEST_EQ(exp, val) TestEq(exp, val, #exp, __FILE__, __LINE__)
#define TEST_NOTNULL(exp) TestEq(exp == NULL, false, #exp, __FILE__, __LINE__)
#define TEST_EQ_STR(exp, val) TestEqStr(exp, val, #exp, __FILE__, __LINE__)
// Minimal Lehmer-style linear congruential generator so random sequences
// are reproducible across platforms (unlike rand()).
// http://en.wikipedia.org/wiki/Park%E2%80%93Miller_random_number_generator
uint32_t lcg_seed = 48271;

// Advance the generator and return the new 32-bit state.
uint32_t lcg_rand() {
  lcg_seed = static_cast<uint32_t>(
      (static_cast<uint64_t>(lcg_seed) * 279470273UL) % 4294967291UL);
  return lcg_seed;
}

// Restore the generator to its initial state for a fresh sequence.
void lcg_reset() { lcg_seed = 48271; }

// Directory the test data files are loaded from.
std::string test_data_path = "tests/";
// example of how to build up a serialized buffer algorithmically:
// Builds a fully-populated Monster FlatBuffer.  The serialized bytes are
// copied into `buffer` (an out-parameter, for callers that want a
// std::string copy) and the builder's heap buffer is returned detached.
flatbuffers::DetachedBuffer CreateFlatBufferTest(std::string &buffer) {
  flatbuffers::FlatBufferBuilder builder;
  auto vec = Vec3(1, 2, 3, 0, Color_Red, Test(10, 20));
  auto name = builder.CreateString("MyMonster");
  unsigned char inv_data[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
  auto inventory = builder.CreateVector(inv_data, 10);
  // Alternatively, create the vector first, and fill in data later:
  // unsigned char *inv_buf = nullptr;
  // auto inventory = builder.CreateUninitializedVector<unsigned char>(
  //                                                          10, &inv_buf);
  // memcpy(inv_buf, inv_data, 10);
  Test tests[] = { Test(10, 20), Test(30, 40) };
  auto testv = builder.CreateVectorOfStructs(tests, 2);
  // create monster with very few fields set:
  // (same functionality as CreateMonster below, but sets fields manually)
  flatbuffers::Offset<Monster> mlocs[3];
  auto fred = builder.CreateString("Fred");
  auto barney = builder.CreateString("Barney");
  auto wilma = builder.CreateString("Wilma");
  // NOTE: strings must be created before the MonsterBuilder is opened,
  // since nested builder calls are not allowed while a table is in flight.
  MonsterBuilder mb1(builder);
  mb1.add_name(fred);
  mlocs[0] = mb1.Finish();
  MonsterBuilder mb2(builder);
  mb2.add_name(barney);
  mb2.add_hp(1000);
  mlocs[1] = mb2.Finish();
  MonsterBuilder mb3(builder);
  mb3.add_name(wilma);
  mlocs[2] = mb3.Finish();
  // Create an array of strings. Also test string pooling, and lambdas.
  const char *names[] = { "bob", "fred", "bob", "fred" };
  auto vecofstrings =
      builder.CreateVector<flatbuffers::Offset<flatbuffers::String>>(4,
        [&](size_t i) {
          return builder.CreateSharedString(names[i]);
        });
  // Creating vectors of strings in one convenient call.
  std::vector<std::string> names2;
  names2.push_back("jane");
  names2.push_back("mary");
  auto vecofstrings2 = builder.CreateVectorOfStrings(names2);
  // Create an array of sorted tables, can be used with binary search when read:
  auto vecoftables = builder.CreateVectorOfSortedTables(mlocs, 3);
  // Create an array of sorted structs,
  // can be used with binary search when read:
  std::vector<Ability> abilities;
  abilities.push_back(Ability(4, 40));
  abilities.push_back(Ability(3, 30));
  abilities.push_back(Ability(2, 20));
  abilities.push_back(Ability(1, 10));
  auto vecofstructs = builder.CreateVectorOfSortedStructs(&abilities);
  // Create a nested FlatBuffer.
  // Nested FlatBuffers are stored in a ubyte vector, which can be convenient
  // since they can be memcpy'd around much easier than other FlatBuffer
  // values. They have little overhead compared to storing the table directly.
  // As a test, create a mostly empty Monster buffer:
  flatbuffers::FlatBufferBuilder nested_builder;
  auto nmloc = CreateMonster(nested_builder, nullptr, 0, 0,
                             nested_builder.CreateString("NestedMonster"));
  FinishMonsterBuffer(nested_builder, nmloc);
  // Now we can store the buffer in the parent. Note that by default, vectors
  // are only aligned to their elements or size field, so in this case if the
  // buffer contains 64-bit elements, they may not be correctly aligned. We fix
  // that with:
  builder.ForceVectorAlignment(nested_builder.GetSize(), sizeof(uint8_t),
                               nested_builder.GetBufferMinAlignment());
  // If for whatever reason you don't have the nested_builder available, you
  // can substitute flatbuffers::largest_scalar_t (64-bit) for the alignment, or
  // the largest force_align value in your schema if you're using it.
  auto nested_flatbuffer_vector =
      builder.CreateVector(nested_builder.GetBufferPointer(),
                           nested_builder.GetSize());
  // Test a nested FlexBuffer:
  flexbuffers::Builder flexbuild;
  flexbuild.Int(1234);
  flexbuild.Finish();
  auto flex = builder.CreateVector(flexbuild.GetBuffer());
  // shortcut for creating monster with all fields set:
  auto mloc = CreateMonster(builder, &vec, 150, 80, name, inventory, Color_Blue,
                            Any_Monster, mlocs[1].Union(), // Store a union.
                            testv, vecofstrings, vecoftables, 0,
                            nested_flatbuffer_vector, 0, false,
                            0, 0, 0, 0, 0, 0, 0, 0, 0, 3.14159f, 3.0f, 0.0f,
                            vecofstrings2, vecofstructs, flex);
  FinishMonsterBuffer(builder, mloc);
  #ifdef FLATBUFFERS_TEST_VERBOSE
  // print byte data for debugging:
  auto p = builder.GetBufferPointer();
  for (flatbuffers::uoffset_t i = 0; i < builder.GetSize(); i++)
    printf("%d ", p[i]);
  #endif
  // return the buffer for the caller to use.
  auto bufferpointer =
      reinterpret_cast<const char *>(builder.GetBufferPointer());
  buffer.assign(bufferpointer, bufferpointer + builder.GetSize());
  return builder.ReleaseBufferPointer();
}
// example of accessing a buffer loaded in memory:
// Verifies then reads back every field written by CreateFlatBufferTest.
// `pooled` controls whether the string-pooling pointer-equality checks run
// (false for buffers built via paths that don't share strings, e.g. JSON).
void AccessFlatBufferTest(const uint8_t *flatbuf, size_t length,
                          bool pooled = true) {
  // First, verify the buffers integrity (optional)
  flatbuffers::Verifier verifier(flatbuf, length);
  TEST_EQ(VerifyMonsterBuffer(verifier), true);
  // Also verify two back-to-back copies, to check that verification computes
  // the correct buffer size and doesn't read past the first buffer's end.
  std::vector<uint8_t> test_buff;
  test_buff.resize(length * 2);
  std::memcpy(&test_buff[0], flatbuf , length);
  std::memcpy(&test_buff[length], flatbuf , length);
  flatbuffers::Verifier verifier1(&test_buff[0], length);
  TEST_EQ(VerifyMonsterBuffer(verifier1), true);
  TEST_EQ(verifier1.GetComputedSize(), length);
  flatbuffers::Verifier verifier2(&test_buff[length], length);
  TEST_EQ(VerifyMonsterBuffer(verifier2), true);
  TEST_EQ(verifier2.GetComputedSize(), length);
  TEST_EQ(strcmp(MonsterIdentifier(), "MONS"), 0);
  TEST_EQ(MonsterBufferHasIdentifier(flatbuf), true);
  TEST_EQ(strcmp(MonsterExtension(), "mon"), 0);
  // Access the buffer from the root.
  auto monster = GetMonster(flatbuf);
  TEST_EQ(monster->hp(), 80);
  TEST_EQ(monster->mana(), 150);  // default
  TEST_EQ_STR(monster->name()->c_str(), "MyMonster");
  // Can't access the following field, it is deprecated in the schema,
  // which means accessors are not generated:
  // monster.friendly()
  auto pos = monster->pos();
  TEST_NOTNULL(pos);
  TEST_EQ(pos->z(), 3);
  TEST_EQ(pos->test3().a(), 10);
  TEST_EQ(pos->test3().b(), 20);
  auto inventory = monster->inventory();
  TEST_EQ(VectorLength(inventory), 10UL);  // Works even if inventory is null.
  TEST_NOTNULL(inventory);
  unsigned char inv_data[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
  for (auto it = inventory->begin(); it != inventory->end(); ++it)
    TEST_EQ(*it, inv_data[it - inventory->begin()]);
  TEST_EQ(monster->color(), Color_Blue);
  // Example of accessing a union:
  TEST_EQ(monster->test_type(), Any_Monster);  // First make sure which it is.
  auto monster2 = reinterpret_cast<const Monster *>(monster->test());
  TEST_NOTNULL(monster2);
  TEST_EQ_STR(monster2->name()->c_str(), "Fred");
  // Example of accessing a vector of strings:
  auto vecofstrings = monster->testarrayofstring();
  TEST_EQ(vecofstrings->Length(), 4U);
  TEST_EQ_STR(vecofstrings->Get(0)->c_str(), "bob");
  TEST_EQ_STR(vecofstrings->Get(1)->c_str(), "fred");
  if (pooled) {
    // These should have pointer equality because of string pooling.
    TEST_EQ(vecofstrings->Get(0)->c_str(), vecofstrings->Get(2)->c_str());
    TEST_EQ(vecofstrings->Get(1)->c_str(), vecofstrings->Get(3)->c_str());
  }
  auto vecofstrings2 = monster->testarrayofstring2();
  if (vecofstrings2) {
    TEST_EQ(vecofstrings2->Length(), 2U);
    TEST_EQ_STR(vecofstrings2->Get(0)->c_str(), "jane");
    TEST_EQ_STR(vecofstrings2->Get(1)->c_str(), "mary");
  }
  // Example of accessing a vector of tables:
  auto vecoftables = monster->testarrayoftables();
  TEST_EQ(vecoftables->Length(), 3U);
  for (auto it = vecoftables->begin(); it != vecoftables->end(); ++it)
    TEST_EQ(strlen(it->name()->c_str()) >= 4, true);
  // Tables were sorted by name at creation time, so lookup by key works:
  TEST_EQ_STR(vecoftables->Get(0)->name()->c_str(), "Barney");
  TEST_EQ(vecoftables->Get(0)->hp(), 1000);
  TEST_EQ_STR(vecoftables->Get(1)->name()->c_str(), "Fred");
  TEST_EQ_STR(vecoftables->Get(2)->name()->c_str(), "Wilma");
  TEST_NOTNULL(vecoftables->LookupByKey("Barney"));
  TEST_NOTNULL(vecoftables->LookupByKey("Fred"));
  TEST_NOTNULL(vecoftables->LookupByKey("Wilma"));
  // Test accessing a vector of sorted structs
  auto vecofstructs = monster->testarrayofsortedstruct();
  if (vecofstructs) {  // not filled in monster_test.bfbs
    for (flatbuffers::uoffset_t i = 0; i < vecofstructs->size()-1; i++) {
      auto left = vecofstructs->Get(i);
      auto right = vecofstructs->Get(i+1);
      TEST_EQ(true, (left->KeyCompareLessThan(right)));
    }
    TEST_NOTNULL(vecofstructs->LookupByKey(3));
    // Key 5 was never inserted, so lookup must return null.
    TEST_EQ(static_cast<const Ability*>(nullptr), vecofstructs->LookupByKey(5));
  }
  // Test nested FlatBuffers if available:
  auto nested_buffer = monster->testnestedflatbuffer();
  if (nested_buffer) {
    // nested_buffer is a vector of bytes you can memcpy. However, if you
    // actually want to access the nested data, this is a convenient
    // accessor that directly gives you the root table:
    auto nested_monster = monster->testnestedflatbuffer_nested_root();
    TEST_EQ_STR(nested_monster->name()->c_str(), "NestedMonster");
  }
  // Test flexbuffer if available:
  auto flex = monster->flex();
  // flex is a vector of bytes you can memcpy etc.
  TEST_EQ(flex->size(), 4);  // Encoded FlexBuffer bytes.
  // However, if you actually want to access the nested data, this is a
  // convenient accessor that directly gives you the root value:
  TEST_EQ(monster->flex_flexbuffer_root().AsInt16(), 1234);
  // Since Flatbuffers uses explicit mechanisms to override the default
  // compiler alignment, double check that the compiler indeed obeys them:
  // (Test consists of a short and byte):
  TEST_EQ(flatbuffers::AlignOf<Test>(), 2UL);
  TEST_EQ(sizeof(Test), 4UL);
  auto tests = monster->test4();
  TEST_NOTNULL(tests);
  auto test_0 = tests->Get(0);
  auto test_1 = tests->Get(1);
  TEST_EQ(test_0->a(), 10);
  TEST_EQ(test_0->b(), 20);
  TEST_EQ(test_1->a(), 30);
  TEST_EQ(test_1->b(), 40);
  for (auto it = tests->begin(); it != tests->end(); ++it) {
    TEST_EQ(it->a() == 10 || it->a() == 30, true);  // Just testing iterators.
  }
  // Checking for presence of fields:
  TEST_EQ(flatbuffers::IsFieldPresent(monster, Monster::VT_HP), true);
  TEST_EQ(flatbuffers::IsFieldPresent(monster, Monster::VT_MANA), false);
  // Obtaining a buffer from a root:
  TEST_EQ(GetBufferStartFromRootPointer(monster), flatbuf);
}
// Change a FlatBuffer in-place, after it has been constructed.
// Exercises scalar, struct, vector and nested-table mutation; every change
// is reverted so the buffer still passes AccessFlatBufferTest at the end.
void MutateFlatBuffersTest(uint8_t *flatbuf, std::size_t length) {
  // Get non-const pointer to root.
  auto monster = GetMutableMonster(flatbuf);
  // Each of these tests mutates, then tests, then set back to the original,
  // so we can test that the buffer in the end still passes our original test.
  auto hp_ok = monster->mutate_hp(10);
  TEST_EQ(hp_ok, true);  // Field was present.
  TEST_EQ(monster->hp(), 10);
  // Mutate to default value
  auto hp_ok_default = monster->mutate_hp(100);
  TEST_EQ(hp_ok_default, true);  // Field was present.
  TEST_EQ(monster->hp(), 100);
  // Test that mutate to default above keeps field valid for further mutations
  auto hp_ok_2 = monster->mutate_hp(20);
  TEST_EQ(hp_ok_2, true);
  TEST_EQ(monster->hp(), 20);
  monster->mutate_hp(80);
  // Monster originally at 150 mana (default value)
  auto mana_default_ok = monster->mutate_mana(150);  // Mutate to default value.
  TEST_EQ(mana_default_ok, true);  // Mutation should succeed, because default value.
  TEST_EQ(monster->mana(), 150);
  auto mana_ok = monster->mutate_mana(10);
  // A field stored as its default is absent from the buffer, so there is
  // nothing in place to overwrite and mutation must fail.
  TEST_EQ(mana_ok, false);  // Field was NOT present, because default value.
  TEST_EQ(monster->mana(), 150);
  // Mutate structs.
  auto pos = monster->mutable_pos();
  auto test3 = pos->mutable_test3();  // Struct inside a struct.
  test3.mutate_a(50);                 // Struct fields never fail.
  TEST_EQ(test3.a(), 50);
  test3.mutate_a(10);
  // Mutate vectors.
  auto inventory = monster->mutable_inventory();
  inventory->Mutate(9, 100);
  TEST_EQ(inventory->Get(9), 100);
  inventory->Mutate(9, 9);
  auto tables = monster->mutable_testarrayoftables();
  auto first = tables->GetMutableObject(0);
  TEST_EQ(first->hp(), 1000);
  first->mutate_hp(0);
  TEST_EQ(first->hp(), 0);
  first->mutate_hp(1000);
  // Run the verifier and the regular test to make sure we didn't trample on
  // anything.
  AccessFlatBufferTest(flatbuf, length);
}
// Unpack a FlatBuffer into objects.
// Round-trips the buffer through the generated object API twice
// (unpack -> pack -> unpack -> pack), checks the two serializations are
// byte-identical, then spot-checks the unpacked fields.
void ObjectFlatBuffersTest(uint8_t *flatbuf) {
  // Optional: we can specify resolver and rehasher functions to turn hashed
  // strings into object pointers and back, to implement remote references
  // and such.
  auto resolver = flatbuffers::resolver_function_t(
      [](void **pointer_adr, flatbuffers::hash_value_t hash) {
        (void)pointer_adr;
        (void)hash;
        // Don't actually do anything, leave variable null.
      });
  auto rehasher = flatbuffers::rehasher_function_t(
      [](void *pointer) -> flatbuffers::hash_value_t {
        (void)pointer;
        return 0;
      });
  // Turn a buffer into C++ objects.
  auto monster1 = UnPackMonster(flatbuf, &resolver);
  // Re-serialize the data.
  flatbuffers::FlatBufferBuilder fbb1;
  fbb1.Finish(CreateMonster(fbb1, monster1.get(), &rehasher),
              MonsterIdentifier());
  // Unpack again, and re-serialize again.
  auto monster2 = UnPackMonster(fbb1.GetBufferPointer(), &resolver);
  flatbuffers::FlatBufferBuilder fbb2;
  fbb2.Finish(CreateMonster(fbb2, monster2.get(), &rehasher),
              MonsterIdentifier());
  // Now we've gone full round-trip, the two buffers should match.
  auto len1 = fbb1.GetSize();
  auto len2 = fbb2.GetSize();
  TEST_EQ(len1, len2);
  TEST_EQ(memcmp(fbb1.GetBufferPointer(), fbb2.GetBufferPointer(),
                 len1), 0);
  // Test it with the original buffer test to make sure all data survived.
  AccessFlatBufferTest(fbb2.GetBufferPointer(), len2, false);
  // Test accessing fields, similar to AccessFlatBufferTest above.
  TEST_EQ(monster2->hp, 80);
  TEST_EQ(monster2->mana, 150);  // default
  TEST_EQ_STR(monster2->name.c_str(), "MyMonster");
  auto &pos = monster2->pos;
  TEST_NOTNULL(pos);
  TEST_EQ(pos->z(), 3);
  TEST_EQ(pos->test3().a(), 10);
  TEST_EQ(pos->test3().b(), 20);
  auto &inventory = monster2->inventory;
  TEST_EQ(inventory.size(), 10UL);
  unsigned char inv_data[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
  for (auto it = inventory.begin(); it != inventory.end(); ++it)
    TEST_EQ(*it, inv_data[it - inventory.begin()]);
  TEST_EQ(monster2->color, Color_Blue);
  // The union member is accessed through its typed helper on the object API.
  auto monster3 = monster2->test.AsMonster();
  TEST_NOTNULL(monster3);
  TEST_EQ_STR(monster3->name.c_str(), "Fred");
  auto &vecofstrings = monster2->testarrayofstring;
  TEST_EQ(vecofstrings.size(), 4U);
  TEST_EQ_STR(vecofstrings[0].c_str(), "bob");
  TEST_EQ_STR(vecofstrings[1].c_str(), "fred");
  auto &vecofstrings2 = monster2->testarrayofstring2;
  TEST_EQ(vecofstrings2.size(), 2U);
  TEST_EQ_STR(vecofstrings2[0].c_str(), "jane");
  TEST_EQ_STR(vecofstrings2[1].c_str(), "mary");
  auto &vecoftables = monster2->testarrayoftables;
  TEST_EQ(vecoftables.size(), 3U);
  TEST_EQ_STR(vecoftables[0]->name.c_str(), "Barney");
  TEST_EQ(vecoftables[0]->hp, 1000);
  TEST_EQ_STR(vecoftables[1]->name.c_str(), "Fred");
  TEST_EQ_STR(vecoftables[2]->name.c_str(), "Wilma");
  auto &tests = monster2->test4;
  TEST_EQ(tests[0].a(), 10);
  TEST_EQ(tests[0].b(), 20);
  TEST_EQ(tests[1].a(), 30);
  TEST_EQ(tests[1].b(), 40);
}
// Prefix a FlatBuffer with a size field.
// Builds, verifies and reads back a buffer whose root is preceded by a
// 32-bit length prefix (FinishSizePrefixed / GetSizePrefixedRoot pair).
void SizePrefixedTest() {
  // Create size prefixed buffer.
  flatbuffers::FlatBufferBuilder fbb;
  fbb.FinishSizePrefixed(CreateMonster(fbb, 0, 200, 300,
                                       fbb.CreateString("bob")));
  // Verify it.
  flatbuffers::Verifier verifier(fbb.GetBufferPointer(), fbb.GetSize());
  TEST_EQ(verifier.VerifySizePrefixedBuffer<Monster>(nullptr), true);
  // Access it.
  auto m = flatbuffers::GetSizePrefixedRoot<MyGame::Example::Monster>(
      fbb.GetBufferPointer());
  TEST_EQ(m->mana(), 200);
  TEST_EQ(m->hp(), 300);
  TEST_EQ_STR(m->name()->c_str(), "bob");
}
// example of parsing text straight into a buffer, and generating
// text back from it:
// Loads monster_test.fbs and the golden JSON from test_data_path, parses
// JSON -> binary, regenerates JSON from the binary and requires an exact
// match; then repeats the round trip through the identifier-keyed Registry.
void ParseAndGenerateTextTest() {
  // load FlatBuffer schema (.fbs) and JSON from disk
  std::string schemafile;
  std::string jsonfile;
  TEST_EQ(flatbuffers::LoadFile(
    (test_data_path + "monster_test.fbs").c_str(), false, &schemafile), true);
  TEST_EQ(flatbuffers::LoadFile(
    (test_data_path + "monsterdata_test.golden").c_str(), false, &jsonfile),
    true);
  // parse schema first, so we can use it to parse the data after
  flatbuffers::Parser parser;
  auto include_test_path =
      flatbuffers::ConCatPathFileName(test_data_path, "include_test");
  const char *include_directories[] = {
    test_data_path.c_str(), include_test_path.c_str(), nullptr
  };
  TEST_EQ(parser.Parse(schemafile.c_str(), include_directories), true);
  TEST_EQ(parser.Parse(jsonfile.c_str(), include_directories), true);
  // here, parser.builder_ contains a binary buffer that is the parsed data.
  // First, verify it, just in case:
  flatbuffers::Verifier verifier(parser.builder_.GetBufferPointer(),
                                 parser.builder_.GetSize());
  TEST_EQ(VerifyMonsterBuffer(verifier), true);
  AccessFlatBufferTest(parser.builder_.GetBufferPointer(),
                       parser.builder_.GetSize(), false);
  // to ensure it is correct, we now generate text back from the binary,
  // and compare the two:
  std::string jsongen;
  auto result = GenerateText(parser, parser.builder_.GetBufferPointer(), &jsongen);
  TEST_EQ(result, true);
  if (jsongen != jsonfile) {
    // Dump both texts for diagnosis, then force a failure.
    printf("%s----------------\n%s", jsongen.c_str(), jsonfile.c_str());
    TEST_NOTNULL(NULL);
  }
  // We can also do the above using the convenient Registry that knows about
  // a set of file_identifiers mapped to schemas.
  flatbuffers::Registry registry;
  // Make sure schemas can find their includes.
  registry.AddIncludeDirectory(test_data_path.c_str());
  registry.AddIncludeDirectory(include_test_path.c_str());
  // Call this with many schemas if possible.
  registry.Register(MonsterIdentifier(),
                    (test_data_path + "monster_test.fbs").c_str());
  // Now we got this set up, we can parse by just specifying the identifier,
  // the correct schema will be loaded on the fly:
  auto buf = registry.TextToFlatBuffer(jsonfile.c_str(),
                                       MonsterIdentifier());
  // If this fails, check registry.lasterror_.
  TEST_NOTNULL(buf.data());
  // Test the buffer, to be sure:
  AccessFlatBufferTest(buf.data(), buf.size(), false);
  // We can use the registry to turn this back into text, in this case it
  // will get the file_identifier from the binary:
  std::string text;
  auto ok = registry.FlatBufferToText(buf.data(), buf.size(), &text);
  // If this fails, check registry.lasterror_.
  TEST_EQ(ok, true);
  TEST_EQ_STR(text.c_str(), jsonfile.c_str());
}
void ReflectionTest(uint8_t *flatbuf, size_t length) {
// Load a binary schema.
std::string bfbsfile;
TEST_EQ(flatbuffers::LoadFile(
(test_data_path + "monster_test.bfbs").c_str(), true, &bfbsfile),
true);
// Verify it, just in case:
flatbuffers::Verifier verifier(
reinterpret_cast<const uint8_t *>(bfbsfile.c_str()), bfbsfile.length());
TEST_EQ(reflection::VerifySchemaBuffer(verifier), true);
// Make sure the schema is what we expect it to be.
auto &schema = *reflection::GetSchema(bfbsfile.c_str());
auto root_table = schema.root_table();
TEST_EQ_STR(root_table->name()->c_str(), "MyGame.Example.Monster");
auto fields = root_table->fields();
auto hp_field_ptr = fields->LookupByKey("hp");
TEST_NOTNULL(hp_field_ptr);
auto &hp_field = *hp_field_ptr;
TEST_EQ_STR(hp_field.name()->c_str(), "hp");
TEST_EQ(hp_field.id(), 2);
TEST_EQ(hp_field.type()->base_type(), reflection::Short);
auto friendly_field_ptr = fields->LookupByKey("friendly");
TEST_NOTNULL(friendly_field_ptr);
TEST_NOTNULL(friendly_field_ptr->attributes());
TEST_NOTNULL(friendly_field_ptr->attributes()->LookupByKey("priority"));
// Make sure the table index is what we expect it to be.
auto pos_field_ptr = fields->LookupByKey("pos");
TEST_NOTNULL(pos_field_ptr);
TEST_EQ(pos_field_ptr->type()->base_type(), reflection::Obj);
auto pos_table_ptr = schema.objects()->Get(pos_field_ptr->type()->index());
TEST_NOTNULL(pos_table_ptr);
TEST_EQ_STR(pos_table_ptr->name()->c_str(), "MyGame.Example.Vec3");
// Now use it to dynamically access a buffer.
auto &root = *flatbuffers::GetAnyRoot(flatbuf);
// Verify the buffer first using reflection based verification
TEST_EQ(flatbuffers::Verify(schema, *schema.root_table(), flatbuf, length),
true);
auto hp = flatbuffers::GetFieldI<uint16_t>(root, hp_field);
TEST_EQ(hp, 80);
// Rather than needing to know the type, we can also get the value of
// any field as an int64_t/double/string, regardless of what it actually is.
auto hp_int64 = flatbuffers::GetAnyFieldI(root, hp_field);
TEST_EQ(hp_int64, 80);
auto hp_double = flatbuffers::GetAnyFieldF(root, hp_field);
TEST_EQ(hp_double, 80.0);
auto hp_string = flatbuffers::GetAnyFieldS(root, hp_field, &schema);
TEST_EQ_STR(hp_string.c_str(), "80");
// Get struct field through reflection
auto pos_struct = flatbuffers::GetFieldStruct(root, *pos_field_ptr);
TEST_NOTNULL(pos_struct);
TEST_EQ(flatbuffers::GetAnyFieldF(
*pos_struct, *pos_table_ptr->fields()->LookupByKey("z")), 3.0f);
auto test3_field = pos_table_ptr->fields()->LookupByKey("test3");
auto test3_struct = flatbuffers::GetFieldStruct(*pos_struct, *test3_field);
TEST_NOTNULL(test3_struct);
auto test3_object = schema.objects()->Get(test3_field->type()->index());
TEST_EQ(flatbuffers::GetAnyFieldF(
*test3_struct, *test3_object->fields()->LookupByKey("a")), 10);
// We can also modify it.
flatbuffers::SetField<uint16_t>(&root, hp_field, 200);
hp = flatbuffers::GetFieldI<uint16_t>(root, hp_field);
TEST_EQ(hp, 200);
// We can also set fields generically:
flatbuffers::SetAnyFieldI(&root, hp_field, 300);
hp_int64 = flatbuffers::GetAnyFieldI(root, hp_field);
TEST_EQ(hp_int64, 300);
flatbuffers::SetAnyFieldF(&root, hp_field, 300.5);
hp_int64 = flatbuffers::GetAnyFieldI(root, hp_field);
TEST_EQ(hp_int64, 300);
flatbuffers::SetAnyFieldS(&root, hp_field, "300");
hp_int64 = flatbuffers::GetAnyFieldI(root, hp_field);
TEST_EQ(hp_int64, 300);
// Test buffer is valid after the modifications
TEST_EQ(flatbuffers::Verify(schema, *schema.root_table(), flatbuf, length),
true);
// Reset it, for further tests.
flatbuffers::SetField<uint16_t>(&root, hp_field, 80);
// More advanced functionality: changing the size of items in-line!
// First we put the FlatBuffer inside an std::vector.
std::vector<uint8_t> resizingbuf(flatbuf, flatbuf + length);
// Find the field we want to modify.
auto &name_field = *fields->LookupByKey("name");
// Get the root.
// This time we wrap the result from GetAnyRoot in a smartpointer that
// will keep rroot valid as resizingbuf resizes.
auto rroot = flatbuffers::piv(flatbuffers::GetAnyRoot(resizingbuf.data()),
resizingbuf);
SetString(schema, "totally new string", GetFieldS(**rroot, name_field),
&resizingbuf);
// Here resizingbuf has changed, but rroot is still valid.
TEST_EQ_STR(GetFieldS(**rroot, name_field)->c_str(), "totally new string");
// Now lets extend a vector by 100 elements (10 -> 110).
auto &inventory_field = *fields->LookupByKey("inventory");
auto rinventory = flatbuffers::piv(
flatbuffers::GetFieldV<uint8_t>(**rroot, inventory_field),
resizingbuf);
flatbuffers::ResizeVector<uint8_t>(schema, 110, 50, *rinventory,
&resizingbuf);
// rinventory still valid, so lets read from it.
TEST_EQ(rinventory->Get(10), 50);
// For reflection uses not covered already, there is a more powerful way:
// we can simply generate whatever object we want to add/modify in a
// FlatBuffer of its own, then add that to an existing FlatBuffer:
// As an example, let's add a string to an array of strings.
// First, find our field:
auto &testarrayofstring_field = *fields->LookupByKey("testarrayofstring");
// Find the vector value:
auto rtestarrayofstring = flatbuffers::piv(
flatbuffers::GetFieldV<flatbuffers::Offset<flatbuffers::String>>(
**rroot, testarrayofstring_field),
resizingbuf);
// It's a vector of 2 strings, to which we add one more, initialized to
// offset 0.
flatbuffers::ResizeVector<flatbuffers::Offset<flatbuffers::String>>(
schema, 3, 0, *rtestarrayofstring, &resizingbuf);
// Here we just create a buffer that contans a single string, but this
// could also be any complex set of tables and other values.
flatbuffers::FlatBufferBuilder stringfbb;
stringfbb.Finish(stringfbb.CreateString("hank"));
// Add the contents of it to our existing FlatBuffer.
// We do this last, so the pointer doesn't get invalidated (since it is
// at the end of the buffer):
auto string_ptr = flatbuffers::AddFlatBuffer(resizingbuf,
stringfbb.GetBufferPointer(),
stringfbb.GetSize());
// Finally, set the new value in the vector.
rtestarrayofstring->MutateOffset(2, string_ptr);
TEST_EQ_STR(rtestarrayofstring->Get(0)->c_str(), "bob");
TEST_EQ_STR(rtestarrayofstring->Get(2)->c_str(), "hank");
// Test integrity of all resize operations above.
flatbuffers::Verifier resize_verifier(
reinterpret_cast<const uint8_t *>(resizingbuf.data()),
resizingbuf.size());
TEST_EQ(VerifyMonsterBuffer(resize_verifier), true);
// Test buffer is valid using reflection as well
TEST_EQ(flatbuffers::Verify(schema, *schema.root_table(), resizingbuf.data(),
resizingbuf.size()), true);
// As an additional test, also set it on the name field.
// Note: unlike the name change above, this just overwrites the offset,
// rather than changing the string in-place.
SetFieldT(*rroot, name_field, string_ptr);
TEST_EQ_STR(GetFieldS(**rroot, name_field)->c_str(), "hank");
// Using reflection, rather than mutating binary FlatBuffers, we can also copy
// tables and other things out of other FlatBuffers into a FlatBufferBuilder,
// either part or whole.
flatbuffers::FlatBufferBuilder fbb;
auto root_offset = flatbuffers::CopyTable(fbb, schema, *root_table,
*flatbuffers::GetAnyRoot(flatbuf),
true);
fbb.Finish(root_offset, MonsterIdentifier());
// Test that it was copied correctly:
AccessFlatBufferTest(fbb.GetBufferPointer(), fbb.GetSize());
// Test buffer is valid using reflection as well
TEST_EQ(flatbuffers::Verify(schema, *schema.root_table(),
fbb.GetBufferPointer(), fbb.GetSize()), true);
}
// Parse a .proto schema, output as .fbs
// Round-trip check: load prototest/test.proto, convert it to .fbs text,
// verify the result is itself parsable, and compare it byte-for-byte
// against the checked-in golden file.
void ParseProtoTest() {
  // load the .proto and the golden file from disk
  std::string protofile;
  std::string goldenfile;
  TEST_EQ(flatbuffers::LoadFile(
    (test_data_path + "prototest/test.proto").c_str(), false, &protofile),
    true);
  TEST_EQ(flatbuffers::LoadFile(
    (test_data_path + "prototest/test.golden").c_str(), false, &goldenfile),
    true);
  flatbuffers::IDLOptions opts;
  opts.include_dependence_headers = false;
  opts.proto_mode = true;  // switches the parser into .proto mode
  // Parse proto.
  flatbuffers::Parser parser(opts);
  auto protopath = test_data_path + "prototest/";
  const char *include_directories[] = { protopath.c_str(), nullptr };
  TEST_EQ(parser.Parse(protofile.c_str(), include_directories), true);
  // Generate fbs.
  auto fbs = flatbuffers::GenerateFBS(parser, "test");
  // Ensure generated file is parsable.
  flatbuffers::Parser parser2;
  TEST_EQ(parser2.Parse(fbs.c_str(), nullptr), true);
  // Compare against the golden copy; on mismatch print both for diagnosis
  // and fail via TEST_NOTNULL(NULL).
  if (fbs != goldenfile) {
    printf("%s----------------\n%s", fbs.c_str(), goldenfile.c_str());
    TEST_NOTNULL(NULL);
  }
}
// Reads the scalar field at `voffset` from a raw (untyped) table, using 0
// as the default when the field is absent, and checks it equals `val`.
template<typename T> void CompareTableFieldValue(flatbuffers::Table *table,
                                                 flatbuffers::voffset_t voffset,
                                                 T val) {
  T read = table->GetField(voffset, static_cast<T>(0));
  TEST_EQ(read, val);
}
// Low level stress/fuzz test: serialize/deserialize a variety of
// different kinds of data in different combinations
// The write phase and the read phase both reset the deterministic RNG
// (lcg_reset) so they draw the same sequence of type choices; the values
// written can therefore be predicted and verified exactly.
void FuzzTest1() {
  // Values we're testing against: chosen to ensure no bits get chopped
  // off anywhere, and also be different from eachother.
  const uint8_t bool_val = true;
  const int8_t char_val = -127; // 0x81
  const uint8_t uchar_val = 0xFF;
  const int16_t short_val = -32222; // 0x8222;
  const uint16_t ushort_val = 0xFEEE;
  const int32_t int_val = 0x83333333;
  const uint32_t uint_val = 0xFDDDDDDD;
  const int64_t long_val = 0x8444444444444444LL;
  const uint64_t ulong_val = 0xFCCCCCCCCCCCCCCCULL;
  const float float_val = 3.14159f;
  const double double_val = 3.14159265359;
  const int test_values_max = 11;
  const flatbuffers::voffset_t fields_per_object = 4;
  const int num_fuzz_objects = 10000; // The higher, the more thorough :)
  flatbuffers::FlatBufferBuilder builder;
  lcg_reset(); // Keep it deterministic.
  flatbuffers::uoffset_t objects[num_fuzz_objects];
  // Generate num_fuzz_objects random objects each consisting of
  // fields_per_object fields, each of a random type.
  for (int i = 0; i < num_fuzz_objects; i++) {
    auto start = builder.StartTable();
    for (flatbuffers::voffset_t f = 0; f < fields_per_object; f++) {
      int choice = lcg_rand() % test_values_max;
      auto off = flatbuffers::FieldIndexToOffset(f);
      switch (choice) {
        case 0: builder.AddElement<uint8_t >(off, bool_val, 0); break;
        case 1: builder.AddElement<int8_t  >(off, char_val, 0); break;
        case 2: builder.AddElement<uint8_t >(off, uchar_val, 0); break;
        case 3: builder.AddElement<int16_t >(off, short_val, 0); break;
        case 4: builder.AddElement<uint16_t>(off, ushort_val, 0); break;
        case 5: builder.AddElement<int32_t >(off, int_val, 0); break;
        case 6: builder.AddElement<uint32_t>(off, uint_val, 0); break;
        case 7: builder.AddElement<int64_t >(off, long_val, 0); break;
        case 8: builder.AddElement<uint64_t>(off, ulong_val, 0); break;
        case 9: builder.AddElement<float   >(off, float_val, 0); break;
        case 10: builder.AddElement<double >(off, double_val, 0); break;
      }
    }
    objects[i] = builder.EndTable(start, fields_per_object);
  }
  builder.PreAlign<flatbuffers::largest_scalar_t>(0); // Align whole buffer.
  lcg_reset(); // Reset.
  // Object offsets are measured back from the end of the buffer (eob).
  uint8_t *eob = builder.GetCurrentBufferPointer() + builder.GetSize();
  // Test that all objects we generated are readable and return the
  // expected values. We generate random objects in the same order
  // so this is deterministic.
  for (int i = 0; i < num_fuzz_objects; i++) {
    auto table = reinterpret_cast<flatbuffers::Table *>(eob - objects[i]);
    for (flatbuffers::voffset_t f = 0; f < fields_per_object; f++) {
      int choice = lcg_rand() % test_values_max;
      flatbuffers::voffset_t off = flatbuffers::FieldIndexToOffset(f);
      switch (choice) {
        case 0: CompareTableFieldValue(table, off, bool_val  ); break;
        case 1: CompareTableFieldValue(table, off, char_val  ); break;
        case 2: CompareTableFieldValue(table, off, uchar_val ); break;
        case 3: CompareTableFieldValue(table, off, short_val ); break;
        case 4: CompareTableFieldValue(table, off, ushort_val); break;
        case 5: CompareTableFieldValue(table, off, int_val   ); break;
        case 6: CompareTableFieldValue(table, off, uint_val  ); break;
        case 7: CompareTableFieldValue(table, off, long_val  ); break;
        case 8: CompareTableFieldValue(table, off, ulong_val ); break;
        case 9: CompareTableFieldValue(table, off, float_val ); break;
        case 10: CompareTableFieldValue(table, off, double_val); break;
      }
    }
  }
}
// High level stress/fuzz test: generate a big schema and
// matching json data in random combinations, then parse both,
// generate json back from the binary, and compare with the original.
void FuzzTest2() {
  lcg_reset(); // Keep it deterministic.
  const int num_definitions = 30;
  const int num_struct_definitions = 5; // Subset of num_definitions.
  const int fields_per_definition = 15;
  const int instances_per_definition = 5;
  const int deprecation_rate = 10; // 1 in deprecation_rate fields will
                                   // be deprecated.
  std::string schema = "namespace test;\n\n";
  struct RndDef {
    std::string instances[instances_per_definition];
    // Since we're generating schema and corresponding data in tandem,
    // this convenience function adds strings to both at once.
    static void Add(RndDef (&definitions_l)[num_definitions],
                    std::string &schema_l,
                    const int instances_per_definition_l,
                    const char *schema_add, const char *instance_add,
                    int definition) {
      schema_l += schema_add;
      for (int i = 0; i < instances_per_definition_l; i++)
        definitions_l[definition].instances[i] += instance_add;
    }
  };
  // Macro wrappers over RndDef::Add so the loop body below can append a
  // schema fragment and the matching JSON fragment in a single statement.
  #define AddToSchemaAndInstances(schema_add, instance_add) \
    RndDef::Add(definitions, schema, instances_per_definition, \
                schema_add, instance_add, definition)
  // Dummy() substitutes a plain byte field when the randomly chosen type
  // is not legal in the current context (e.g. a string inside a struct).
  #define Dummy() \
    RndDef::Add(definitions, schema, instances_per_definition, \
                "byte", "1", definition)
  RndDef definitions[num_definitions];
  // We are going to generate num_definitions, the first
  // num_struct_definitions will be structs, the rest tables. For each
  // generate random fields, some of which may be struct/table types
  // referring to previously generated structs/tables.
  // Simultanenously, we generate instances_per_definition JSON data
  // definitions, which will have identical structure to the schema
  // being generated. We generate multiple instances such that when creating
  // hierarchy, we get some variety by picking one randomly.
  for (int definition = 0; definition < num_definitions; definition++) {
    std::string definition_name = "D" + flatbuffers::NumToString(definition);
    bool is_struct = definition < num_struct_definitions;
    AddToSchemaAndInstances(
        ((is_struct ? "struct " : "table ") + definition_name + " {\n").c_str(),
        "{\n");
    for (int field = 0; field < fields_per_definition; field++) {
      const bool is_last_field = field == fields_per_definition - 1;
      // Deprecate 1 in deprecation_rate fields. Only table fields can be
      // deprecated.
      // Don't deprecate the last field to avoid dangling commas in JSON.
      const bool deprecated = !is_struct &&
                              !is_last_field &&
                              (lcg_rand() % deprecation_rate == 0);
      std::string field_name = "f" + flatbuffers::NumToString(field);
      AddToSchemaAndInstances(("  " + field_name + ":").c_str(),
                              deprecated ? "" : (field_name + ": ").c_str());
      // Pick random type:
      auto base_type = static_cast<flatbuffers::BaseType>(
                         lcg_rand() % (flatbuffers::BASE_TYPE_UNION + 1));
      switch (base_type) {
        case flatbuffers::BASE_TYPE_STRING:
          if (is_struct) {
            Dummy();  // No strings in structs.
          } else {
            AddToSchemaAndInstances("string", deprecated ? "" : "\"hi\"");
          }
          break;
        case flatbuffers::BASE_TYPE_VECTOR:
          if (is_struct) {
            Dummy();  // No vectors in structs.
          }
          else {
            AddToSchemaAndInstances("[ubyte]",
                                    deprecated ? "" : "[\n0,\n1,\n255\n]");
          }
          break;
        case flatbuffers::BASE_TYPE_NONE:
        case flatbuffers::BASE_TYPE_UTYPE:
        case flatbuffers::BASE_TYPE_STRUCT:
        case flatbuffers::BASE_TYPE_UNION:
          if (definition) {
            // Pick a random previous definition and random data instance of
            // that definition.
            int defref = lcg_rand() % definition;
            int instance = lcg_rand() % instances_per_definition;
            AddToSchemaAndInstances(
                  ("D" + flatbuffers::NumToString(defref)).c_str(),
                  deprecated
                    ? ""
                    : definitions[defref].instances[instance].c_str());
          } else {
            // If this is the first definition, we have no definition we can
            // refer to.
            Dummy();
          }
          break;
        case flatbuffers::BASE_TYPE_BOOL:
          AddToSchemaAndInstances("bool", deprecated
                                  ? ""
                                  : (lcg_rand() % 2 ? "true" : "false"));
          break;
        default:
          // All the scalar types.
          schema += flatbuffers::kTypeNames[base_type];
          if (!deprecated) {
            // We want each instance to use its own random value.
            for (int inst = 0; inst < instances_per_definition; inst++)
              definitions[definition].instances[inst] +=
              flatbuffers::IsFloat(base_type)
                  ? flatbuffers::NumToString<double>(lcg_rand() % 128).c_str()
                  : flatbuffers::NumToString<int>(lcg_rand() % 128).c_str();
          }
      }
      AddToSchemaAndInstances(
          deprecated ? "(deprecated);\n" : ";\n",
          deprecated ? "" : is_last_field ? "\n" : ",\n");
    }
    AddToSchemaAndInstances("}\n\n", "}");
  }
  // The last (most deeply referencing) definition becomes the root.
  schema += "root_type D" + flatbuffers::NumToString(num_definitions - 1);
  schema += ";\n";
  flatbuffers::Parser parser;
  // Will not compare against the original if we don't write defaults
  parser.builder_.ForceDefaults(true);
  // Parse the schema, parse the generated data, then generate text back
  // from the binary and compare against the original.
  TEST_EQ(parser.Parse(schema.c_str()), true);
  const std::string &json =
    definitions[num_definitions - 1].instances[0] + "\n";
  TEST_EQ(parser.Parse(json.c_str()), true);
  std::string jsongen;
  parser.opts.indent_step = 0;
  auto result = GenerateText(parser, parser.builder_.GetBufferPointer(), &jsongen);
  TEST_EQ(result, true);
  if (jsongen != json) {
    // These strings are larger than a megabyte, so we show the bytes around
    // the first bytes that are different rather than the whole string.
    size_t len = std::min(json.length(), jsongen.length());
    for (size_t i = 0; i < len; i++) {
      if (json[i] != jsongen[i]) {
        i -= std::min(static_cast<size_t>(10), i); // show some context;
        size_t end = std::min(len, i + 20);
        for (; i < end; i++)
          printf("at %d: found \"%c\", expected \"%c\"\n",
                 static_cast<int>(i), jsongen[i], json[i]);
        break;
      }
    }
    TEST_NOTNULL(NULL);
  }
  printf("%dk schema tested with %dk of json\n",
         static_cast<int>(schema.length() / 1024),
         static_cast<int>(json.length() / 1024));
}
// Test that parser errors are actually generated.
void TestError(const char *src, const char *error_substr,
bool strict_json = false) {
flatbuffers::IDLOptions opts;
opts.strict_json = strict_json;
flatbuffers::Parser parser(opts);
TEST_EQ(parser.Parse(src), false); // Must signal error
// Must be the error we're expecting
TEST_NOTNULL(strstr(parser.error_.c_str(), error_substr));
}
// Test that parsing errors occur as we'd expect.
// Also useful for coverage, making sure these paths are run.
// Each case is an invalid schema and/or JSON snippet paired with a
// substring of the error message the parser must produce.
void ErrorTest() {
  // In order they appear in idl_parser.cpp
  TestError("table X { Y:byte; } root_type X; { Y: 999 }", "does not fit");
  TestError(".0", "floating point");
  TestError("\"\0", "illegal");
  TestError("\"\\q", "escape code");
  TestError("table ///", "documentation");
  TestError("@", "illegal");
  TestError("table 1", "expecting");
  TestError("table X { Y:[[int]]; }", "nested vector");
  TestError("table X { Y:1; }", "illegal type");
  TestError("table X { Y:int; Y:int; }", "field already");
  TestError("table X { X:int; }", "same as table");
  TestError("struct X { Y:string; }", "only scalar");
  TestError("struct X { Y:int (deprecated); }", "deprecate");
  TestError("union Z { X } table X { Y:Z; } root_type X; { Y: {}, A:1 }",
            "missing type field");
  TestError("union Z { X } table X { Y:Z; } root_type X; { Y_type: 99, Y: {",
            "type id");
  TestError("table X { Y:int; } root_type X; { Z:", "unknown field");
  TestError("table X { Y:int; } root_type X; { Y:", "string constant", true);
  TestError("table X { Y:int; } root_type X; { \"Y\":1, }", "string constant",
            true);
  TestError("struct X { Y:int; Z:int; } table W { V:X; } root_type W; "
            "{ V:{ Y:1 } }", "wrong number");
  TestError("enum E:byte { A } table X { Y:E; } root_type X; { Y:U }",
            "unknown enum value");
  TestError("table X { Y:byte; } root_type X; { Y:; }", "starting");
  TestError("enum X:byte { Y } enum X {", "enum already");
  TestError("enum X:float {}", "underlying");
  TestError("enum X:byte { Y, Y }", "value already");
  TestError("enum X:byte { Y=2, Z=1 }", "ascending");
  TestError("union X { Y = 256 }", "must fit");
  TestError("enum X:byte (bit_flags) { Y=8 }", "bit flag out");
  TestError("table X { Y:int; } table X {", "datatype already");
  TestError("struct X (force_align: 7) { Y:int; }", "force_align");
  TestError("{}", "no root");
  TestError("table X { Y:byte; } root_type X; { Y:1 } { Y:1 }", "one json");
  TestError("root_type X;", "unknown root");
  TestError("struct X { Y:int; } root_type X;", "a table");
  TestError("union X { Y }", "referenced");
  TestError("union Z { X } struct X { Y:int; }", "only tables");
  TestError("table X { Y:[int]; YLength:int; }", "clash");
  TestError("table X { Y:string = 1; }", "scalar");
  TestError("table X { Y:byte; } root_type X; { Y:1, Y:2 }", "more than once");
}
// Parses a one-field schema "table X { Y:<type_name>; }", then parses
// `json` against it and returns field Y's value read back as T.
template<typename T> T TestValue(const char *json, const char *type_name) {
  flatbuffers::Parser parser;
  // Simple schema.
  TEST_EQ(parser.Parse(std::string("table X { Y:" + std::string(type_name) +
                                   "; } root_type X;").c_str()), true);
  TEST_EQ(parser.Parse(json), true);
  auto root = flatbuffers::GetRoot<flatbuffers::Table>(
                parser.builder_.GetBufferPointer());
  // Y is the first (index 0) field; 0 is the default if it was not written.
  return root->GetField<T>(flatbuffers::FieldIndexToOffset(0), 0);
}
// Approximate float equality: true when |a - b| < tolerance.
// `tolerance` defaults to the historical hard-coded 0.001 so all existing
// call sites behave exactly as before; callers needing a tighter or looser
// bound can now pass their own.
bool FloatCompare(float a, float b, double tolerance = 0.001) {
  return fabs(a - b) < tolerance;
}
// Additional parser testing not covered elsewhere.
// Exercises numeric literal forms the JSON parser accepts for scalars.
void ValueTest() {
  // Test scientific notation numbers.
  TEST_EQ(FloatCompare(TestValue<float>("{ Y:0.0314159e+2 }","float"),
                       (float)3.14159), true);
  // Test conversion functions.
  TEST_EQ(FloatCompare(TestValue<float>("{ Y:cos(rad(180)) }","float"), -1),
          true);
  // Test negative hex constant.
  TEST_EQ(TestValue<int>("{ Y:-0x80 }","int"), -128);
  // Make sure we do unsigned 64bit correctly.
  TEST_EQ(TestValue<uint64_t>("{ Y:12335089644688340133 }","ulong"),
                              12335089644688340133ULL);
}
void NestedListTest() {
flatbuffers::Parser parser1;
TEST_EQ(parser1.Parse("struct Test { a:short; b:byte; } table T { F:[Test]; }"
"root_type T;"
"{ F:[ [10,20], [30,40]] }"), true);
}
void EnumStringsTest() {
flatbuffers::Parser parser1;
TEST_EQ(parser1.Parse("enum E:byte { A, B, C } table T { F:[E]; }"
"root_type T;"
"{ F:[ A, B, \"C\", \"A B C\" ] }"), true);
flatbuffers::Parser parser2;
TEST_EQ(parser2.Parse("enum E:byte { A, B, C } table T { F:[int]; }"
"root_type T;"
"{ F:[ \"E.C\", \"E.A E.B E.C\" ] }"), true);
}
// Values just outside each integer type's range must be rejected with a
// "constant does not fit" parse error.
void IntegerOutOfRangeTest() {
  // Every case follows the same shape: a single field of `type_name`,
  // fed the literal `value` in JSON. Assemble the snippet and expect
  // the shared error substring.
  auto check_does_not_fit = [](const char *type_name, const char *value) {
    std::string src = "table T { F:";
    src += type_name;
    src += "; } root_type T; { F:";
    src += value;
    src += " }";
    TestError(src.c_str(), "constant does not fit");
  };
  check_does_not_fit("byte", "128");
  check_does_not_fit("byte", "-129");
  check_does_not_fit("ubyte", "256");
  check_does_not_fit("ubyte", "-1");
  check_does_not_fit("short", "32768");
  check_does_not_fit("short", "-32769");
  check_does_not_fit("ushort", "65536");
  check_does_not_fit("ushort", "-1");
  check_does_not_fit("int", "2147483648");
  check_does_not_fit("int", "-2147483649");
  check_does_not_fit("uint", "4294967296");
  check_does_not_fit("uint", "-1");
}
// Extreme but in-range values for every integer type must parse and read
// back exactly (complements IntegerOutOfRangeTest above).
void IntegerBoundaryTest() {
  TEST_EQ(TestValue<int8_t>("{ Y:127 }","byte"), 127);
  TEST_EQ(TestValue<int8_t>("{ Y:-128 }","byte"), -128);
  TEST_EQ(TestValue<uint8_t>("{ Y:255 }","ubyte"), 255);
  TEST_EQ(TestValue<uint8_t>("{ Y:0 }","ubyte"), 0);
  TEST_EQ(TestValue<int16_t>("{ Y:32767 }","short"), 32767);
  TEST_EQ(TestValue<int16_t>("{ Y:-32768 }","short"), -32768);
  TEST_EQ(TestValue<uint16_t>("{ Y:65535 }","ushort"), 65535);
  TEST_EQ(TestValue<uint16_t>("{ Y:0 }","ushort"), 0);
  TEST_EQ(TestValue<int32_t>("{ Y:2147483647 }","int"), 2147483647);
  // Spelled (-2147483647 - 1): the C++ literal 2147483648 would not fit
  // in a signed int before negation.
  TEST_EQ(TestValue<int32_t>("{ Y:-2147483648 }","int"), (-2147483647 - 1));
  TEST_EQ(TestValue<uint32_t>("{ Y:4294967295 }","uint"), 4294967295);
  TEST_EQ(TestValue<uint32_t>("{ Y:0 }","uint"), 0);
  TEST_EQ(TestValue<int64_t>("{ Y:9223372036854775807 }","long"), 9223372036854775807);
  TEST_EQ(TestValue<int64_t>("{ Y:-9223372036854775808 }","long"), (-9223372036854775807 - 1));
  TEST_EQ(TestValue<uint64_t>("{ Y:18446744073709551615 }","ulong"), 18446744073709551615U);
  TEST_EQ(TestValue<uint64_t>("{ Y:0 }","ulong"), 0);
}
// Round-trips a string full of \u escapes (including a surrogate pair)
// through parse + GenerateText and checks the regenerated JSON.
void UnicodeTest() {
  flatbuffers::Parser parser;
  // Without setting allow_non_utf8 = true, we treat \x sequences as byte sequences
  // which are then validated as UTF-8.
  // Note: \xE2\x82\xAC is the UTF-8 encoding of U+20AC, which is why it
  // comes back as \u20AC in the generated JSON below.
  TEST_EQ(parser.Parse("table T { F:string; }"
                       "root_type T;"
                       "{ F:\"\\u20AC\\u00A2\\u30E6\\u30FC\\u30B6\\u30FC"
                       "\\u5225\\u30B5\\u30A4\\u30C8\\xE2\\x82\\xAC\\u0080\\uD83D\\uDE0E\" }"),
          true);
  std::string jsongen;
  parser.opts.indent_step = -1;  // single-line output
  auto result = GenerateText(parser, parser.builder_.GetBufferPointer(), &jsongen);
  TEST_EQ(result, true);
  TEST_EQ(jsongen,
          std::string(
            "{F: \"\\u20AC\\u00A2\\u30E6\\u30FC\\u30B6\\u30FC"
            "\\u5225\\u30B5\\u30A4\\u30C8\\u20AC\\u0080\\uD83D\\uDE0E\"}"));
}
// With allow_non_utf8 set, raw byte escapes that are not valid UTF-8
// (\x80 here) are accepted and re-emitted as \x escapes by GenerateText.
void UnicodeTestAllowNonUTF8() {
  flatbuffers::Parser parser;
  parser.opts.allow_non_utf8 = true;
  TEST_EQ(parser.Parse("table T { F:string; }"
                       "root_type T;"
                       "{ F:\"\\u20AC\\u00A2\\u30E6\\u30FC\\u30B6\\u30FC"
                       "\\u5225\\u30B5\\u30A4\\u30C8\\x01\\x80\\u0080\\uD83D\\uDE0E\" }"), true);
  std::string jsongen;
  parser.opts.indent_step = -1;  // single-line output
  auto result = GenerateText(parser, parser.builder_.GetBufferPointer(), &jsongen);
  TEST_EQ(result, true);
  // \x01 is valid UTF-8 and becomes \u0001; \x80 is not and stays \x80.
  TEST_EQ(jsongen,
          std::string(
            "{F: \"\\u20AC\\u00A2\\u30E6\\u30FC\\u30B6\\u30FC"
            "\\u5225\\u30B5\\u30A4\\u30C8\\u0001\\x80\\u0080\\uD83D\\uDE0E\"}"));
}
// A buffer that was built while allow_non_utf8 was on must make
// GenerateText fail once the flag is turned back off.
void UnicodeTestGenerateTextFailsOnNonUTF8() {
  flatbuffers::Parser parser;
  // Allow non-UTF-8 initially to model what happens when we load a binary flatbuffer from disk
  // which contains non-UTF-8 strings.
  parser.opts.allow_non_utf8 = true;
  TEST_EQ(parser.Parse("table T { F:string; }"
                       "root_type T;"
                       "{ F:\"\\u20AC\\u00A2\\u30E6\\u30FC\\u30B6\\u30FC"
                       "\\u5225\\u30B5\\u30A4\\u30C8\\x01\\x80\\u0080\\uD83D\\uDE0E\" }"), true);
  std::string jsongen;
  parser.opts.indent_step = -1;
  // Now, disallow non-UTF-8 (the default behavior) so GenerateText indicates failure.
  parser.opts.allow_non_utf8 = false;
  auto result = GenerateText(parser, parser.builder_.GetBufferPointer(), &jsongen);
  TEST_EQ(result, false);
}
// A valid UTF-16 surrogate pair in JSON (\uD83D\uDCA9, i.e. U+1F4A9) must
// be decoded into its 4-byte UTF-8 encoding in the stored string.
void UnicodeSurrogatesTest() {
  flatbuffers::Parser parser;
  TEST_EQ(
    parser.Parse(
      "table T { F:string (id: 0); }"
      "root_type T;"
      "{ F:\"\\uD83D\\uDCA9\"}"), true);
  auto root_table = flatbuffers::GetRoot<flatbuffers::Table>(
    parser.builder_.GetBufferPointer());
  // F is field index 0; read it back as a raw string pointer.
  auto stored = root_table->GetPointer<flatbuffers::String *>(
    flatbuffers::FieldIndexToOffset(0));
  TEST_EQ(strcmp(stored->c_str(), "\xF0\x9F\x92\xA9"), 0);
}
// Lone or mismatched UTF-16 surrogate escapes in JSON strings must be
// rejected with a specific error per malformation.
void UnicodeInvalidSurrogatesTest() {
  // High surrogate with nothing after it.
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\\uD800\"}", "unpaired high surrogate");
  // High surrogate followed by plain characters.
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\\uD800abcd\"}", "unpaired high surrogate");
  // High surrogate followed by a non-surrogate escape.
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\\uD800\\n\"}", "unpaired high surrogate");
  // Two high surrogates in a row.
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\\uD800\\uD800\"}", "multiple high surrogates");
  // Low surrogate with no preceding high surrogate.
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\\uDC00\"}", "unpaired low surrogate");
}
// Strings in JSON must be valid UTF-8 (allow_non_utf8 is off by default);
// each case feeds one malformed byte sequence and expects a parse error.
void InvalidUTF8Test() {
  // "1 byte" pattern, under min length of 2 bytes
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\x80\"}", "illegal UTF-8 sequence");
  // 2 byte pattern, string too short
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xDF\"}", "illegal UTF-8 sequence");
  // 3 byte pattern, string too short
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xEF\xBF\"}", "illegal UTF-8 sequence");
  // 4 byte pattern, string too short
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xF7\xBF\xBF\"}", "illegal UTF-8 sequence");
  // "5 byte" pattern, string too short
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xFB\xBF\xBF\xBF\"}", "illegal UTF-8 sequence");
  // "6 byte" pattern, string too short
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xFD\xBF\xBF\xBF\xBF\"}", "illegal UTF-8 sequence");
  // "7 byte" pattern, string too short
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xFE\xBF\xBF\xBF\xBF\xBF\"}", "illegal UTF-8 sequence");
  // "5 byte" pattern, over max length of 4 bytes
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xFB\xBF\xBF\xBF\xBF\"}", "illegal UTF-8 sequence");
  // "6 byte" pattern, over max length of 4 bytes
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xFD\xBF\xBF\xBF\xBF\xBF\"}", "illegal UTF-8 sequence");
  // "7 byte" pattern, over max length of 4 bytes
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xFE\xBF\xBF\xBF\xBF\xBF\xBF\"}", "illegal UTF-8 sequence");
  // Three invalid encodings for U+000A (\n, aka NEWLINE)
  // (overlong encodings of an otherwise valid code point)
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xC0\x8A\"}", "illegal UTF-8 sequence");
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xE0\x80\x8A\"}", "illegal UTF-8 sequence");
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xF0\x80\x80\x8A\"}", "illegal UTF-8 sequence");
  // Two invalid encodings for U+00A9 (COPYRIGHT SYMBOL)
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xE0\x81\xA9\"}", "illegal UTF-8 sequence");
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xF0\x80\x81\xA9\"}", "illegal UTF-8 sequence");
  // Invalid encoding for U+20AC (EURO SYMBOL)
  TestError(
    "table T { F:string; }"
    "root_type T;"
    "{ F:\"\xF0\x82\x82\xAC\"}", "illegal UTF-8 sequence");
  // UTF-16 surrogate values between U+D800 and U+DFFF cannot be encoded in UTF-8
  TestError(
    "table T { F:string; }"
    "root_type T;"
    // U+10400 "encoded" as U+D801 U+DC00
    "{ F:\"\xED\xA0\x81\xED\xB0\x80\"}", "illegal UTF-8 sequence");
}
// With skip_unexpected_fields_in_json set, JSON keys not present in the
// schema (quoted or unquoted; scalar, array, or object values) are
// silently dropped rather than causing a parse error.
void UnknownFieldsTest() {
  flatbuffers::IDLOptions opts;
  opts.skip_unexpected_fields_in_json = true;
  flatbuffers::Parser parser(opts);
  TEST_EQ(parser.Parse("table T { str:string; i:int;}"
                       "root_type T;"
                       "{ str:\"test\","
                       "unknown_string:\"test\","
                       "\"unknown_string\":\"test\","
                       "unknown_int:10,"
                       "unknown_float:1.0,"
                       "unknown_array: [ 1, 2, 3, 4],"
                       "unknown_object: { i: 10 },"
                       "\"unknown_object\": { \"i\": 10 },"
                       "i:10}"), true);
  std::string jsongen;
  parser.opts.indent_step = -1;
  auto result = GenerateText(parser, parser.builder_.GetBufferPointer(), &jsongen);
  TEST_EQ(result, true);
  // Only the schema-declared fields survive the round trip.
  TEST_EQ(jsongen == "{str: \"test\",i: 10}", true);
}
void ParseUnionTest() {
// Unions must be parseable with the type field following the object.
flatbuffers::Parser parser;
TEST_EQ(parser.Parse("table T { A:int; }"
"union U { T }"
"table V { X:U; }"
"root_type V;"
"{ X:{ A:1 }, X_type: T }"), true);
// Unions must be parsable with prefixed namespace.
flatbuffers::Parser parser2;
TEST_EQ(parser2.Parse("namespace N; table A {} namespace; union U { N.A }"
"table B { e:U; } root_type B;"
"{ e_type: N_A, e: {} }"), true);
}
// Builds a Movie buffer containing a vector of unions (mixed structs,
// tables, and strings), verifies it, reads it back directly, unpacks it
// to the object API, repacks, and re-verifies.
void UnionVectorTest() {
  // load FlatBuffer fbs schema.
  // TODO: load a JSON file with such a vector when JSON support is ready.
  std::string schemafile;
  TEST_EQ(flatbuffers::LoadFile(
    (test_data_path + "union_vector/union_vector.fbs").c_str(), false,
    &schemafile), true);
  // parse schema.
  flatbuffers::IDLOptions idl_opts;
  idl_opts.lang_to_generate |= flatbuffers::IDLOptions::kCpp;
  flatbuffers::Parser parser(idl_opts);
  TEST_EQ(parser.Parse(schemafile.c_str()), true);
  flatbuffers::FlatBufferBuilder fbb;
  // union types.
  std::vector<uint8_t> types;
  types.push_back(static_cast<uint8_t>(Character_Belle));
  types.push_back(static_cast<uint8_t>(Character_MuLan));
  types.push_back(static_cast<uint8_t>(Character_BookFan));
  types.push_back(static_cast<uint8_t>(Character_Other));
  types.push_back(static_cast<uint8_t>(Character_Unused));
  // union values. Must stay index-aligned with `types` above.
  std::vector<flatbuffers::Offset<void>> characters;
  characters.push_back(fbb.CreateStruct(BookReader(/*books_read=*/7)).Union());
  characters.push_back(CreateAttacker(fbb, /*sword_attack_damage=*/5).Union());
  characters.push_back(fbb.CreateStruct(BookReader(/*books_read=*/2)).Union());
  characters.push_back(fbb.CreateString("Other").Union());
  characters.push_back(fbb.CreateString("Unused").Union());
  // create Movie.
  const auto movie_offset =
    CreateMovie(fbb,
                Character_Rapunzel,
                fbb.CreateStruct(Rapunzel(/*hair_length=*/6)).Union(),
                fbb.CreateVector(types),
                fbb.CreateVector(characters));
  FinishMovieBuffer(fbb, movie_offset);
  auto buf = fbb.GetBufferPointer();
  flatbuffers::Verifier verifier(buf, fbb.GetSize());
  TEST_EQ(VerifyMovieBuffer(verifier), true);
  auto flat_movie = GetMovie(buf);
  // Shared checks run on both the original and the repacked buffer below.
  auto TestMovie = [](const Movie *movie) {
    TEST_EQ(movie->main_character_type() == Character_Rapunzel, true);
    auto cts = movie->characters_type();
    TEST_EQ(movie->characters_type()->size(), 5);
    TEST_EQ(cts->GetEnum<Character>(0) == Character_Belle, true);
    TEST_EQ(cts->GetEnum<Character>(1) == Character_MuLan, true);
    TEST_EQ(cts->GetEnum<Character>(2) == Character_BookFan, true);
    TEST_EQ(cts->GetEnum<Character>(3) == Character_Other, true);
    TEST_EQ(cts->GetEnum<Character>(4) == Character_Unused, true);
    auto rapunzel = movie->main_character_as_Rapunzel();
    TEST_EQ(rapunzel->hair_length(), 6);
    auto cs = movie->characters();
    TEST_EQ(cs->size(), 5);
    auto belle = cs->GetAs<BookReader>(0);
    TEST_EQ(belle->books_read(), 7);
    auto mu_lan = cs->GetAs<Attacker>(1);
    TEST_EQ(mu_lan->sword_attack_damage(), 5);
    auto book_fan = cs->GetAs<BookReader>(2);
    TEST_EQ(book_fan->books_read(), 2);
    auto other = cs->GetAsString(3);
    TEST_EQ_STR(other->c_str(), "Other");
    auto unused = cs->GetAsString(4);
    TEST_EQ_STR(unused->c_str(), "Unused");
  };
  TestMovie(flat_movie);
  // Object API round trip: UnPack -> check -> Pack -> check again.
  auto movie_object = flat_movie->UnPack();
  TEST_EQ(movie_object->main_character.AsRapunzel()->hair_length(), 6);
  TEST_EQ(movie_object->characters[0].AsBelle()->books_read(), 7);
  TEST_EQ(movie_object->characters[1].AsMuLan()->sword_attack_damage, 5);
  TEST_EQ(movie_object->characters[2].AsBookFan()->books_read(), 2);
  TEST_EQ_STR(movie_object->characters[3].AsOther()->c_str(), "Other");
  TEST_EQ_STR(movie_object->characters[4].AsUnused()->c_str(), "Unused");
  fbb.Clear();
  fbb.Finish(Movie::Pack(fbb, movie_object));
  auto repacked_movie = GetMovie(fbb.GetBufferPointer());
  TestMovie(repacked_movie);
}
void ConformTest() {
flatbuffers::Parser parser;
TEST_EQ(parser.Parse("table T { A:int; } enum E:byte { A }"), true);
auto test_conform = [&](const char *test, const char *expected_err) {
flatbuffers::Parser parser2;
TEST_EQ(parser2.Parse(test), true);
auto err = parser2.ConformTo(parser);
TEST_NOTNULL(strstr(err.c_str(), expected_err));
};
test_conform("table T { A:byte; }", "types differ for field");
test_conform("table T { B:int; A:int; }", "offsets differ for field");
test_conform("table T { A:int = 1; }", "defaults differ for field");
test_conform("table T { B:float; }", "field renamed to different type");
test_conform("enum E:byte { B, A }", "values differ for enum");
}
void ParseProtoBufAsciiTest() {
  // We can put the parser in a mode where it will accept JSON that looks more
  // like Protobuf ASCII, for users that have data in that format.
  // This uses no "" for field names (which we already support by default,
  // omits `,`, `:` before `{` and a couple of other features.
  flatbuffers::Parser parser;
  parser.opts.protobuf_ascii_alike = true;
  TEST_EQ(parser.Parse(
      "table S { B:int; } table T { A:[int]; C:S; } root_type T;"), true);
  // No commas between array elements, no colon before the nested object.
  TEST_EQ(parser.Parse("{ A [1 2] C { B:2 }}"), true);
  // Similarly, in text output, it should omit these.
  std::string text;
  auto ok = flatbuffers::GenerateText(parser,
                                      parser.builder_.GetBufferPointer(),
                                      &text);
  TEST_EQ(ok, true);
  TEST_EQ_STR(text.c_str(),
              "{\n A [\n 1\n 2\n ]\n C {\n B: 2\n }\n}\n");
}
// End-to-end FlexBuffers coverage: building a schema-less buffer (with
// key/string sharing), reading it back, type-mismatch behavior, in-place
// mutation, and JSON round-tripping via the flatbuffers::Parser.
void FlexBuffersTest() {
  flexbuffers::Builder slb(512,
                           flexbuffers::BUILDER_FLAG_SHARE_KEYS_AND_STRINGS);
  // Write the equivalent of:
  // { vec: [ -100, "Fred", 4.0 ], bar: [ 1, 2, 3 ], foo: 100 }
  slb.Map([&]() {
     slb.Vector("vec", [&]() {
      slb += -100;  // Equivalent to slb.Add(-100) or slb.Int(-100);
      slb += "Fred";
      slb.IndirectFloat(4.0f);
    });
    int ints[] = { 1, 2, 3 };
    slb.Vector("bar", ints, 3);
    slb.FixedTypedVector("bar3", ints, 3);
    slb.Double("foo", 100);
    slb.Map("mymap", [&]() {
      slb.String("foo", "Fred");  // Testing key and string reuse.
    });
  });
  slb.Finish();
  // Dump the raw encoded bytes (debug aid for inspecting the wire format).
  for (size_t i = 0; i < slb.GetBuffer().size(); i++)
    printf("%d ", slb.GetBuffer().data()[i]);
  printf("\n");
  auto map = flexbuffers::GetRoot(slb.GetBuffer()).AsMap();
  TEST_EQ(map.size(), 5);
  auto vec = map["vec"].AsVector();
  TEST_EQ(vec.size(), 3);
  TEST_EQ(vec[0].AsInt64(), -100);
  TEST_EQ_STR(vec[1].AsString().c_str(), "Fred");
  TEST_EQ(vec[1].AsInt64(), 0);  // Number parsing failed.
  TEST_EQ(vec[2].AsDouble(), 4.0);
  TEST_EQ(vec[2].AsString().IsTheEmptyString(), true);  // Wrong Type.
  TEST_EQ_STR(vec[2].AsString().c_str(), "");  // This still works though.
  TEST_EQ_STR(vec[2].ToString().c_str(), "4.0");  // Or have it converted.
  auto tvec = map["bar"].AsTypedVector();
  TEST_EQ(tvec.size(), 3);
  TEST_EQ(tvec[2].AsInt8(), 3);
  auto tvec3 = map["bar3"].AsFixedTypedVector();
  TEST_EQ(tvec3.size(), 3);
  TEST_EQ(tvec3[2].AsInt8(), 3);
  TEST_EQ(map["foo"].AsUInt8(), 100);
  TEST_EQ(map["unknown"].IsNull(), true);
  auto mymap = map["mymap"].AsMap();
  // These should be equal by pointer equality, since key and value are shared.
  TEST_EQ(mymap.Keys()[0].AsKey(), map.Keys()[2].AsKey());
  TEST_EQ(mymap.Values()[0].AsString().c_str(), vec[1].AsString().c_str());
  // We can mutate values in the buffer.
  TEST_EQ(vec[0].MutateInt(-99), true);
  TEST_EQ(vec[0].AsInt64(), -99);
  TEST_EQ(vec[1].MutateString("John"), true);  // Size must match.
  TEST_EQ_STR(vec[1].AsString().c_str(), "John");
  TEST_EQ(vec[1].MutateString("Alfred"), false);  // Too long.
  TEST_EQ(vec[2].MutateFloat(2.0f), true);
  TEST_EQ(vec[2].AsFloat(), 2.0f);
  TEST_EQ(vec[2].MutateFloat(3.14159), false);  // Double does not fit in float.
  // Parse from JSON:
  flatbuffers::Parser parser;
  slb.Clear();
  auto jsontest = "{ a: [ 123, 456.0 ], b: \"hello\" }";
  TEST_EQ(parser.ParseFlexBuffer(jsontest, nullptr, &slb),
          true);
  auto jroot = flexbuffers::GetRoot(slb.GetBuffer());
  auto jmap = jroot.AsMap();
  auto jvec = jmap["a"].AsVector();
  TEST_EQ(jvec[0].AsInt64(), 123);
  TEST_EQ(jvec[1].AsDouble(), 456.0);
  TEST_EQ_STR(jmap["b"].AsString().c_str(), "hello");
  // And from FlexBuffer back to JSON:
  auto jsonback = jroot.ToString();
  // Round-trip must reproduce the input text exactly.
  TEST_EQ_STR(jsontest, jsonback.c_str());
}
// Test driver: runs every suite in order and reports the aggregate result.
// Returns 0 on success, 1 if any TEST_* assertion incremented testing_fails.
int main(int /*argc*/, const char * /*argv*/[]) {
  // Run our various test suites:
  std::string rawbuf;
  auto flatbuf1 = CreateFlatBufferTest(rawbuf);
  auto flatbuf = std::move(flatbuf1);  // Test move assignment.
  AccessFlatBufferTest(reinterpret_cast<const uint8_t *>(rawbuf.c_str()),
                       rawbuf.length());
  AccessFlatBufferTest(flatbuf.data(), flatbuf.size());
  MutateFlatBuffersTest(flatbuf.data(), flatbuf.size());
  ObjectFlatBuffersTest(flatbuf.data());
  SizePrefixedTest();
  // File-based suites are skipped on platforms without filesystem access.
  #ifndef FLATBUFFERS_NO_FILE_TESTS
  #ifdef FLATBUFFERS_TEST_PATH_PREFIX
    test_data_path = FLATBUFFERS_STRING(FLATBUFFERS_TEST_PATH_PREFIX) +
                     test_data_path;
  #endif
  ParseAndGenerateTextTest();
  ReflectionTest(flatbuf.data(), flatbuf.size());
  ParseProtoTest();
  UnionVectorTest();
  #endif
  FuzzTest1();
  FuzzTest2();
  ErrorTest();
  ValueTest();
  EnumStringsTest();
  IntegerOutOfRangeTest();
  IntegerBoundaryTest();
  UnicodeTest();
  UnicodeTestAllowNonUTF8();
  UnicodeTestGenerateTextFailsOnNonUTF8();
  UnicodeSurrogatesTest();
  UnicodeInvalidSurrogatesTest();
  InvalidUTF8Test();
  UnknownFieldsTest();
  ParseUnionTest();
  ConformTest();
  ParseProtoBufAsciiTest();
  FlexBuffersTest();
  if (!testing_fails) {
    TEST_OUTPUT_LINE("ALL TESTS PASSED");
    return 0;
  } else {
    TEST_OUTPUT_LINE("%d FAILED TESTS", testing_fails);
    return 1;
  }
}
| apache-2.0 |
OpenGamma/Strata | modules/math/src/test/java/com/opengamma/strata/math/impl/integration/GaussHermiteWeightAndAbscissaFunctionTest.java | 1586 | /*
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.math.impl.integration;
import org.junit.jupiter.api.Test;
/**
* Test.
*/
public class GaussHermiteWeightAndAbscissaFunctionTest extends WeightAndAbscissaFunctionTestCase {
  private static final double SQRT_PI = Math.sqrt(Math.PI);
  // Denominators appearing in the closed-form 4-point weights.
  private static final double DENOM1 = 4 * (3 - Math.sqrt(6));
  private static final double DENOM2 = 4 * (3 + Math.sqrt(6));
  // Closed-form abscissas (X*) and weights (W*) for the 2-, 3- and 4-point
  // Gauss-Hermite rules, used as expected values below.
  private static final double[] X2 = new double[] {-Math.sqrt(2) / 2., Math.sqrt(2) / 2.};
  private static final double[] W2 = new double[] {SQRT_PI / 2., SQRT_PI / 2.};
  private static final double[] X3 = new double[] {-Math.sqrt(6) / 2., 0, Math.sqrt(6) / 2.};
  private static final double[] W3 = new double[] {SQRT_PI / 6., 2 * SQRT_PI / 3., SQRT_PI / 6.};
  private static final double[] X4 = new double[] {-Math.sqrt((3 + Math.sqrt(6)) / 2.), -Math.sqrt((3 - Math.sqrt(6)) / 2.),
      Math.sqrt((3 - Math.sqrt(6)) / 2.), Math.sqrt((3 + Math.sqrt(6)) / 2.)};
  private static final double[] W4 = new double[] {SQRT_PI / DENOM2, SQRT_PI / DENOM1, SQRT_PI / DENOM1, SQRT_PI / DENOM2};
  // Function under test.
  private static final QuadratureWeightAndAbscissaFunction F = new GaussHermiteWeightAndAbscissaFunction();
  @Override
  protected QuadratureWeightAndAbscissaFunction getFunction() {
    return F;
  }
  // Generated rules of order 2, 3 and 4 must match the closed forms above.
  @Test
  public void test() {
    assertResults(F.generate(2), X2, W2);
    assertResults(F.generate(3), X3, W3);
    assertResults(F.generate(4), X4, W4);
  }
}
| apache-2.0 |
yexianyi/Cho-Ko-Nu | chukonu_java/src/main/java/com/yxy/chukonu/java/dp/observer/Test.java | 292 | package com.yxy.chukonu.java.dp.observer;
/**
 * Demo driver for the observer pattern: registers two observers on an
 * observable order and triggers a price change so both are notified.
 */
public class Test {
	public static void main(String[] args) {
		final ObserverableOrder order = new ObserverableOrder(1.10f);
		// Register one observer per notification channel.
		for (final String channel : new String[] {"Email", "Phone"}) {
			order.addObserver(new ObserverA(channel));
		}
		// Changing the price notifies every registered observer.
		order.modifyPrice(2.20f);
	}
}
| apache-2.0 |
vic/byte-buddy | byte-buddy-dep/src/main/java/net/bytebuddy/description/ModifierReviewable.java | 8191 | package net.bytebuddy.description;
import org.objectweb.asm.Opcodes;
import java.lang.reflect.Modifier;
/**
* Implementations of this interface can be described in terms of a Java modifier.
*/
public interface ModifierReviewable {
    /**
     * Representation of the default modifier.
     */
    int EMPTY_MASK = 0;
    /**
     * Specifies if the modifier described by this object is {@code final}.
     *
     * @return {@code true} if the modifier described by this object is {@code final}.
     */
    boolean isFinal();
    /**
     * Specifies if the modifier described by this object is {@code static}.
     *
     * @return {@code true} if the modifier described by this object is {@code static}.
     */
    boolean isStatic();
    /**
     * Specifies if the modifier described by this object is {@code public}.
     *
     * @return {@code true} if the modifier described by this object is {@code public}.
     */
    boolean isPublic();
    /**
     * Specifies if the modifier described by this object is {@code protected}.
     *
     * @return {@code true} if the modifier described by this object is {@code protected}.
     */
    boolean isProtected();
    /**
     * Specifies if the modifier described by this object is package private.
     *
     * @return {@code true} if the modifier described by this object is package private.
     */
    boolean isPackagePrivate();
    /**
     * Specifies if the modifier described by this object is {@code private}.
     *
     * @return {@code true} if the modifier described by this object is {@code private}.
     */
    boolean isPrivate();
    /**
     * Specifies if the modifier described by this object is {@code abstract}.
     *
     * @return {@code true} if the modifier described by this object is {@code abstract}.
     */
    boolean isAbstract();
    /**
     * Specifies if the modifier described by this object is {@code native}.
     *
     * @return {@code true} if the modifier described by this object is {@code native}.
     */
    boolean isNative();
    /**
     * Specifies if the modifier described by this object is {@code synchronized}.
     *
     * @return {@code true} if the modifier described by this object is {@code synchronized}.
     */
    boolean isSynchronized();
    /**
     * Specifies if the modifier described by this object is {@code strictfp}.
     *
     * @return {@code true} if the modifier described by this object is {@code strictfp}.
     */
    boolean isStrict();
    /**
     * Specifies if the modifier described by this object is synthetic.
     *
     * @return {@code true} if the modifier described by this object is synthetic.
     */
    boolean isSynthetic();
    /**
     * Specifies if the modifier described by this object is mandated.
     *
     * @return {@code true} if the modifier described by this object is mandated.
     */
    boolean isMandated();
    /**
     * Specifies if the modifier described by this object reflects the type super flag.
     *
     * @return {@code true} if the modifier described by this object reflects the type super flag.
     */
    boolean isSuper();
    /**
     * Specifies if the modifier described by this object represents the bridge flag.
     *
     * @return {@code true} if the modifier described by this object represents the bridge flag.
     */
    boolean isBridge();
    /**
     * Specifies if the modifier described by this object represents the deprecated flag.
     *
     * @return {@code true} if the modifier described by this object represents the deprecated flag.
     */
    boolean isDeprecated();
    /**
     * Specifies if the modifier described by this object represents the annotation flag.
     *
     * @return {@code true} if the modifier described by this object represents the annotation flag.
     */
    boolean isAnnotation();
    /**
     * Specifies if the modifier described by this object represents the enum flag.
     *
     * @return {@code true} if the modifier described by this object represents the enum flag.
     */
    boolean isEnum();
    /**
     * Specifies if the modifier described by this object represents the interface flag.
     *
     * @return {@code true} if the modifier described by this object represents the interface flag.
     */
    boolean isInterface();
    /**
     * Specifies if the modifier described by this object represents the transient flag.
     *
     * @return {@code true} if the modifier described by this object represents the transient flag.
     */
    boolean isTransient();
    /**
     * Specifies if the modifier described by this object represents the volatile flag.
     *
     * @return {@code true} if the modifier described by this object represents the volatile flag.
     */
    boolean isVolatile();
    /**
     * Specifies if the modifier described by this object represents the var args flag.
     *
     * @return {@code true} if the modifier described by this object represents the var args flag.
     */
    boolean isVarArgs();
    /**
     * Returns the modifier that is described by this object.
     *
     * @return The modifier that is described by this object.
     */
    int getModifiers();
    /**
     * An abstract base implementation of a {@link ModifierReviewable} class.
     * Each query delegates to {@link #getModifiers()} and checks the
     * corresponding {@link java.lang.reflect.Modifier} or ASM {@code Opcodes}
     * bit mask.
     */
    abstract class AbstractBase implements ModifierReviewable {
        @Override
        public boolean isAbstract() {
            return matchesMask(Modifier.ABSTRACT);
        }
        @Override
        public boolean isFinal() {
            return matchesMask(Modifier.FINAL);
        }
        @Override
        public boolean isStatic() {
            return matchesMask(Modifier.STATIC);
        }
        @Override
        public boolean isPublic() {
            return matchesMask(Modifier.PUBLIC);
        }
        @Override
        public boolean isProtected() {
            return matchesMask(Modifier.PROTECTED);
        }
        @Override
        public boolean isPackagePrivate() {
            return !isPublic() && !isProtected() && !isPrivate();
        }
        @Override
        public boolean isPrivate() {
            return matchesMask(Modifier.PRIVATE);
        }
        @Override
        public boolean isNative() {
            return matchesMask(Modifier.NATIVE);
        }
        @Override
        public boolean isSynchronized() {
            return matchesMask(Modifier.SYNCHRONIZED);
        }
        @Override
        public boolean isStrict() {
            return matchesMask(Modifier.STRICT);
        }
        @Override
        public boolean isMandated() {
            return matchesMask(Opcodes.ACC_MANDATED);
        }
        @Override
        public boolean isSynthetic() {
            return matchesMask(Opcodes.ACC_SYNTHETIC);
        }
        @Override
        public boolean isSuper() {
            return matchesMask(Opcodes.ACC_SUPER);
        }
        @Override
        public boolean isBridge() {
            return matchesMask(Opcodes.ACC_BRIDGE);
        }
        @Override
        public boolean isDeprecated() {
            return matchesMask(Opcodes.ACC_DEPRECATED);
        }
        @Override
        public boolean isAnnotation() {
            return matchesMask(Opcodes.ACC_ANNOTATION);
        }
        @Override
        public boolean isEnum() {
            return matchesMask(Opcodes.ACC_ENUM);
        }
        @Override
        public boolean isInterface() {
            return matchesMask(Opcodes.ACC_INTERFACE);
        }
        @Override
        public boolean isTransient() {
            return matchesMask(Opcodes.ACC_TRANSIENT);
        }
        @Override
        public boolean isVolatile() {
            return matchesMask(Opcodes.ACC_VOLATILE);
        }
        @Override
        public boolean isVarArgs() {
            return matchesMask(Opcodes.ACC_VARARGS);
        }
        /**
         * Checks if a mask is matched by this instance.
         *
         * @param mask The mask to check.
         * @return {@code true} if the mask is matched.
         */
        private boolean matchesMask(int mask) {
            return (getModifiers() & mask) == mask;
        }
    }
}
| apache-2.0 |
leisir2017/dyp | app/models/UserShopInvoice.php | 318 | <?php
/**
 * Phalcon model for the user shop invoice table.
 * Belongs to a User via the uid column.
 */
class UserShopInvoice extends \Phalcon\Mvc\Model{
	public $id;
	// Owning user's id (foreign key to User.id).
	public $uid;
	// NOTE(review): meaning unclear from this file — presumably a rate/ratio; confirm against callers.
	public $bili;
	public $type;
	// Timestamp string set automatically in beforeSave().
	public $addtime;
	public function initialize(){
		$this->belongsTo("uid","User","id");
	}
	public function beforeSave(){
		// NOTE(review): beforeSave fires on updates too, so addtime is refreshed
		// on every save, not only on creation — confirm that is intended.
		$this->addtime = date("Y-m-d H:i:s",time());
	}
}
gsoundar/hadoop-bench | src/main/java/mambo/tools/dfsio/Analyzer.java | 4731 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package mambo.tools.dfsio;
import java.io.IOException;
import java.util.StringTokenizer;
import mambo.tools.dfsio.TestDFSIOEnh.LinearInterpolator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
public class Analyzer {
  /**
   * Extracts per-file throughput samples from TestDFSIOEnh output lines.
   * Input lines of the form {@code <tag>:tput_samples <t1:b1>;<t2:b2>;...;EoR...}
   * are split into individual samples; each sample is emitted keyed by the
   * tag portion of the attribute. All other lines are ignored.
   */
  public static class _Mapper extends Mapper<Object, Text, Text, Text> {
    private Text t = new Text();
    public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
      StringTokenizer tokens = new StringTokenizer(value.toString(), " \t\n\r\f%");
      String attr = tokens.nextToken();
      if (attr.endsWith(":tput_samples")){
        String[] tags=attr.split(":");
        String[] samples = tokens.nextToken().split(";");
        // Samples are ';'-separated "timestamp:bytes" pairs terminated by an
        // "EoR" (end-of-record) sentinel.
        for(int j=0; !samples[j].startsWith("EoR"); j++){
          t.set(samples[j]);
          context.write(new Text(tags[1]), t);
        }
      }
    }
  }
  /**
   * Interpolates the raw "timestamp:bytes" samples of each file onto a fixed
   * grid of plot intervals and emits, per slot: slot index, cumulative bytes
   * (in sampleUnit units) and bytes transferred during the slot.
   */
  public static class _Reducer extends Reducer<Text, Text, Text, Text> {
    private Text result = new Text();
    private int plotInterval;
    private long sampleUnit;
    private long execTime;
    private long fileSize;
    private long tStart;
    private int maxslot;
    // NOTE(review): the fallback defaults below (e.g. 1347347421736L) look like
    // values from a particular benchmark run; callers should always set the
    // ana_* configuration keys explicitly.
    public void setup(Context context) throws IOException, InterruptedException {
      this.tStart = context.getConfiguration().getLong("ana_tStart", 1347347421736L);
      this.plotInterval = context.getConfiguration().getInt("ana_plotInterval", 1000);
      this.sampleUnit = context.getConfiguration().getLong("ana_sampleUnit", 1024*1024);
      this.execTime = context.getConfiguration().getLong("ana_execTime", 767829);
      this.fileSize = context.getConfiguration().getLong("ana_fileSize", 500*1024*1024);
      this.maxslot = (int)(execTime/plotInterval)+1;
    }
    public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
      LinearInterpolator processor = new LinearInterpolator();
      // Feed every (elapsed-intervals, bytes) sample into the interpolator.
      for (Text val : values) {
        String[] ss = val.toString().split(":");
        long timeStamp = Long.parseLong(ss[0]);
        long bytes = Long.parseLong(ss[1]);
        double timePassed = (timeStamp-tStart)/(double)plotInterval;
        processor.add((double)timePassed,(double)bytes);
      }
      // Anchor the curve: zero bytes at t=0, full file just past the last slot.
      processor.add(0, 0);
      processor.add(maxslot+0.1, fileSize);
      double[] resultValue = new double [maxslot+1];
      double[] bytesChanged = new double[maxslot+1];
      // Cumulative bytes at each slot boundary.
      for(int i = 0; i<=maxslot; i++){
        resultValue[i]=processor.get(i);
      }
      // Per-slot delta; the final slot has no successor, so its delta is 0.
      for (int i = 0; i<=maxslot-1; i++) {
        bytesChanged[i] = resultValue[i+1]-resultValue[i];
      }
      bytesChanged[maxslot] = 0;
      for (int ri = 0; ri<=maxslot; ri++) {
        result.set(ri+","+resultValue[ri]/(double)sampleUnit+","+bytesChanged[ri]/(double)sampleUnit);
        context.write(key, result);
      }
    }
  }
  /**
   * Job driver: reads the TestDFSIOEnh read-phase output, runs the
   * mapper/reducer above and writes the per-slot throughput table to
   * /result.txt (deleting any previous output first).
   */
  public static void main(String[] args) throws Exception{
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path outdir = new Path("/result.txt");
    // Hadoop fails the job if the output path already exists.
    fs.delete(outdir, true);
    Job job = new Job(conf, "Result Analyzer");
    job.setJarByClass(Analyzer.class);
    job.setMapperClass(_Mapper.class);
    job.setReducerClass(_Reducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    FileInputFormat.addInputPath(job, new Path(TestDFSIOEnh.READ_DIR, "part-00000"));
    FileOutputFormat.setOutputPath(job, outdir);
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
} | apache-2.0 |
bazelbuild/intellij | java/src/com/google/idea/blaze/java/libraries/DetachAllSourceJarsAction.java | 4203 | /*
* Copyright 2018 The Bazel Authors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.idea.blaze.java.libraries;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.idea.blaze.base.actions.BlazeProjectAction;
import com.google.idea.blaze.base.model.BlazeProjectData;
import com.google.idea.blaze.base.model.LibraryKey;
import com.google.idea.blaze.base.projectview.ProjectViewSet;
import com.google.idea.blaze.base.scope.BlazeContext;
import com.google.idea.blaze.base.settings.BlazeImportSettings;
import com.google.idea.blaze.base.sync.SyncListener;
import com.google.idea.blaze.base.sync.SyncMode;
import com.google.idea.blaze.base.sync.SyncResult;
import com.google.idea.blaze.base.sync.data.BlazeProjectDataManager;
import com.google.idea.blaze.base.sync.libraries.LibraryEditor;
import com.google.idea.blaze.java.sync.model.BlazeJarLibrary;
import com.google.idea.common.util.Transactions;
import com.google.idea.sdkcompat.general.BaseSdkCompat;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.externalSystem.service.project.IdeModifiableModelsProvider;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTablesRegistrar;
import java.util.List;
/**
 * Action that detaches every source jar previously attached to the project's
 * blaze libraries. Also runs automatically after a full sync via
 * {@link DetachAllOnSync}.
 */
class DetachAllSourceJarsAction extends BlazeProjectAction {
  @Override
  protected void actionPerformedInBlazeProject(Project project, AnActionEvent e) {
    detachAll(project);
  }
  /** Clears the attached-source-jar flag for all libraries and rewrites them without sources. */
  private static void detachAll(Project project) {
    BlazeProjectData blazeProjectData =
        BlazeProjectDataManager.getInstance(project).getBlazeProjectData();
    if (blazeProjectData == null) {
      // No sync data available; nothing to detach.
      return;
    }
    // First pass: collect libraries that currently have a source jar attached,
    // clearing the flag as we go.
    List<Library> librariesToDetach = Lists.newArrayList();
    AttachedSourceJarManager sourceJarManager = AttachedSourceJarManager.getInstance(project);
    for (Library library :
        LibraryTablesRegistrar.getInstance().getLibraryTable(project).getLibraries()) {
      if (library.getName() == null) {
        continue;
      }
      LibraryKey libraryKey = LibraryKey.fromIntelliJLibraryName(library.getName());
      if (sourceJarManager.hasSourceJarAttached(libraryKey)) {
        sourceJarManager.setHasSourceJarAttached(libraryKey, false);
        librariesToDetach.add(library);
      }
    }
    if (librariesToDetach.isEmpty()) {
      return;
    }
    // Second pass: rebuild the collected libraries inside a write action so the
    // project model update is atomic.
    Transactions.submitWriteActionTransaction(
        project,
        () -> {
          IdeModifiableModelsProvider modelsProvider =
              BaseSdkCompat.createModifiableModelsProvider(project);
          for (Library library : librariesToDetach) {
            BlazeJarLibrary blazeLibrary =
                LibraryActionHelper.findLibraryFromIntellijLibrary(
                    project, blazeProjectData, library);
            if (blazeLibrary == null) {
              // Library no longer maps to a blaze library; skip it.
              continue;
            }
            LibraryEditor.updateLibrary(
                project,
                blazeProjectData.getArtifactLocationDecoder(),
                modelsProvider,
                blazeLibrary);
          }
          modelsProvider.commit();
        });
  }
  /** Detaches all source jars after a full (non-incremental) sync completes. */
  static class DetachAllOnSync implements SyncListener {
    @Override
    public void onSyncComplete(
        Project project,
        BlazeContext context,
        BlazeImportSettings importSettings,
        ProjectViewSet projectViewSet,
        ImmutableSet<Integer> buildIds,
        BlazeProjectData blazeProjectData,
        SyncMode syncMode,
        SyncResult syncResult) {
      if (syncMode == SyncMode.FULL) {
        detachAll(project);
      }
    }
  }
}
| apache-2.0 |
googleads/googleads-dotnet-lib | src/AdWords/Util/Reports/QueryBuilder/Capabilities/IFilterBuilder.cs | 1174 | // Copyright 2018, Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
namespace Google.Api.Ads.AdWords.Util.Reports
{
    /// <summary>
    /// Builder interface for adding the WHERE filtering capability to a
    /// report query.
    /// </summary>
    /// <typeparam name="TParent">The parent builder type that the WHERE
    /// clause builder returns control to.</typeparam>
    internal interface IFilterBuilder<TParent>
    {
        /// <summary>
        /// Adds a WHERE clause to the query.
        /// </summary>
        /// <param name="fieldName">Name of the field to filter on.</param>
        /// <returns>A builder for building the WHERE clause.</returns>
        IWhereBuilder<TParent> Where(string fieldName);
    }
}
| apache-2.0 |
citizenmatt/gallio | src/Common/Gallio.Common.Splash/SnapPosition.cs | 1762 | // Copyright 2005-2010 Gallio Project - http://www.gallio.org/
// Portions Copyright 2000-2004 Jonathan de Halleux
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Text;
namespace Gallio.Common.Splash
{
    /// <summary>
    /// Provides the result of mapping a screen position to a character index.
    /// This is an immutable value type.
    /// </summary>
    public struct SnapPosition
    {
        private readonly SnapKind kind;
        private readonly int charIndex;
        /// <summary>
        /// Initializes a snap result.
        /// </summary>
        /// <param name="kind">The snap kind.</param>
        /// <param name="charIndex">The character index of the snap, or -1 if no snap.</param>
        public SnapPosition(SnapKind kind, int charIndex)
        {
            this.kind = kind;
            this.charIndex = charIndex;
        }
        /// <summary>
        /// Gets the snap kind.
        /// </summary>
        public SnapKind Kind
        {
            get { return kind; }
        }
        /// <summary>
        /// Gets the character index of the snap, or -1 if no snap.
        /// </summary>
        public int CharIndex
        {
            get { return charIndex; }
        }
    }
}
| apache-2.0 |
chazsmi/go-education | lesson1/code/flags.go | 525 | package main
import (
"flag"
"fmt"
)
// START OMIT
// main demonstrates the two styles of declaring command-line flags:
// flag.String (returns a pointer) and flag.IntVar (binds an existing var).
func main() {
	// A string flag that returns a pointer to the value
	stringFlag := flag.String("name_of_flag",
		"default value",
		"description of how this flag should be used")
	// You can also use the typeVar method where you pass a var to the function
	var intFlag int
	flag.IntVar(&intFlag, "int_flag_name", 0, "description of how this flag should be used")
	// Needs to be called to get the values
	flag.Parse()
	fmt.Println(*stringFlag, intFlag)
}
// END OMIT
| apache-2.0 |
jeremyepling/TypeScript | src/compiler/core.ts | 83537 | /// <reference path="types.ts"/>
/// <reference path="performance.ts" />
/* @internal */
namespace ts {
    /**
     * Ternary values are defined such that
     * x & y is False if either x or y is False.
     * x & y is Maybe if either x or y is Maybe, but neither x or y is False.
     * x & y is True if both x and y are True.
     * x | y is False if both x and y are False.
     * x | y is Maybe if either x or y is Maybe, but neither x or y is True.
     * x | y is True if either x or y is True.
     *
     * The bit patterns (0, 1, all-ones) are chosen so the identities above
     * hold under plain bitwise & and |.
     */
    export const enum Ternary {
        False = 0,
        Maybe = 1,
        True = -1
    }
    // Cached reference to Object.create, used by createMap below.
    const createObject = Object.create;
    // More efficient to create a collator once and use its `compare` than to call `a.localeCompare(b)` many times.
    export const collator: { compare(a: string, b: string): number } = typeof Intl === "object" && typeof Intl.Collator === "function" ? new Intl.Collator() : undefined;
    /**
     * Creates a prototype-less string-keyed map, optionally seeded with the
     * own enumerable properties of `template`.
     */
    export function createMap<T>(template?: MapLike<T>): Map<T> {
        const map: Map<T> = createObject(null); // tslint:disable-line:no-null-keyword
        // Using 'delete' on an object causes V8 to put the object in dictionary mode.
        // This disables creation of hidden classes, which are expensive when an object is
        // constantly changing shape.
        map["__"] = undefined;
        delete map["__"];
        // Copies keys/values from template. Note that for..in will not throw if
        // template is undefined, and instead will just exit the loop.
        // `hasOwnProperty` here is presumably a module-level alias of
        // Object.prototype.hasOwnProperty — defined elsewhere in this file.
        for (const key in template) if (hasOwnProperty.call(template, key)) {
            map[key] = template[key];
        }
        return map;
    }
    /**
     * Creates a map keyed by file Path, backed by a plain string map.
     * An optional `keyMapper` transforms each path before it is used as a key
     * (e.g. for case-insensitive file systems).
     */
    export function createFileMap<T>(keyMapper?: (key: string) => string): FileMap<T> {
        let files = createMap<T>();
        return {
            get,
            set,
            contains,
            remove,
            forEachValue: forEachValueInMap,
            getKeys,
            clear,
        };
        // Invokes f for every (path, value) pair currently in the map.
        function forEachValueInMap(f: (key: Path, value: T) => void) {
            for (const key in files) {
                f(<Path>key, files[key]);
            }
        }
        // Returns a snapshot array of the (mapped) keys.
        function getKeys() {
            const keys: Path[] = [];
            for (const key in files) {
                keys.push(<Path>key);
            }
            return keys;
        }
        // path should already be well-formed so it does not need to be normalized
        function get(path: Path): T {
            return files[toKey(path)];
        }
        function set(path: Path, value: T) {
            files[toKey(path)] = value;
        }
        function contains(path: Path) {
            return toKey(path) in files;
        }
        function remove(path: Path) {
            const key = toKey(path);
            delete files[key];
        }
        // Drops every entry by replacing the backing map wholesale.
        function clear() {
            files = createMap<T>();
        }
        // Applies keyMapper when one was supplied; otherwise the path is the key.
        function toKey(path: Path): string {
            return keyMapper ? keyMapper(path) : path;
        }
    }
    /**
     * Converts a file name to a canonical absolute Path: rooted paths are
     * normalized directly, relative ones are resolved against `basePath`,
     * then `getCanonicalFileName` is applied.
     */
    export function toPath(fileName: string, basePath: string, getCanonicalFileName: (path: string) => string): Path {
        const nonCanonicalizedPath = isRootedDiskPath(fileName)
            ? normalizePath(fileName)
            : getNormalizedAbsolutePath(fileName, basePath);
        return <Path>getCanonicalFileName(nonCanonicalizedPath);
    }
    /** Result of a three-way comparison. */
    export const enum Comparison {
        LessThan = -1,
        EqualTo = 0,
        GreaterThan = 1
    }
/**
* Iterates through 'array' by index and performs the callback on each element of array until the callback
* returns a truthy value, then returns that value.
* If no such value is found, the callback is applied to each element of array and undefined is returned.
*/
export function forEach<T, U>(array: T[] | undefined, callback: (element: T, index: number) => U | undefined): U | undefined {
if (array) {
for (let i = 0, len = array.length; i < len; i++) {
const result = callback(array[i], i);
if (result) {
return result;
}
}
}
return undefined;
}
/**
* Iterates through `array` by index and performs the callback on each element of array until the callback
* returns a falsey value, then returns false.
* If no such value is found, the callback is applied to each element of array and `true` is returned.
*/
export function every<T>(array: T[], callback: (element: T, index: number) => boolean): boolean {
if (array) {
for (let i = 0, len = array.length; i < len; i++) {
if (!callback(array[i], i)) {
return false;
}
}
}
return true;
}
/** Works like Array.prototype.find, returning `undefined` if no element satisfying the predicate is found. */
export function find<T>(array: T[], predicate: (element: T, index: number) => boolean): T | undefined {
for (let i = 0, len = array.length; i < len; i++) {
const value = array[i];
if (predicate(value, i)) {
return value;
}
}
return undefined;
}
/**
* Returns the first truthy result of `callback`, or else fails.
* This is like `forEach`, but never returns undefined.
*/
export function findMap<T, U>(array: T[], callback: (element: T, index: number) => U | undefined): U {
for (let i = 0, len = array.length; i < len; i++) {
const result = callback(array[i], i);
if (result) {
return result;
}
}
Debug.fail();
}
export function contains<T>(array: T[], value: T): boolean {
if (array) {
for (const v of array) {
if (v === value) {
return true;
}
}
}
return false;
}
export function indexOf<T>(array: T[], value: T): number {
if (array) {
for (let i = 0, len = array.length; i < len; i++) {
if (array[i] === value) {
return i;
}
}
}
return -1;
}
export function indexOfAnyCharCode(text: string, charCodes: number[], start?: number): number {
for (let i = start || 0, len = text.length; i < len; i++) {
if (contains(charCodes, text.charCodeAt(i))) {
return i;
}
}
return -1;
}
export function countWhere<T>(array: T[], predicate: (x: T, i: number) => boolean): number {
let count = 0;
if (array) {
for (let i = 0; i < array.length; i++) {
const v = array[i];
if (predicate(v, i)) {
count++;
}
}
}
return count;
}
/**
* Filters an array by a predicate function. Returns the same array instance if the predicate is
* true for all elements, otherwise returns a new array instance containing the filtered subset.
*/
export function filter<T, U extends T>(array: T[], f: (x: T) => x is U): U[];
export function filter<T>(array: T[], f: (x: T) => boolean): T[];
export function filter<T>(array: T[], f: (x: T) => boolean): T[] {
if (array) {
const len = array.length;
let i = 0;
while (i < len && f(array[i])) i++;
if (i < len) {
const result = array.slice(0, i);
i++;
while (i < len) {
const item = array[i];
if (f(item)) {
result.push(item);
}
i++;
}
return result;
}
}
return array;
}
export function removeWhere<T>(array: T[], f: (x: T) => boolean): boolean {
let outIndex = 0;
for (const item of array) {
if (!f(item)) {
array[outIndex] = item;
outIndex++;
}
}
if (outIndex !== array.length) {
array.length = outIndex;
return true;
}
return false;
}
export function filterMutate<T>(array: T[], f: (x: T) => boolean): void {
let outIndex = 0;
for (const item of array) {
if (f(item)) {
array[outIndex] = item;
outIndex++;
}
}
array.length = outIndex;
}
export function map<T, U>(array: T[], f: (x: T, i: number) => U): U[] {
let result: U[];
if (array) {
result = [];
for (let i = 0; i < array.length; i++) {
result.push(f(array[i], i));
}
}
return result;
}
    // Maps from T to T and avoids allocation if all elements map to themselves
    // (copy-on-write: `result` stays undefined until the first element whose
    // mapped value differs by identity, at which point the already-processed
    // prefix is copied and all later elements are appended to the copy).
    export function sameMap<T>(array: T[], f: (x: T, i: number) => T): T[] {
        let result: T[];
        if (array) {
            for (let i = 0; i < array.length; i++) {
                if (result) {
                    // A copy already exists; keep appending mapped values.
                    result.push(f(array[i], i));
                }
                else {
                    const item = array[i];
                    const mapped = f(item, i);
                    if (item !== mapped) {
                        // First change: snapshot the unchanged prefix, then diverge.
                        result = array.slice(0, i);
                        result.push(mapped);
                    }
                }
            }
        }
        // Original array when nothing changed (or input was undefined).
        return result || array;
    }
    /**
     * Flattens an array containing a mix of array or non-array elements.
     * Falsey elements are dropped; nested arrays are flattened one level
     * via `addRange` (helper defined elsewhere in this file).
     *
     * @param array The array to flatten.
     */
    export function flatten<T>(array: (T | T[])[]): T[] {
        let result: T[];
        if (array) {
            result = [];
            for (const v of array) {
                if (v) {
                    if (isArray(v)) {
                        addRange(result, v);
                    }
                    else {
                        result.push(v);
                    }
                }
            }
        }
        // undefined input yields undefined output.
        return result;
    }
    /**
     * Maps an array. If the mapped value is an array, it is spread into the result.
     * Falsey mapped values are dropped; array results are appended via
     * `addRange` (helper defined elsewhere in this file).
     *
     * @param array The array to map.
     * @param mapfn The callback used to map the result into one or more values.
     */
    export function flatMap<T, U>(array: T[], mapfn: (x: T, i: number) => U | U[]): U[] {
        let result: U[];
        if (array) {
            result = [];
            for (let i = 0; i < array.length; i++) {
                const v = mapfn(array[i], i);
                if (v) {
                    if (isArray(v)) {
                        addRange(result, v);
                    }
                    else {
                        result.push(v);
                    }
                }
            }
        }
        // undefined input yields undefined output.
        return result;
    }
/**
 * Computes the first matching span of elements and returns a tuple of the first
 * span and the remaining elements. Both halves are fresh arrays; returns
 * `undefined` for a falsy input.
 */
export function span<T>(array: T[], f: (x: T, i: number) => boolean): [T[], T[]] {
    if (!array) {
        return undefined;
    }
    let splitAt = 0;
    while (splitAt < array.length && f(array[splitAt], splitAt)) {
        splitAt++;
    }
    if (splitAt === array.length) {
        // Every element matched: whole array (copied) plus an empty remainder.
        return [array.slice(0), []];
    }
    return [array.slice(0, splitAt), array.slice(splitAt)];
}
/**
 * Maps contiguous spans of values with the same key. Falsy mapped values are
 * dropped; returns `undefined` when `array` is falsy.
 *
 * @param array The array to map.
 * @param keyfn A callback used to select the key for an element.
 * @param mapfn A callback used to map a contiguous chunk of values to a single value.
 */
export function spanMap<T, K, U>(array: T[], keyfn: (x: T, i: number) => K, mapfn: (chunk: T[], key: K, start: number, end: number) => U): U[] {
    let result: U[];
    if (array) {
        result = [];
        const len = array.length;
        let previousKey: K;
        let key: K;
        let start = 0;
        let pos = 0;
        while (start < len) {
            // Advance `pos` to the end of the current same-key span. On exit,
            // `key` holds the key of the element that broke the span (if any).
            while (pos < len) {
                const value = array[pos];
                key = keyfn(value, pos);
                if (pos === 0) {
                    previousKey = key;
                }
                else if (key !== previousKey) {
                    break;
                }
                pos++;
            }
            if (start < pos) {
                // Falsy mapped values are dropped from the result.
                const v = mapfn(array.slice(start, pos), previousKey, start, pos);
                if (v) {
                    result.push(v);
                }
                start = pos;
            }
            // The element at `pos` was already examined above (its key broke the
            // span), so adopt its key and step past it before the next scan.
            previousKey = key;
            pos++;
        }
    }
    return result;
}
/**
 * Maps each own key/value pair of `object` through `f`, which returns a new
 * [key, value] tuple. Pairs whose mapped key is `undefined` are dropped.
 * Returns `undefined` when `object` is falsy.
 */
export function mapObject<T, U>(object: MapLike<T>, f: (key: string, x: T) => [string, U]): MapLike<U> {
    let result: MapLike<U>;
    if (object) {
        result = {};
        for (const v of getOwnKeys(object)) {
            // A falsy return from `f` is treated the same as dropping the pair.
            const [key, value]: [string, U] = f(v, object[v]) || [undefined, undefined];
            if (key !== undefined) {
                result[key] = value;
            }
        }
    }
    return result;
}
/**
 * Returns true when some element satisfies `predicate`; without a predicate,
 * true when the array is non-empty. A falsy array yields false.
 */
export function some<T>(array: T[], predicate?: (value: T) => boolean): boolean {
    if (!array) {
        return false;
    }
    if (!predicate) {
        return array.length > 0;
    }
    for (const value of array) {
        if (predicate(value)) {
            return true;
        }
    }
    return false;
}
/**
 * Concatenates two arrays into a fresh array; when either side is empty or
 * falsy, the OTHER argument is returned unchanged (no allocation).
 */
export function concatenate<T>(array1: T[], array2: T[]): T[] {
    const secondEmpty = !array2 || !array2.length;
    if (secondEmpty) {
        return array1;
    }
    const firstEmpty = !array1 || !array1.length;
    if (firstEmpty) {
        return array2;
    }
    return array1.concat(array2);
}
// TODO: fixme (N^2) - add optional comparer so collection can be sorted before deduplication.
/**
 * Returns a fresh array with duplicates removed, keeping the FIRST occurrence.
 * Equality is `===` unless `areEqual` is supplied. Falsy input yields `undefined`.
 */
export function deduplicate<T>(array: T[], areEqual?: (a: T, b: T) => boolean): T[] {
    if (!array) {
        return undefined;
    }
    const unique: T[] = [];
    for (const candidate of array) {
        const alreadySeen = unique.some(existing => areEqual ? areEqual(existing, candidate) : existing === candidate);
        if (!alreadySeen) {
            unique.push(candidate);
        }
    }
    return unique;
}
/**
 * Element-wise equality of two arrays (`===` or a custom `equaler`).
 * Two falsy inputs are equal only when they are the same value.
 */
export function arrayIsEqualTo<T>(array1: ReadonlyArray<T>, array2: ReadonlyArray<T>, equaler?: (a: T, b: T) => boolean): boolean {
    if (!array1 || !array2) {
        return array1 === array2;
    }
    if (array1.length !== array2.length) {
        return false;
    }
    let i = 0;
    while (i < array1.length) {
        const same = equaler ? equaler(array1[i], array2[i]) : array1[i] === array2[i];
        if (!same) {
            return false;
        }
        i++;
    }
    return true;
}
/**
 * Returns true when the change from `oldOptions` to `newOptions` can affect how
 * module names resolve, i.e. cached resolution results must be discarded.
 * A missing `oldOptions` (first compilation) always counts as a change.
 */
export function changesAffectModuleResolution(oldOptions: CompilerOptions, newOptions: CompilerOptions): boolean {
    return !oldOptions ||
        (oldOptions.module !== newOptions.module) ||
        (oldOptions.moduleResolution !== newOptions.moduleResolution) ||
        (oldOptions.noResolve !== newOptions.noResolve) ||
        (oldOptions.target !== newOptions.target) ||
        (oldOptions.noLib !== newOptions.noLib) ||
        (oldOptions.jsx !== newOptions.jsx) ||
        (oldOptions.allowJs !== newOptions.allowJs) ||
        (oldOptions.rootDir !== newOptions.rootDir) ||
        (oldOptions.configFilePath !== newOptions.configFilePath) ||
        (oldOptions.baseUrl !== newOptions.baseUrl) ||
        (oldOptions.maxNodeModuleJsDepth !== newOptions.maxNodeModuleJsDepth) ||
        !arrayIsEqualTo(oldOptions.lib, newOptions.lib) ||
        !arrayIsEqualTo(oldOptions.typeRoots, newOptions.typeRoots) ||
        !arrayIsEqualTo(oldOptions.rootDirs, newOptions.rootDirs) ||
        !equalOwnProperties(oldOptions.paths, newOptions.paths);
}
/**
 * Compacts an array, removing any falsey elements. Returns the ORIGINAL array
 * (no allocation) when nothing needed removing or the input is falsy.
 */
export function compact<T>(array: T[]): T[] {
    let copy: T[];
    if (array) {
        for (let i = 0; i < array.length; i++) {
            const element = array[i];
            // Lazily start a copy at the first falsy element, excluding it.
            if (!copy && !element) {
                copy = array.slice(0, i);
            }
            else if (copy && element) {
                copy.push(element);
            }
        }
    }
    return copy || array;
}
/** Sums the numeric property `prop` across all elements of `array`. */
export function sum(array: any[], prop: string): number {
    let total = 0;
    for (let i = 0; i < array.length; i++) {
        total += array[i][prop];
    }
    return total;
}
/**
 * Appends a value to an array, returning the array.
 *
 * @param to The array to which `value` is to be appended. If `to` is `undefined`, a new array
 * is created if `value` was appended.
 * @param value The value to append to the array. If `value` is `undefined`, nothing is
 * appended.
 */
export function append<T>(to: T[] | undefined, value: T | undefined): T[] | undefined {
    if (value === undefined) {
        return to;
    }
    const target = to === undefined ? [] : to;
    target.push(value);
    return target;
}
/**
 * Appends a range of values to an array, returning the array.
 *
 * @param to The array to which values are appended. If `to` is `undefined`, a new
 * array is created only if a defined value is actually appended.
 * @param from The values to append. If `from` is `undefined`, nothing is appended.
 * `undefined` elements of `from` are skipped.
 */
export function addRange<T>(to: T[] | undefined, from: T[] | undefined): T[] | undefined {
    if (from === undefined) {
        return to;
    }
    for (const item of from) {
        if (item === undefined) {
            continue;
        }
        if (to === undefined) {
            to = [];
        }
        to.push(item);
    }
    return to;
}
/** Compares array1[pos..end) with array2[pos..end) element-wise (`===`). */
export function rangeEquals<T>(array1: T[], array2: T[], pos: number, end: number) {
    for (let i = pos; i < end; i++) {
        if (array1[i] !== array2[i]) {
            return false;
        }
    }
    return true;
}
/**
 * Returns the first element of an array if non-empty, `undefined` otherwise.
 */
export function firstOrUndefined<T>(array: T[]): T {
    if (!array || array.length === 0) {
        return undefined;
    }
    return array[0];
}
/**
 * Returns the last element of an array if non-empty, `undefined` otherwise.
 */
export function lastOrUndefined<T>(array: T[]): T {
    if (!array || array.length === 0) {
        return undefined;
    }
    return array[array.length - 1];
}
/**
 * Returns the only element of an array if it contains only one element, `undefined` otherwise.
 */
export function singleOrUndefined<T>(array: T[]): T {
    if (array && array.length === 1) {
        return array[0];
    }
    return undefined;
}
/**
 * Returns the only element of an array if it contains only one element; otherwise,
 * returns the array itself (including when `array` is falsy).
 */
export function singleOrMany<T>(array: T[]): T | T[] {
    if (array && array.length === 1) {
        return array[0];
    }
    return array;
}
/** Returns a shallow copy of `array` with the element at `index` replaced by `value`. */
export function replaceElement<T>(array: T[], index: number, value: T): T[] {
    const copy = array.slice();
    copy[index] = value;
    return copy;
}
/**
 * Performs a binary search, finding the index at which 'value' occurs in 'array'.
 * If no such index is found, returns the 2's-complement of first index at which
 * number[index] exceeds number.
 * @param array A sorted array whose first element must be no larger than number
 * @param number The value to be searched for in the array.
 */
export function binarySearch<T>(array: T[], value: T, comparer?: (v1: T, v2: T) => number): number {
    // -1 for a missing/empty array is consistent with the ~low convention: ~0 === -1.
    if (!array || array.length === 0) {
        return -1;
    }
    let low = 0;
    let high = array.length - 1;
    // Default comparer uses the relational operators on T.
    comparer = comparer !== undefined
        ? comparer
        : (v1, v2) => (v1 < v2 ? -1 : (v1 > v2 ? 1 : 0));
    while (low <= high) {
        // Overflow-safe midpoint.
        const middle = low + ((high - low) >> 1);
        const midValue = array[middle];
        if (comparer(midValue, value) === 0) {
            return middle;
        }
        else if (comparer(midValue, value) > 0) {
            high = middle - 1;
        }
        else {
            low = middle + 1;
        }
    }
    // Not found: ~low encodes the insertion point.
    return ~low;
}
/**
 * Reduces `array` from the left. `start` is the first index processed (negative
 * or missing means 0); `count` caps the last index at `start + count` (inclusive),
 * clamped to the final index. When called with only two arguments, the first
 * element in range seeds the accumulator; otherwise `initial` does.
 */
export function reduceLeft<T, U>(array: T[], f: (memo: U, value: T, i: number) => U, initial: U, start?: number, count?: number): U;
export function reduceLeft<T>(array: T[], f: (memo: T, value: T, i: number) => T): T;
export function reduceLeft<T>(array: T[], f: (memo: T, value: T, i: number) => T, initial?: T, start?: number, count?: number): T {
    if (array && array.length > 0) {
        const size = array.length;
        if (size > 0) {
            let pos = start === undefined || start < 0 ? 0 : start;
            const end = count === undefined || pos + count > size - 1 ? size - 1 : pos + count;
            let result: T;
            // `arguments.length <= 2` distinguishes the no-initial overload from
            // an explicit `initial` of undefined.
            if (arguments.length <= 2) {
                result = array[pos];
                pos++;
            }
            else {
                result = initial;
            }
            while (pos <= end) {
                result = f(result, array[pos], pos);
                pos++;
            }
            return result;
        }
    }
    return initial;
}
/**
 * Reduces `array` from the right. `start` is the first (highest) index processed
 * (missing/out-of-range means the last index); `count` bounds the lowest index at
 * `start - count` (inclusive), clamped to 0. When called with only two arguments,
 * the element at `start` seeds the accumulator; otherwise `initial` does.
 */
export function reduceRight<T, U>(array: T[], f: (memo: U, value: T, i: number) => U, initial: U, start?: number, count?: number): U;
export function reduceRight<T>(array: T[], f: (memo: T, value: T, i: number) => T): T;
export function reduceRight<T>(array: T[], f: (memo: T, value: T, i: number) => T, initial?: T, start?: number, count?: number): T {
    if (array) {
        const size = array.length;
        if (size > 0) {
            let pos = start === undefined || start > size - 1 ? size - 1 : start;
            const end = count === undefined || pos - count < 0 ? 0 : pos - count;
            let result: T;
            // `arguments.length <= 2` distinguishes the no-initial overload from
            // an explicit `initial` of undefined.
            if (arguments.length <= 2) {
                result = array[pos];
                pos--;
            }
            else {
                result = initial;
            }
            while (pos >= end) {
                result = f(result, array[pos], pos);
                pos--;
            }
            return result;
        }
    }
    return initial;
}
// Cache the real hasOwnProperty so own-key checks work even when a map-like
// object shadows (or lacks) the method.
const hasOwnProperty = Object.prototype.hasOwnProperty;
/**
 * Indicates whether a map-like contains an own property with the specified key.
 *
 * NOTE: This is intended for use only with MapLike<T> objects. For Map<T> objects, use
 * the 'in' operator.
 *
 * @param map A map-like.
 * @param key A property key.
 */
export function hasProperty<T>(map: MapLike<T>, key: string): boolean {
    return Object.prototype.hasOwnProperty.call(map, key);
}
/**
 * Gets the value of an owned property in a map-like, or `undefined` when the key
 * is absent or only inherited.
 *
 * NOTE: This is intended for use only with MapLike<T> objects. For Map<T> objects, use
 * an indexer.
 *
 * @param map A map-like.
 * @param key A property key.
 */
export function getProperty<T>(map: MapLike<T>, key: string): T | undefined {
    if (!Object.prototype.hasOwnProperty.call(map, key)) {
        return undefined;
    }
    return map[key];
}
/**
 * Gets the owned, enumerable property keys of a map-like.
 *
 * NOTE: This is intended for use with MapLike<T> objects. For Map<T> objects, use
 * Object.keys instead as it offers better performance.
 *
 * @param map A map-like.
 */
export function getOwnKeys<T>(map: MapLike<T>): string[] {
    const ownKeys: string[] = [];
    for (const key in map) {
        if (Object.prototype.hasOwnProperty.call(map, key)) {
            ownKeys.push(key);
        }
    }
    return ownKeys;
}
/**
 * Enumerates the properties of a Map<T>, invoking a callback and returning the
 * first truthy result (or the last falsy result when none is truthy).
 *
 * @param map A map for which properties should be enumerated.
 * @param callback A callback to invoke for each property.
 */
export function forEachProperty<T, U>(map: Map<T>, callback: (value: T, key: string) => U): U {
    let found: U;
    for (const key in map) {
        found = callback(map[key], key);
        if (found) {
            break;
        }
    }
    return found;
}
/**
 * Returns true if a Map<T> has some matching property; with no predicate, true
 * when the map has any enumerable property at all.
 *
 * @param map A map whose properties should be tested.
 * @param predicate An optional callback used to test each property.
 */
export function someProperties<T>(map: Map<T>, predicate?: (value: T, key: string) => boolean) {
    for (const key in map) {
        if (!predicate) {
            return true;
        }
        if (predicate(map[key], key)) {
            return true;
        }
    }
    return false;
}
/**
 * Performs a shallow copy of the properties from a source Map<T> to a target MapLike<T>
 *
 * @param source A map from which properties should be copied.
 * @param target A map to which properties should be copied.
 */
export function copyProperties<T>(source: Map<T>, target: MapLike<T>): void {
    // NOTE(review): no hasOwnProperty filter here — presumably Map<T> objects are
    // created prototype-free (via createMap), so for-in sees only own keys; confirm.
    for (const key in source) {
        target[key] = source[key];
    }
}
/**
 * Copies the own, enumerable properties of each source object onto `t`, in
 * order (later sources win), and returns `t`.
 */
export function assign<T1 extends MapLike<{}>, T2, T3>(t: T1, arg1: T2, arg2: T3): T1 & T2 & T3;
export function assign<T1 extends MapLike<{}>, T2>(t: T1, arg1: T2): T1 & T2;
export function assign<T1 extends MapLike<{}>>(t: T1, ...args: any[]): any;
export function assign<T1 extends MapLike<{}>>(t: T1, ...args: any[]) {
    for (const source of args) {
        for (const key in source) {
            // Only copy own keys, matching getOwnKeys semantics.
            if (Object.prototype.hasOwnProperty.call(source, key)) {
                t[key] = source[key];
            }
        }
    }
    return t;
}
/**
 * Reduce the properties of a map.
 *
 * NOTE: This is intended for use with Map<T> objects. For MapLike<T> objects, use
 * reduceOwnProperties instead as it offers better runtime safety.
 *
 * @param map The map to reduce
 * @param callback An aggregation function that is called for each entry in the map
 * @param initial The initial value for the reduction.
 */
export function reduceProperties<T, U>(map: Map<T>, callback: (aggregate: U, value: T, key: string) => U, initial: U): U {
    let aggregate = initial;
    for (const key in map) {
        // String(key) normalizes numeric-looking keys to strings for the callback.
        aggregate = callback(aggregate, map[key], String(key));
    }
    return aggregate;
}
/**
 * Reduce the properties defined on a map-like (but not from its prototype chain).
 *
 * NOTE: This is intended for use with MapLike<T> objects. For Map<T> objects, use
 * reduceProperties instead as it offers better performance.
 *
 * @param map The map-like to reduce
 * @param callback An aggregation function that is called for each entry in the map
 * @param initial The initial value for the reduction.
 */
export function reduceOwnProperties<T, U>(map: MapLike<T>, callback: (aggregate: U, value: T, key: string) => U, initial: U): U {
    let aggregate = initial;
    for (const key in map) {
        // Skip inherited properties.
        if (!Object.prototype.hasOwnProperty.call(map, key)) {
            continue;
        }
        aggregate = callback(aggregate, map[key], String(key));
    }
    return aggregate;
}
/**
 * Performs a shallow equality comparison of the contents of two map-likes:
 * same own keys on both sides, with pairwise-equal values (`!==` or a custom
 * `equalityComparer`).
 *
 * @param left A map-like whose properties should be compared.
 * @param right A map-like whose properties should be compared.
 */
export function equalOwnProperties<T>(left: MapLike<T>, right: MapLike<T>, equalityComparer?: (left: T, right: T) => boolean) {
    if (left === right) return true;
    if (!left || !right) return false;
    for (const key in left) if (Object.prototype.hasOwnProperty.call(left, key)) {
        // Bug fix: was `!hasOwnProperty.call(right, key) === undefined`, which is
        // always false (a boolean is never undefined), so a key missing from
        // `right` was only caught when the values happened to differ.
        if (!Object.prototype.hasOwnProperty.call(right, key)) return false;
        if (equalityComparer ? !equalityComparer(left[key], right[key]) : left[key] !== right[key]) return false;
    }
    for (const key in right) if (Object.prototype.hasOwnProperty.call(right, key)) {
        if (!Object.prototype.hasOwnProperty.call(left, key)) return false;
    }
    return true;
}
/**
 * Creates a map from the elements of an array.
 *
 * @param array the array of input elements.
 * @param makeKey a function that produces a key for a given element.
 *
 * This function makes no effort to avoid collisions; if any two elements produce
 * the same key with the given 'makeKey' function, then the element with the higher
 * index in the array will be the one associated with the produced key.
 */
export function arrayToMap<T>(array: T[], makeKey: (value: T) => string): Map<T>;
export function arrayToMap<T, U>(array: T[], makeKey: (value: T) => string, makeValue: (value: T) => U): Map<U>;
export function arrayToMap<T, U>(array: T[], makeKey: (value: T) => string, makeValue?: (value: T) => U): Map<T | U> {
    // createMap is defined elsewhere in this file; presumably it returns a
    // prototype-free string-keyed map object — confirm.
    const result = createMap<T | U>();
    for (const value of array) {
        // Without makeValue, the element itself is stored.
        result[makeKey(value)] = makeValue ? makeValue(value) : value;
    }
    return result;
}
/** Returns true when `map` has no own, enumerable properties. */
export function isEmpty<T>(map: Map<T>) {
    for (const id in map) {
        if (Object.prototype.hasOwnProperty.call(map, id)) {
            return false;
        }
    }
    return true;
}
/** Returns a shallow copy of `map` as a fresh map created via `createMap`. */
export function cloneMap<T>(map: Map<T>) {
    const clone = createMap<T>();
    copyProperties(map, clone);
    return clone;
}
/** Returns a shallow copy of `object`'s own, enumerable properties as a plain object. */
export function clone<T>(object: T): T {
    const copy: any = {};
    for (const key in object) {
        if (Object.prototype.hasOwnProperty.call(object, key)) {
            copy[key] = (<any>object)[key];
        }
    }
    return copy;
}
/**
 * Merges the own properties of `first` and `second` into a fresh object.
 * Properties of `first` win on conflicts (they are written last).
 */
export function extend<T1, T2>(first: T1, second: T2): T1 & T2 {
    const merged: any = {};
    const copyOwn = (source: any) => {
        for (const key in source) {
            if (Object.prototype.hasOwnProperty.call(source, key)) {
                merged[key] = source[key];
            }
        }
    };
    copyOwn(second);
    copyOwn(first);
    return merged;
}
/**
 * Adds the value to an array of values associated with the key, and returns the array.
 * Creates the array if it does not already exist.
 */
export function multiMapAdd<V>(map: Map<V[]>, key: string | number, value: V): V[] {
    const existing = map[key];
    if (!existing) {
        return map[key] = [value];
    }
    existing.push(value);
    return existing;
}
/**
 * Removes a value from an array of values associated with the key.
 * Does not preserve the order of those values.
 * Does nothing if `key` is not in `map`, or `value` is not in `map[key]`.
 */
export function multiMapRemove<V>(map: Map<V[]>, key: string, value: V): void {
    const values = map[key];
    if (values) {
        // unorderedRemoveItem (defined elsewhere in this file) swaps-and-pops,
        // which is why order is not preserved.
        unorderedRemoveItem(values, value);
        // Drop the key entirely once its last value is removed.
        if (!values.length) {
            delete map[key];
        }
    }
}
/**
 * Tests whether a value is an array, preferring the native Array.isArray and
 * falling back to an instanceof check where it is unavailable.
 */
export function isArray(value: any): value is any[] {
    if (Array.isArray) {
        return Array.isArray(value);
    }
    return value instanceof Array;
}
/** Does nothing. Useful as a default or placeholder callback. */
export function noop(): void {}
/** Throws an error because a function is not implemented. */
export function notImplemented(): never {
    const error = new Error("Not implemented");
    throw error;
}
/**
 * Returns a function that invokes `callback` at most once and then keeps
 * returning the cached result. The callback reference is released after the
 * first call.
 */
export function memoize<T>(callback: () => T): () => T {
    let cached: T;
    return () => {
        if (callback !== undefined) {
            cached = callback();
            callback = undefined;
        }
        return cached;
    };
}
/**
 * High-order function, creates a function that executes a function composition.
 * For example, `chain(a, b)` is the equivalent of `x => ((a', b') => y => b'(a'(y)))(a(x), b(x))`
 *
 * @param args The functions to chain.
 */
export function chain<T, U>(...args: ((t: T) => (u: U) => U)[]): (t: T) => (u: U) => U;
export function chain<T, U>(a: (t: T) => (u: U) => U, b: (t: T) => (u: U) => U, c: (t: T) => (u: U) => U, d: (t: T) => (u: U) => U, e: (t: T) => (u: U) => U): (t: T) => (u: U) => U {
    // Five or more functions: copy `arguments` into a real array, partially
    // apply each with `t`, and compose the results.
    if (e) {
        const args: ((t: T) => (u: U) => U)[] = [];
        for (let i = 0; i < arguments.length; i++) {
            args[i] = arguments[i];
        }
        return t => compose(...map(args, f => f(t)));
    }
    // Hand-unrolled small arities avoid the array copy.
    else if (d) {
        return t => compose(a(t), b(t), c(t), d(t));
    }
    else if (c) {
        return t => compose(a(t), b(t), c(t));
    }
    else if (b) {
        return t => compose(a(t), b(t));
    }
    else if (a) {
        return t => compose(a(t));
    }
    else {
        // No functions: identity.
        return _ => u => u;
    }
}
/**
 * High-order function, composes functions. Note that functions are composed inside-out;
 * for example, `compose(a, b)` is the equivalent of `x => b(a(x))`.
 *
 * @param args The functions to compose.
 */
export function compose<T>(...args: ((t: T) => T)[]): (t: T) => T;
export function compose<T>(a: (t: T) => T, b: (t: T) => T, c: (t: T) => T, d: (t: T) => T, e: (t: T) => T): (t: T) => T {
    // Five or more functions: copy `arguments` into a real array and fold
    // left-to-right, threading the value through each function.
    if (e) {
        const args: ((t: T) => T)[] = [];
        for (let i = 0; i < arguments.length; i++) {
            args[i] = arguments[i];
        }
        return t => reduceLeft<(t: T) => T, T>(args, (u, f) => f(u), t);
    }
    // Hand-unrolled small arities avoid the array copy.
    else if (d) {
        return t => d(c(b(a(t))));
    }
    else if (c) {
        return t => c(b(a(t)));
    }
    else if (b) {
        return t => b(a(t));
    }
    else if (a) {
        return t => a(t);
    }
    else {
        // No functions: identity.
        return t => t;
    }
}
/**
 * Replaces `{0}`-style placeholders in `text` with entries from `args`,
 * offsetting each placeholder index by `baseIndex` (default 0).
 */
function formatStringFromArgs(text: string, args: { [index: number]: string; }, baseIndex?: number): string {
    const base = baseIndex || 0;
    return text.replace(/{(\d+)}/g, (_match, index?) => args[+index + base]);
}
/** Locale-specific message table, keyed by diagnostic key; populated externally when a localization file is loaded. */
export let localizedDiagnosticMessages: Map<string> = undefined;
/** Returns the localized text for `message` when available, otherwise its default text. */
export function getLocaleSpecificMessage(message: DiagnosticMessage) {
    if (localizedDiagnosticMessages) {
        const localized = localizedDiagnosticMessages[message.key];
        if (localized) {
            return localized;
        }
    }
    return message.message;
}
/**
 * Creates a diagnostic attached to a [start, start+length) location in `file`.
 * Extra `args` are substituted into `{0}`-style placeholders of the (possibly
 * localized) message text.
 */
export function createFileDiagnostic(file: SourceFile, start: number, length: number, message: DiagnosticMessage, ...args: (string | number)[]): Diagnostic;
export function createFileDiagnostic(file: SourceFile, start: number, length: number, message: DiagnosticMessage): Diagnostic {
    const end = start + length;
    Debug.assert(start >= 0, "start must be non-negative, is " + start);
    Debug.assert(length >= 0, "length must be non-negative, is " + length);
    if (file) {
        Debug.assert(start <= file.text.length, `start must be within the bounds of the file. ${start} > ${file.text.length}`);
        Debug.assert(end <= file.text.length, `end must be the bounds of the file. ${end} > ${file.text.length}`);
    }
    let text = getLocaleSpecificMessage(message);
    // Format args arrive via `arguments`, after the 4 declared parameters.
    if (arguments.length > 4) {
        text = formatStringFromArgs(text, arguments, 4);
    }
    return {
        file,
        start,
        length,
        messageText: text,
        category: message.category,
        code: message.code,
    };
}
/* internal */
/**
 * Formats a diagnostic message's text; any format args arrive via `arguments`
 * after the 2 declared parameters. `_dummy` only pads the arg offset.
 */
export function formatMessage(_dummy: any, message: DiagnosticMessage): string {
    let text = getLocaleSpecificMessage(message);
    if (arguments.length > 2) {
        text = formatStringFromArgs(text, arguments, 2);
    }
    return text;
}
/**
 * Creates a diagnostic with no file location (a global/compiler-level message).
 * Extra `args` are substituted into `{0}`-style placeholders of the message.
 */
export function createCompilerDiagnostic(message: DiagnosticMessage, ...args: (string | number)[]): Diagnostic;
export function createCompilerDiagnostic(message: DiagnosticMessage): Diagnostic {
    let text = getLocaleSpecificMessage(message);
    // Format args arrive via `arguments`, after the 1 declared parameter.
    if (arguments.length > 1) {
        text = formatStringFromArgs(text, arguments, 1);
    }
    return {
        file: undefined,
        start: undefined,
        length: undefined,
        messageText: text,
        category: message.category,
        code: message.code
    };
}
/**
 * Creates a new diagnostic message chain entry whose `next` link points at the
 * existing `details` chain; extra `args` are formatted into the message text.
 */
export function chainDiagnosticMessages(details: DiagnosticMessageChain, message: DiagnosticMessage, ...args: any[]): DiagnosticMessageChain;
export function chainDiagnosticMessages(details: DiagnosticMessageChain, message: DiagnosticMessage): DiagnosticMessageChain {
    let text = getLocaleSpecificMessage(message);
    // Format args arrive via `arguments`, after the 2 declared parameters.
    if (arguments.length > 2) {
        text = formatStringFromArgs(text, arguments, 2);
    }
    return {
        messageText: text,
        category: message.category,
        code: message.code,
        next: details
    };
}
/** Splices `tailChain` onto the end of `headChain` (mutating it) and returns `headChain`. */
export function concatenateDiagnosticMessageChains(headChain: DiagnosticMessageChain, tailChain: DiagnosticMessageChain): DiagnosticMessageChain {
    let cursor = headChain;
    for (; cursor.next; cursor = cursor.next) {
        // Walk to the last link of the head chain.
    }
    cursor.next = tailChain;
    return headChain;
}
/** Three-way comparison using the relational operators; `undefined` sorts before any defined value. */
export function compareValues<T>(a: T, b: T): Comparison {
    if (a === b) return Comparison.EqualTo;
    if (a === undefined) return Comparison.LessThan;
    if (b === undefined) return Comparison.GreaterThan;
    return a < b ? Comparison.LessThan : Comparison.GreaterThan;
}
/**
 * Three-way string comparison; `undefined` sorts first. With `ignoreCase`, a
 * locale-aware accent-sensitive compare is used when the module-level `collator`
 * is available, otherwise both strings are upper-cased and compared by code unit.
 */
export function compareStrings(a: string, b: string, ignoreCase?: boolean): Comparison {
    if (a === b) return Comparison.EqualTo;
    if (a === undefined) return Comparison.LessThan;
    if (b === undefined) return Comparison.GreaterThan;
    if (ignoreCase) {
        if (collator && String.prototype.localeCompare) {
            // accent means a ≠ b, a ≠ á, a = A
            const result = a.localeCompare(b, /*locales*/ undefined, { usage: "sort", sensitivity: "accent" });
            return result < 0 ? Comparison.LessThan : result > 0 ? Comparison.GreaterThan : Comparison.EqualTo;
        }
        a = a.toUpperCase();
        b = b.toUpperCase();
        if (a === b) return Comparison.EqualTo;
    }
    return a < b ? Comparison.LessThan : Comparison.GreaterThan;
}
/** Convenience wrapper around compareStrings with `ignoreCase` enabled. */
export function compareStringsCaseInsensitive(a: string, b: string) {
    return compareStrings(a, b, /*ignoreCase*/ true);
}
/** Returns the diagnostic's file name, or `undefined` for file-less diagnostics. */
function getDiagnosticFileName(diagnostic: Diagnostic): string {
    if (!diagnostic.file) {
        return undefined;
    }
    return diagnostic.file.fileName;
}
/** Orders diagnostics by file name, then start, length, code, and finally message text. */
export function compareDiagnostics(d1: Diagnostic, d2: Diagnostic): Comparison {
    return compareValues(getDiagnosticFileName(d1), getDiagnosticFileName(d2)) ||
        compareValues(d1.start, d2.start) ||
        compareValues(d1.length, d2.length) ||
        compareValues(d1.code, d2.code) ||
        compareMessageText(d1.messageText, d2.messageText) ||
        Comparison.EqualTo;
}
/**
 * Compares two message texts, each either a plain string or a message chain,
 * link by link. A shorter chain that is a prefix of a longer one sorts first.
 */
function compareMessageText(text1: string | DiagnosticMessageChain, text2: string | DiagnosticMessageChain): Comparison {
    while (text1 && text2) {
        // We still have both chains.
        const string1 = typeof text1 === "string" ? text1 : text1.messageText;
        const string2 = typeof text2 === "string" ? text2 : text2.messageText;
        const res = compareValues(string1, string2);
        if (res) {
            return res;
        }
        // A plain string has no continuation; a chain advances to its next link.
        text1 = typeof text1 === "string" ? undefined : text1.next;
        text2 = typeof text2 === "string" ? undefined : text2.next;
    }
    if (!text1 && !text2) {
        // if the chains are done, then these messages are the same.
        return Comparison.EqualTo;
    }
    // We still have one chain remaining. The shorter chain should come first.
    return text1 ? Comparison.GreaterThan : Comparison.LessThan;
}
/** Sorts `diagnostics` IN PLACE, then returns a new array with adjacent duplicates removed. */
export function sortAndDeduplicateDiagnostics(diagnostics: Diagnostic[]): Diagnostic[] {
    return deduplicateSortedDiagnostics(diagnostics.sort(compareDiagnostics));
}
/**
 * Collapses runs of equal diagnostics. Assumes the input is already sorted
 * (equal diagnostics adjacent); returns the input unchanged for 0–1 elements.
 */
export function deduplicateSortedDiagnostics(diagnostics: Diagnostic[]): Diagnostic[] {
    if (diagnostics.length < 2) {
        return diagnostics;
    }
    const newDiagnostics = [diagnostics[0]];
    let previousDiagnostic = diagnostics[0];
    for (let i = 1; i < diagnostics.length; i++) {
        const currentDiagnostic = diagnostics[i];
        const isDupe = compareDiagnostics(currentDiagnostic, previousDiagnostic) === Comparison.EqualTo;
        if (!isDupe) {
            newDiagnostics.push(currentDiagnostic);
            previousDiagnostic = currentDiagnostic;
        }
    }
    return newDiagnostics;
}
/** Converts every backslash in `path` to a forward slash. */
export function normalizeSlashes(path: string): string {
    return path.split("\\").join("/");
}
/**
 * Returns length of path root (i.e. length of "/", "x:/", "//server/share/, file:///user/files")
 */
export function getRootLength(path: string): number {
    if (path.charCodeAt(0) === CharacterCodes.slash) {
        // "/..." POSIX root is just 1 character...
        if (path.charCodeAt(1) !== CharacterCodes.slash) return 1;
        // ...while "//server/share/" (UNC) extends through the slash after the
        // share name, falling back to whatever prefix is present.
        const p1 = path.indexOf("/", 2);
        if (p1 < 0) return 2;
        const p2 = path.indexOf("/", p1 + 1);
        if (p2 < 0) return p1 + 1;
        return p2 + 1;
    }
    if (path.charCodeAt(1) === CharacterCodes.colon) {
        // Drive letter: "c:/" -> 3, bare "c:" -> 2.
        if (path.charCodeAt(2) === CharacterCodes.slash) return 3;
        return 2;
    }
    // Per RFC 1738 'file' URI schema has the shape file://<host>/<path>
    // if <host> is omitted then it is assumed that host value is 'localhost',
    // however slash after the omitted <host> is not removed.
    // file:///folder1/file1 - this is a correct URI
    // file://folder2/file2 - this is an incorrect URI
    if (path.lastIndexOf("file:///", 0) === 0) {
        return "file:///".length;
    }
    // Any other URL scheme: root is everything through "://".
    const idx = path.indexOf("://");
    if (idx !== -1) {
        return idx + "://".length;
    }
    // Relative path: no root.
    return 0;
}
/**
 * Internally, we represent paths as strings with '/' as the directory separator.
 * When we make system calls (eg: LanguageServiceHost.getDirectory()),
 * we expect the host to correctly handle paths in our specified format.
 */
export const directorySeparator = "/";
// Character-code form of the separator for cheap charCodeAt comparisons.
const directorySeparatorCharCode = CharacterCodes.slash;
/**
 * Splits the part of the path after its root into components, resolving "." and
 * ".." segments and dropping empty segments caused by consecutive slashes.
 * A ".." is kept when there is nothing to pop (or the previous component is
 * itself a kept "..").
 */
function getNormalizedParts(normalizedSlashedPath: string, rootLength: number): string[] {
    const parts = normalizedSlashedPath.substr(rootLength).split(directorySeparator);
    const normalized: string[] = [];
    for (const part of parts) {
        if (part !== ".") {
            if (part === ".." && normalized.length > 0 && lastOrUndefined(normalized) !== "..") {
                normalized.pop();
            }
            else {
                // A part may be an empty string (which is 'falsy') if the path had consecutive slashes,
                // e.g. "path//file.ts". Drop these before re-joining the parts.
                if (part) {
                    normalized.push(part);
                }
            }
        }
    }
    return normalized;
}
/**
 * Normalizes `path`: converts backslashes, resolves "." and ".." segments, and
 * preserves a trailing separator. Collapses to just the root when no components
 * remain.
 */
export function normalizePath(path: string): string {
    path = normalizeSlashes(path);
    const rootLength = getRootLength(path);
    const root = path.substr(0, rootLength);
    const normalized = getNormalizedParts(path, rootLength);
    if (normalized.length) {
        const joinedParts = root + normalized.join(directorySeparator);
        // Keep the directory-only marker (trailing '/') if the input had one.
        return pathEndsWithDirectorySeparator(path) ? joinedParts + directorySeparator : joinedParts;
    }
    else {
        return root;
    }
}
/** A path ending with '/' refers to a directory only, never a file. Empty strings return false. */
export function pathEndsWithDirectorySeparator(path: string): boolean {
    return path.charCodeAt(path.length - 1) === directorySeparatorCharCode;
}
/**
 * Returns the path except for its basename. Eg:
 *
 * /path/to/file.ext -> /path/to
 *
 * Never trims into the root: the cut point is the later of the root length and
 * the last separator, so "/file" yields "/".
 */
export function getDirectoryPath(path: Path): Path;
export function getDirectoryPath(path: string): string;
export function getDirectoryPath(path: string): any {
    return path.substr(0, Math.max(getRootLength(path), path.lastIndexOf(directorySeparator)));
}
/** True for scheme-qualified paths (containing "://") that are not rooted disk paths. */
export function isUrl(path: string) {
    return path && !isRootedDiskPath(path) && path.indexOf("://") !== -1;
}
/**
 * TypeScript 1.0 spec (April 2014): 11.2.1
 * An external module name is "relative" if the first term is "." or "..",
 * i.e. the name is ".", "..", or begins with "./", "../", ".\" or "..\".
 */
export function isExternalModuleNameRelative(moduleName: string): boolean {
    const startsWithDotSegment = /^\.\.?($|[\\/])/;
    return startsWithDotSegment.test(moduleName);
}
/** Effective script target: falls back to ES3 when `target` is unset (or falsy). */
export function getEmitScriptTarget(compilerOptions: CompilerOptions) {
    return compilerOptions.target || ScriptTarget.ES3;
}
/**
 * Effective module kind: an explicitly set numeric `module` wins; otherwise
 * ES2015-or-newer targets default to ES2015 modules and everything else to CommonJS.
 */
export function getEmitModuleKind(compilerOptions: CompilerOptions) {
    return typeof compilerOptions.module === "number" ?
        compilerOptions.module :
        getEmitScriptTarget(compilerOptions) >= ScriptTarget.ES2015 ? ModuleKind.ES2015 : ModuleKind.CommonJS;
}
/* @internal */
/**
 * Returns true when `str` contains at most one '*' character.
 * Implemented with two indexOf probes instead of the original hand-rolled
 * character scan — same behavior, less code, no CharacterCodes dependency.
 */
export function hasZeroOrOneAsteriskCharacter(str: string): boolean {
    const firstAsterisk = str.indexOf("*");
    // Either no asterisk at all, or no second one after the first.
    return firstAsterisk === -1 || str.indexOf("*", firstAsterisk + 1) === -1;
}
/** True when the path has a non-empty root per getRootLength ("/", "c:/", "//server/...", or a URL scheme). */
export function isRootedDiskPath(path: string) {
    return getRootLength(path) !== 0;
}
/**
 * Converts an absolute path into a path relative to `basePath`; already-relative
 * paths are returned unchanged.
 */
export function convertToRelativePath(absoluteOrRelativePath: string, basePath: string, getCanonicalFileName: (path: string) => string): string {
    return !isRootedDiskPath(absoluteOrRelativePath)
        ? absoluteOrRelativePath
        : getRelativePathToDirectoryOrUrl(basePath, absoluteOrRelativePath, basePath, getCanonicalFileName, /*isAbsolutePathAnUrl*/ false);
}
/** Returns the root (verbatim, including trailing slash) followed by the normalized non-root components. */
function normalizedPathComponents(path: string, rootLength: number) {
    const normalizedParts = getNormalizedParts(path, rootLength);
    return [path.substr(0, rootLength)].concat(normalizedParts);
}
/**
 * Normalizes `path` (resolving it against `currentDirectory` when it has no root)
 * and returns its components: the root first, then each directory/file segment.
 */
export function getNormalizedPathComponents(path: string, currentDirectory: string) {
    path = normalizeSlashes(path);
    let rootLength = getRootLength(path);
    if (rootLength === 0) {
        // If the path is not rooted it is relative to current directory
        path = combinePaths(normalizeSlashes(currentDirectory), path);
        rootLength = getRootLength(path);
    }
    return normalizedPathComponents(path, rootLength);
}
/** Returns `fileName` as a normalized absolute path, resolved against `currentDirectory` when relative. */
export function getNormalizedAbsolutePath(fileName: string, currentDirectory: string) {
    return getNormalizedPathFromPathComponents(getNormalizedPathComponents(fileName, currentDirectory));
}
/**
 * Joins path components back into a path string: the root (which carries its own
 * trailing slash) plus the remaining segments separated by '/'.
 * Implicitly returns `undefined` when the input is missing or empty.
 */
export function getNormalizedPathFromPathComponents(pathComponents: string[]) {
    if (pathComponents && pathComponents.length) {
        return pathComponents[0] + pathComponents.slice(1).join(directorySeparator);
    }
}
/**
 * Splits a URL into path components, treating scheme + host (through the slash
 * after the host) as the root component.
 */
function getNormalizedPathComponentsOfUrl(url: string) {
    // Get root length of http://www.website.com/folder1/folder2/
    // In this example the root is: http://www.website.com/
    // normalized path components should be ["http://www.website.com/", "folder1", "folder2"]
    const urlLength = url.length;
    // Initial root length is http:// part
    let rootLength = url.indexOf("://") + "://".length;
    while (rootLength < urlLength) {
        // Consume all immediate slashes in the protocol
        // eg.initial rootlength is just file:// but it needs to consume another "/" in file:///
        if (url.charCodeAt(rootLength) === CharacterCodes.slash) {
            rootLength++;
        }
        else {
            // non slash character means we continue proceeding to next component of root search
            break;
        }
    }
    // there are no parts after http:// just return current string as the pathComponent
    if (rootLength === urlLength) {
        return [url];
    }
    // Find the index of "/" after website.com so the root can be http://www.website.com/ (from existing http://)
    const indexOfNextSlash = url.indexOf(directorySeparator, rootLength);
    if (indexOfNextSlash !== -1) {
        // Found the "/" after the website.com so the root is length of http://www.website.com/
        // and get components after the root normally like any other folder components
        rootLength = indexOfNextSlash + 1;
        return normalizedPathComponents(url, rootLength);
    }
    else {
        // Can't find the host assume the rest of the string as component
        // but make sure we append "/" to it as root is not joined using "/"
        // eg. if url passed in was http://website.com we want to use root as [http://website.com/]
        // so that other path manipulations will be correct and it can be merged with relative paths correctly
        return [url + directorySeparator];
    }
}
/** Splits either a URL (scheme+host as root) or a file path (resolved against `currentDirectory`) into components. */
function getNormalizedPathOrUrlComponents(pathOrUrl: string, currentDirectory: string) {
    return isUrl(pathOrUrl)
        ? getNormalizedPathComponentsOfUrl(pathOrUrl)
        : getNormalizedPathComponents(pathOrUrl, currentDirectory);
}
/**
 * Computes the relative path from `directoryPathOrUrl` to `relativeOrAbsolutePath`
 * ("../" segments plus the diverging tail). When the two share no leading
 * component, returns the absolute path instead (prefixed with "file:///" when
 * `isAbsolutePathAnUrl` and the path is disk-rooted).
 */
export function getRelativePathToDirectoryOrUrl(directoryPathOrUrl: string, relativeOrAbsolutePath: string, currentDirectory: string, getCanonicalFileName: (fileName: string) => string, isAbsolutePathAnUrl: boolean) {
    const pathComponents = getNormalizedPathOrUrlComponents(relativeOrAbsolutePath, currentDirectory);
    const directoryComponents = getNormalizedPathOrUrlComponents(directoryPathOrUrl, currentDirectory);
    if (directoryComponents.length > 1 && lastOrUndefined(directoryComponents) === "") {
        // If the directory path given was of type test/cases/ then we really need components of directory to be only till its name
        // that is ["test", "cases", ""] needs to be actually ["test", "cases"]
        directoryComponents.length--;
    }
    // Find the component that differs
    let joinStartIndex: number;
    for (joinStartIndex = 0; joinStartIndex < pathComponents.length && joinStartIndex < directoryComponents.length; joinStartIndex++) {
        if (getCanonicalFileName(directoryComponents[joinStartIndex]) !== getCanonicalFileName(pathComponents[joinStartIndex])) {
            break;
        }
    }
    // Get the relative path
    if (joinStartIndex) {
        let relativePath = "";
        const relativePathComponents = pathComponents.slice(joinStartIndex, pathComponents.length);
        // One "../" for every remaining directory component not shared with the target.
        for (; joinStartIndex < directoryComponents.length; joinStartIndex++) {
            if (directoryComponents[joinStartIndex] !== "") {
                relativePath = relativePath + ".." + directorySeparator;
            }
        }
        return relativePath + relativePathComponents.join(directorySeparator);
    }
    // Cant find the relative path, get the absolute path
    let absolutePath = getNormalizedPathFromPathComponents(pathComponents);
    if (isAbsolutePathAnUrl && isRootedDiskPath(absolutePath)) {
        absolutePath = "file:///" + absolutePath;
    }
    return absolutePath;
}
/** Returns the last path component (file name), or the path itself when it has no separator. */
export function getBaseFileName(path: string) {
    // Propagate undefined instead of throwing on a missing path.
    if (path === undefined) {
        return undefined;
    }
    const lastSeparator = path.lastIndexOf(directorySeparator);
    if (lastSeparator < 0) {
        return path;
    }
    return path.substring(lastSeparator + 1);
}
/** Joins two paths; an empty side yields the other side, and a rooted second path wins outright. */
export function combinePaths(path1: string, path2: string) {
    if (!(path1 && path1.length)) return path2;
    if (!(path2 && path2.length)) return path1;
    if (getRootLength(path2) !== 0) return path2;
    const hasTrailingSeparator = path1.charAt(path1.length - 1) === directorySeparator;
    return hasTrailingSeparator ? path1 + path2 : path1 + directorySeparator + path2;
}
/**
 * Removes a trailing directory separator from a path.
 * @param path The path.
 */
export function removeTrailingDirectorySeparator(path: string) {
    const endsWithSeparator = path.charAt(path.length - 1) === directorySeparator;
    return endsWithSeparator ? path.substr(0, path.length - 1) : path;
}
/**
 * Adds a trailing directory separator to a path, if it does not already have one.
 * @param path The path.
 */
export function ensureTrailingDirectorySeparator(path: string) {
    const endsWithSeparator = path.charAt(path.length - 1) === directorySeparator;
    return endsWithSeparator ? path : path + directorySeparator;
}
/** Orders two paths component-by-component after normalization; shorter path wins a tie. */
export function comparePaths(a: string, b: string, currentDirectory: string, ignoreCase?: boolean) {
    // Fast paths: identical strings, and a stable ordering for undefined.
    if (a === b) return Comparison.EqualTo;
    if (a === undefined) return Comparison.LessThan;
    if (b === undefined) return Comparison.GreaterThan;
    // Trailing separators must not influence the comparison.
    a = removeTrailingDirectorySeparator(a);
    b = removeTrailingDirectorySeparator(b);
    const aParts = getNormalizedPathComponents(a, currentDirectory);
    const bParts = getNormalizedPathComponents(b, currentDirectory);
    const commonLength = Math.min(aParts.length, bParts.length);
    for (let i = 0; i < commonLength; i++) {
        const componentComparison = compareStrings(aParts[i], bParts[i], ignoreCase);
        if (componentComparison !== Comparison.EqualTo) {
            return componentComparison;
        }
    }
    // All shared components were equal: the path with fewer components sorts first.
    return compareValues(aParts.length, bParts.length);
}
/** True when `child` lies at or below `parent` after normalization. */
export function containsPath(parent: string, child: string, currentDirectory: string, ignoreCase?: boolean) {
    if (parent === undefined || child === undefined) return false;
    if (parent === child) return true;
    parent = removeTrailingDirectorySeparator(parent);
    child = removeTrailingDirectorySeparator(child);
    if (parent === child) return true;
    const parentParts = getNormalizedPathComponents(parent, currentDirectory);
    const childParts = getNormalizedPathComponents(child, currentDirectory);
    // The child must be at least as deep as the parent.
    if (childParts.length < parentParts.length) {
        return false;
    }
    // Every parent component must match the corresponding child component.
    for (let i = 0; i < parentParts.length; i++) {
        if (compareStrings(parentParts[i], childParts[i], ignoreCase) !== Comparison.EqualTo) {
            return false;
        }
    }
    return true;
}
/* @internal */
export function startsWith(str: string, prefix: string): boolean {
    // The leading slice of prefix length equals the prefix exactly when str starts with it.
    return str.substr(0, prefix.length) === prefix;
}
/* @internal */
export function endsWith(str: string, suffix: string): boolean {
    const start = str.length - suffix.length;
    return start >= 0 && str.indexOf(suffix, start) === start;
}
/** True when `path` ends with `extension` and has something before it (a bare ".ts" does not count). */
export function fileExtensionIs(path: string, extension: string): boolean {
    if (path.length <= extension.length) {
        return false;
    }
    return endsWith(path, extension);
}
/** True when `path` carries any of the given extensions. */
export function fileExtensionIsAny(path: string, extensions: string[]): boolean {
    for (let i = 0; i < extensions.length; i++) {
        if (fileExtensionIs(path, extensions[i])) {
            return true;
        }
    }
    return false;
}
// Reserved characters, forces escaping of any non-word (or digit), non-whitespace character.
// It may be inefficient (we could just match (/[-[\]{}()*+?.,\\^$|#\s]/g), but this is future
// proof.
const reservedCharacterPattern = /[^\w\s\/]/g;
// Char codes that mark a wildcard position ("*" and "?") inside an include spec.
const wildcardCharCodes = [CharacterCodes.asterisk, CharacterCodes.question];
/**
 * Matches any single directory segment unless it is the last segment and a .min.js file
 * Breakdown:
 * [^./] # matches everything up to the first . character (excluding directory separators)
 * (\\.(?!min\\.js$))? # matches . characters but not if they are part of the .min.js file extension
 */
const singleAsteriskRegexFragmentFiles = "([^./]|(\\.(?!min\\.js$))?)*";
// For the "directories"/"exclude" usages, "*" simply matches anything except a separator.
const singleAsteriskRegexFragmentOther = "[^/]*";
/**
 * Builds a single regular expression source that matches any of the wildcard
 * `specs`, resolved against `basePath`. `usage` selects the dialect: "files"
 * and "directories" make `*`, `?` and `**` skip dot-prefixed entries, while
 * "exclude" lets them match. Returns undefined when no usable spec remains.
 */
export function getRegularExpressionForWildcard(specs: string[], basePath: string, usage: "files" | "directories" | "exclude") {
if (specs === undefined || specs.length === 0) {
return undefined;
}
const replaceWildcardCharacter = usage === "files" ? replaceWildCardCharacterFiles : replaceWildCardCharacterOther;
const singleAsteriskRegexFragment = usage === "files" ? singleAsteriskRegexFragmentFiles : singleAsteriskRegexFragmentOther;
/**
 * Regex for the ** wildcard. Matches any number of subdirectories. When used for including
 * files or directories, does not match subdirectories that start with a . character
 */
const doubleAsteriskRegexFragment = usage === "exclude" ? "(/.+?)?" : "(/[^/.][^/]*)*?";
let pattern = "";
let hasWrittenSubpattern = false;
spec: for (const spec of specs) {
if (!spec) {
continue;
}
let subpattern = "";
let hasRecursiveDirectoryWildcard = false;
let hasWrittenComponent = false;
const components = getNormalizedPathComponents(spec, basePath);
// A spec ending in "**" is only meaningful for excludes; skip it for includes.
if (usage !== "exclude" && components[components.length - 1] === "**") {
continue spec;
}
// getNormalizedPathComponents includes the separator for the root component.
// We need to remove to create our regex correctly.
components[0] = removeTrailingDirectorySeparator(components[0]);
// For "directories", each non-"**" component opens an optional group so that
// parent directories of a match are matched too; groups are closed below.
let optionalCount = 0;
for (let component of components) {
if (component === "**") {
// Only one recursive wildcard is allowed per spec; a second one drops the spec.
if (hasRecursiveDirectoryWildcard) {
continue spec;
}
subpattern += doubleAsteriskRegexFragment;
hasRecursiveDirectoryWildcard = true;
hasWrittenComponent = true;
}
else {
if (usage === "directories") {
subpattern += "(";
optionalCount++;
}
if (hasWrittenComponent) {
subpattern += directorySeparator;
}
if (usage !== "exclude") {
// The * and ? wildcards should not match directories or files that start with . if they
// appear first in a component. Dotted directories and files can be included explicitly
// like so: **/.*/.*
if (component.charCodeAt(0) === CharacterCodes.asterisk) {
subpattern += "([^./]" + singleAsteriskRegexFragment + ")?";
component = component.substr(1);
}
else if (component.charCodeAt(0) === CharacterCodes.question) {
subpattern += "[^./]";
component = component.substr(1);
}
}
subpattern += component.replace(reservedCharacterPattern, replaceWildcardCharacter);
hasWrittenComponent = true;
}
}
// Close the optional groups opened for the "directories" usage.
while (optionalCount > 0) {
subpattern += ")?";
optionalCount--;
}
if (hasWrittenSubpattern) {
pattern += "|";
}
pattern += "(" + subpattern + ")";
hasWrittenSubpattern = true;
}
if (!pattern) {
return undefined;
}
// An exclude also matches everything *under* the excluded path ("($|/)").
return "^(" + pattern + (usage === "exclude" ? ")($|/)" : ")$");
}
// Bound variants passed to String.replace for the "files" and other usages.
function replaceWildCardCharacterFiles(match: string) {
    return replaceWildcardCharacter(match, singleAsteriskRegexFragmentFiles);
}
function replaceWildCardCharacterOther(match: string) {
    return replaceWildcardCharacter(match, singleAsteriskRegexFragmentOther);
}
// "*" and "?" expand to wildcard fragments; every other reserved character is regex-escaped.
function replaceWildcardCharacter(match: string, singleAsteriskRegexFragment: string) {
    if (match === "*") {
        return singleAsteriskRegexFragment;
    }
    if (match === "?") {
        return "[^/]";
    }
    return "\\" + match;
}
/** A directory listing, split into plain file names and subdirectory names. */
export interface FileSystemEntries {
files: string[];
directories: string[];
}
/** Regex sources and search roots precomputed by getFileMatcherPatterns for matchFiles. */
export interface FileMatcherPatterns {
includeFilePattern: string;
includeDirectoryPattern: string;
excludePattern: string;
basePaths: string[];
}
export function getFileMatcherPatterns(path: string, excludes: string[], includes: string[], useCaseSensitiveFileNames: boolean, currentDirectory: string): FileMatcherPatterns {
path = normalizePath(path);
currentDirectory = normalizePath(currentDirectory);
const absolutePath = combinePaths(currentDirectory, path);
return {
includeFilePattern: getRegularExpressionForWildcard(includes, absolutePath, "files"),
includeDirectoryPattern: getRegularExpressionForWildcard(includes, absolutePath, "directories"),
excludePattern: getRegularExpressionForWildcard(excludes, absolutePath, "exclude"),
basePaths: getBasePaths(path, includes, useCaseSensitiveFileNames)
};
}
/**
 * Walks the file system from each computed base path and returns the file
 * names that pass the extension, include and exclude filters. Directories are
 * pruned with the directory include/exclude patterns before recursing.
 * `getFileSystemEntries` abstracts the host file system.
 */
export function matchFiles(path: string, extensions: string[], excludes: string[], includes: string[], useCaseSensitiveFileNames: boolean, currentDirectory: string, getFileSystemEntries: (path: string) => FileSystemEntries): string[] {
path = normalizePath(path);
currentDirectory = normalizePath(currentDirectory);
const patterns = getFileMatcherPatterns(path, excludes, includes, useCaseSensitiveFileNames, currentDirectory);
const regexFlag = useCaseSensitiveFileNames ? "" : "i";
const includeFileRegex = patterns.includeFilePattern && new RegExp(patterns.includeFilePattern, regexFlag);
const includeDirectoryRegex = patterns.includeDirectoryPattern && new RegExp(patterns.includeDirectoryPattern, regexFlag);
const excludeRegex = patterns.excludePattern && new RegExp(patterns.excludePattern, regexFlag);
const result: string[] = [];
for (const basePath of patterns.basePaths) {
visitDirectory(basePath, combinePaths(currentDirectory, basePath));
}
return result;
// Patterns are tested against absolute names; `result` collects the names
// built from the (possibly relative) `path` argument.
function visitDirectory(path: string, absolutePath: string) {
const { files, directories } = getFileSystemEntries(path);
for (const current of files) {
const name = combinePaths(path, current);
const absoluteName = combinePaths(absolutePath, current);
if ((!extensions || fileExtensionIsAny(name, extensions)) &&
(!includeFileRegex || includeFileRegex.test(absoluteName)) &&
(!excludeRegex || !excludeRegex.test(absoluteName))) {
result.push(name);
}
}
for (const current of directories) {
const name = combinePaths(path, current);
const absoluteName = combinePaths(absolutePath, current);
if ((!includeDirectoryRegex || includeDirectoryRegex.test(absoluteName)) &&
(!excludeRegex || !excludeRegex.test(absoluteName))) {
visitDirectory(name, absoluteName);
}
}
}
}
/**
 * Computes the unique non-wildcard base paths amongst the provided include patterns.
 */
function getBasePaths(path: string, includes: string[], useCaseSensitiveFileNames: boolean) {
// Storage for our results in the form of literal paths (e.g. the paths as written by the user).
const basePaths: string[] = [path];
if (includes) {
// Storage for literal base paths amongst the include patterns.
const includeBasePaths: string[] = [];
for (const include of includes) {
// We also need to check the relative paths by converting them to absolute and normalizing
// in case they escape the base path (e.g "..\somedirectory")
const absolute: string = isRootedDiskPath(include) ? include : normalizePath(combinePaths(path, include));
// Base path = everything before the first wildcard; for a purely literal
// include, the directory containing the named file.
const wildcardOffset = indexOfAnyCharCode(absolute, wildcardCharCodes);
const includeBasePath = wildcardOffset < 0
? removeTrailingDirectorySeparator(getDirectoryPath(absolute))
: absolute.substring(0, absolute.lastIndexOf(directorySeparator, wildcardOffset));
// Append the literal and canonical candidate base paths.
includeBasePaths.push(includeBasePath);
}
// Sort the offsets array using either the literal or canonical path representations.
includeBasePaths.sort(useCaseSensitiveFileNames ? compareStrings : compareStringsCaseInsensitive);
// Iterate over each include base path and include unique base paths that are not a
// subpath of an existing base path
include: for (let i = 0; i < includeBasePaths.length; i++) {
const includeBasePath = includeBasePaths[i];
for (let j = 0; j < basePaths.length; j++) {
if (containsPath(basePaths[j], includeBasePath, path, !useCaseSensitiveFileNames)) {
continue include;
}
}
basePaths.push(includeBasePath);
}
}
return basePaths;
}
/**
 * Resolves a definite ScriptKind. A truthy `scriptKind` (specified and not
 * Unknown, which is 0) wins; otherwise the kind is inferred from the file
 * name, falling back to TS when the name is not informative either.
 */
export function ensureScriptKind(fileName: string, scriptKind?: ScriptKind): ScriptKind {
    if (scriptKind) {
        return scriptKind;
    }
    return getScriptKindFromFileName(fileName) || ScriptKind.TS;
}
/** Infers a ScriptKind from a file name's extension; Unknown when there is no recognized extension. */
export function getScriptKindFromFileName(fileName: string): ScriptKind {
    // Fix: handle the no-dot case explicitly. Previously lastIndexOf returned -1
    // and substr(-1) silently produced the *last character* of the name, which
    // only returned Unknown by accident of never matching a multi-char case.
    const dotIndex = fileName.lastIndexOf(".");
    if (dotIndex < 0) {
        return ScriptKind.Unknown;
    }
    const ext = fileName.substr(dotIndex);
    switch (ext.toLowerCase()) {
        case ".js":
            return ScriptKind.JS;
        case ".jsx":
            return ScriptKind.JSX;
        case ".ts":
            return ScriptKind.TS;
        case ".tsx":
            return ScriptKind.TSX;
        default:
            return ScriptKind.Unknown;
    }
}
/**
 * List of supported extensions in order of file resolution precedence.
 */
export const supportedTypeScriptExtensions = [".ts", ".tsx", ".d.ts"];
/** Must have ".d.ts" first because if ".ts" goes first, that will be detected as the extension instead of ".d.ts". */
export const supportedTypescriptExtensionsForExtractExtension = [".d.ts", ".ts", ".tsx"];
export const supportedJavascriptExtensions = [".js", ".jsx"];
const allSupportedExtensions = supportedTypeScriptExtensions.concat(supportedJavascriptExtensions);
// JavaScript extensions only participate in resolution when allowJs is on.
export function getSupportedExtensions(options?: CompilerOptions): string[] {
return options && options.allowJs ? allSupportedExtensions : supportedTypeScriptExtensions;
}
// NOTE(review): `forEach` here is the project helper, which presumably returns
// the first truthy callback result -- so these return `true`/undefined rather
// than a strict boolean. Confirm against the helper before tightening the type.
export function hasJavaScriptFileExtension(fileName: string) {
return forEach(supportedJavascriptExtensions, extension => fileExtensionIs(fileName, extension));
}
export function hasTypeScriptFileExtension(fileName: string) {
return forEach(supportedTypeScriptExtensions, extension => fileExtensionIs(fileName, extension));
}
/** True when the file name carries one of the extensions enabled by the compiler options. */
export function isSupportedSourceFileName(fileName: string, compilerOptions?: CompilerOptions) {
    if (!fileName) {
        return false;
    }
    const supported = getSupportedExtensions(compilerOptions);
    for (let i = 0; i < supported.length; i++) {
        if (fileExtensionIs(fileName, supported[i])) {
            return true;
        }
    }
    return false;
}
/**
 * Extension boundaries by priority. Lower numbers indicate higher priorities, and are
 * aligned to the offset of the highest priority extension in the
 * allSupportedExtensions array.
 */
export const enum ExtensionPriority {
// [".ts", ".tsx"] start at offset 0 of allSupportedExtensions.
TypeScriptFiles = 0,
// [".d.ts", ".js", ".jsx"] start at offset 2.
DeclarationAndJavaScriptFiles = 2,
// One past the last offset of allSupportedExtensions.
Limit = 5,
Highest = TypeScriptFiles,
Lowest = DeclarationAndJavaScriptFiles,
}
/** Maps a path to the priority bucket of its extension within `supportedExtensions`. */
export function getExtensionPriority(path: string, supportedExtensions: string[]): ExtensionPriority {
// Iterate from the end so the more specific ".d.ts" wins over its ".ts" suffix.
for (let i = supportedExtensions.length - 1; i >= 0; i--) {
if (fileExtensionIs(path, supportedExtensions[i])) {
return adjustExtensionPriority(<ExtensionPriority>i);
}
}
// If its not in the list of supported extensions, this is likely a
// TypeScript file with a non-ts extension
return ExtensionPriority.Highest;
}
/**
 * Adjusts an extension priority to be the highest priority within the same range.
 */
export function adjustExtensionPriority(extensionPriority: ExtensionPriority): ExtensionPriority {
    // Snap to the start of whichever bucket the priority falls in.
    return extensionPriority < ExtensionPriority.DeclarationAndJavaScriptFiles
        ? ExtensionPriority.TypeScriptFiles
        : extensionPriority < ExtensionPriority.Limit
            ? ExtensionPriority.DeclarationAndJavaScriptFiles
            : ExtensionPriority.Limit;
}
/**
 * Gets the next lowest extension priority for a given priority.
 */
export function getNextLowestExtensionPriority(extensionPriority: ExtensionPriority): ExtensionPriority {
    return extensionPriority < ExtensionPriority.DeclarationAndJavaScriptFiles
        ? ExtensionPriority.DeclarationAndJavaScriptFiles
        : ExtensionPriority.Limit;
}
// ".d.ts" is listed first so it is stripped as a whole rather than just ".ts".
const extensionsToRemove = [".d.ts", ".ts", ".js", ".tsx", ".jsx"];
/** Strips the first matching known extension; returns the path unchanged when none match. */
export function removeFileExtension(path: string): string {
    for (let i = 0; i < extensionsToRemove.length; i++) {
        const trimmed = tryRemoveExtension(path, extensionsToRemove[i]);
        if (trimmed !== undefined) {
            return trimmed;
        }
    }
    return path;
}
/** Removes `extension` from `path`, or undefined when the path does not carry it. */
export function tryRemoveExtension(path: string, extension: string): string | undefined {
    if (!fileExtensionIs(path, extension)) {
        return undefined;
    }
    return removeExtension(path, extension);
}
/** Unconditionally chops `extension.length` characters off the end of `path`. */
export function removeExtension(path: string, extension: string): string {
    const end = path.length - extension.length;
    return path.substring(0, end);
}
/** Replaces the known extension of `path` with `newExtension`, preserving the input's string type. */
export function changeExtension<T extends string | Path>(path: T, newExtension: string): T {
    const withoutExtension = removeFileExtension(path);
    return <T>(withoutExtension + newExtension);
}
/**
 * Factories for the compiler's core object kinds. The default implementation
 * is `objectAllocator` below; since that binding is a mutable `let`, consumers
 * can substitute their own allocator.
 */
export interface ObjectAllocator {
getNodeConstructor(): new (kind: SyntaxKind, pos?: number, end?: number) => Node;
getTokenConstructor(): new <TKind extends SyntaxKind>(kind: TKind, pos?: number, end?: number) => Token<TKind>;
getIdentifierConstructor(): new (kind: SyntaxKind.Identifier, pos?: number, end?: number) => Identifier;
getSourceFileConstructor(): new (kind: SyntaxKind.SourceFile, pos?: number, end?: number) => SourceFile;
getSymbolConstructor(): new (flags: SymbolFlags, name: string) => Symbol;
getTypeConstructor(): new (checker: TypeChecker, flags: TypeFlags) => Type;
getSignatureConstructor(): new (checker: TypeChecker) => Signature;
}
// Plain constructor functions rather than classes; every field is assigned
// eagerly, including undefined ones (presumably to keep object shapes stable
// for the VM -- confirm before changing).
function Symbol(this: Symbol, flags: SymbolFlags, name: string) {
this.flags = flags;
this.name = name;
this.declarations = undefined;
}
function Type(this: Type, _checker: TypeChecker, flags: TypeFlags) {
this.flags = flags;
}
// Intentionally empty: signature fields are presumably attached after construction.
function Signature() {
}
function Node(this: Node, kind: SyntaxKind, pos: number, end: number) {
this.id = 0;
this.kind = kind;
this.pos = pos;
this.end = end;
this.flags = NodeFlags.None;
this.modifierFlagsCache = ModifierFlags.None;
this.transformFlags = TransformFlags.None;
this.parent = undefined;
this.original = undefined;
}
// Default allocator: the single Node function serves as the constructor for
// nodes, tokens, identifiers and source files alike.
export let objectAllocator: ObjectAllocator = {
getNodeConstructor: () => <any>Node,
getTokenConstructor: () => <any>Node,
getIdentifierConstructor: () => <any>Node,
getSourceFileConstructor: () => <any>Node,
getSymbolConstructor: () => <any>Symbol,
getTypeConstructor: () => <any>Type,
getSignatureConstructor: () => <any>Signature
};
/** Severity levels for optional debug assertions; higher values enable more checks. */
export const enum AssertionLevel {
None = 0,
Normal = 1,
Aggressive = 2,
VeryAggressive = 3,
}
export namespace Debug {
// Assertions above this level are skipped; the default disables them entirely.
export let currentAssertionLevel = AssertionLevel.None;
export function shouldAssert(level: AssertionLevel): boolean {
return currentAssertionLevel >= level;
}
// `verboseDebugInfo` is only invoked on failure, so it may be expensive to compute.
export function assert(expression: boolean, message?: string, verboseDebugInfo?: () => string): void {
if (!expression) {
let verboseDebugString = "";
if (verboseDebugInfo) {
verboseDebugString = "\r\nVerbose Debug Information: " + verboseDebugInfo();
}
// Break into an attached debugger before throwing.
debugger;
throw new Error("Debug Failure. False expression: " + (message || "") + verboseDebugString);
}
}
// Unconditional failure: always throws via assert(false).
export function fail(message?: string): void {
Debug.assert(/*expression*/ false, message);
}
}
/** Remove an item from an array, moving everything to its right one space left. */
export function orderedRemoveItemAt<T>(array: T[], index: number): void {
    // Manual shifting was measured faster than splice/copyWithin in the original code.
    let i = index;
    while (i < array.length - 1) {
        array[i] = array[i + 1];
        i++;
    }
    array.pop();
}
/** O(1) removal that does NOT preserve order: the last element fills the hole. */
export function unorderedRemoveItemAt<T>(array: T[], index: number): void {
    const lastIndex = array.length - 1;
    array[index] = array[lastIndex];
    array.pop();
}
/** Remove the *first* occurrence of `item` from the array. */
export function unorderedRemoveItem<T>(array: T[], item: T): void {
    unorderedRemoveFirstItemWhere(array, element => element === item);
}
/** Remove the *first* element satisfying `predicate`. */
function unorderedRemoveFirstItemWhere<T>(array: T[], predicate: (element: T) => boolean): void {
    let i = 0;
    while (i < array.length) {
        if (predicate(array[i])) {
            unorderedRemoveItemAt(array, i);
            break;
        }
        i++;
    }
}
/** Returns the canonicalizer for the host: identity when case-sensitive, lower-casing otherwise. */
export function createGetCanonicalFileName(useCaseSensitiveFileNames: boolean): (fileName: string) => string {
    if (useCaseSensitiveFileNames) {
        return fileName => fileName;
    }
    return fileName => fileName.toLowerCase();
}
/**
 * patternStrings contains both pattern strings (containing "*") and regular strings.
 * Return an exact match if possible, or a pattern match, or undefined.
 * (These are verified by verifyCompilerOptions to have 0 or 1 "*" characters.)
 */
/* @internal */
export function matchPatternOrExact(patternStrings: string[], candidate: string): string | Pattern | undefined {
    const patterns: Pattern[] = [];
    for (const patternString of patternStrings) {
        const parsed = tryParsePattern(patternString);
        if (parsed) {
            patterns.push(parsed);
        }
        else if (patternString === candidate) {
            // An exact (non-wildcard) match always wins; stop searching.
            return patternString;
        }
    }
    return findBestPatternMatch(patterns, _ => _, candidate);
}
/* @internal */
export function patternText({prefix, suffix}: Pattern): string {
    // Reconstructs the original "prefix*suffix" spelling of the pattern.
    return prefix + "*" + suffix;
}
/**
 * Given that candidate matches pattern, returns the text matching the '*'.
 * E.g.: matchedText(tryParsePattern("foo*baz"), "foobarbaz") === "bar"
 */
/* @internal */
export function matchedText(pattern: Pattern, candidate: string): string {
    Debug.assert(isPatternMatch(pattern, candidate));
    // Fix: the previous `substr(start, ...)` call passed an *end index* as the
    // length argument, so the result ran past the '*' segment and included the
    // suffix (e.g. "barbaz" instead of "bar" for "foo*baz"/"foobarbaz").
    // `substring(start, end)` slices exactly the text the '*' matched.
    return candidate.substring(pattern.prefix.length, candidate.length - pattern.suffix.length);
}
/** Return the object corresponding to the best pattern to match `candidate`. */
/* @internal */
export function findBestPatternMatch<T>(values: T[], getPattern: (value: T) => Pattern, candidate: string): T | undefined {
    let bestMatch: T | undefined = undefined;
    // A longer matched prefix is considered more specific, hence "better".
    let bestPrefixLength = -1;
    for (const value of values) {
        const pattern = getPattern(value);
        if (isPatternMatch(pattern, candidate) && pattern.prefix.length > bestPrefixLength) {
            bestPrefixLength = pattern.prefix.length;
            bestMatch = value;
        }
    }
    return bestMatch;
}
function isPatternMatch({prefix, suffix}: Pattern, candidate: string) {
    // The candidate must be long enough that prefix and suffix do not overlap.
    if (candidate.length < prefix.length + suffix.length) {
        return false;
    }
    return startsWith(candidate, prefix) && endsWith(candidate, suffix);
}
/* @internal */
export function tryParsePattern(pattern: string): Pattern | undefined {
    // This should be verified outside of here and a proper error thrown.
    Debug.assert(hasZeroOrOneAsteriskCharacter(pattern));
    const indexOfStar = pattern.indexOf("*");
    if (indexOfStar === -1) {
        return undefined;
    }
    return {
        prefix: pattern.substr(0, indexOfStar),
        suffix: pattern.substr(indexOfStar + 1)
    };
}
export function positionIsSynthesized(pos: number): boolean {
// This is a fast way of testing the following conditions:
// pos === undefined || pos === null || isNaN(pos) || pos < 0;
// (NaN and undefined fail every comparison, so `!(pos >= 0)` covers them too.)
return !(pos >= 0);
}
/** True if an extension is one of the supported TypeScript extensions. */
export function extensionIsTypeScript(ext: Extension): boolean {
// Relies on the TypeScript members being ordered first in the Extension enum,
// ending at LastTypeScriptExtension. NOTE(review): enum declared elsewhere; confirm ordering.
return ext <= Extension.LastTypeScriptExtension;
}
/**
 * Gets the extension from a path.
 * Path must have a valid extension.
 */
export function extensionFromPath(path: string): Extension {
    // ".d.ts" must be probed before ".ts" so declaration files are not
    // misclassified as plain TypeScript; otherwise probe order is cosmetic.
    if (fileExtensionIs(path, ".d.ts")) return Extension.Dts;
    if (fileExtensionIs(path, ".ts")) return Extension.Ts;
    if (fileExtensionIs(path, ".tsx")) return Extension.Tsx;
    if (fileExtensionIs(path, ".js")) return Extension.Js;
    if (fileExtensionIs(path, ".jsx")) return Extension.Jsx;
    // Unreachable for valid inputs; fail loudly, then return a fallback to satisfy the type.
    Debug.fail(`File ${path} has unknown extension.`);
    return Extension.Js;
}
}
| apache-2.0 |
baardev/lbtb | concrete/elements/permission/details/tree/node.php | 418 | <?
// Abort unless rendered through the concrete5 dispatcher.
defined('C5_EXECUTE') or die("Access Denied.");
?>
<?
// Look up the requested permission key and scope it to this tree node
// ($node is supplied by the including element/tool).
// NOTE(review): $_REQUEST['pkID'] is used unvalidated -- presumably
// PermissionKey::getByID handles unknown ids; confirm.
$pk = PermissionKey::getByID($_REQUEST['pkID']);
$pk->setPermissionObject($node);
?>
<? /* Render the standard permission-detail element for this key. */ ?>
<? Loader::element("permission/detail", array('permissionKey' => $pk)); ?>
<script type="text/javascript">
var ccm_permissionDialogURL = '<?=Loader::helper('concrete/urls')->getToolsURL('permissions/dialogs/tree/node')?>?treeNodeID=<?=$node->getTreeNodeID()?>';
</script>
OneWorld0neDream/QingQiQiu | app/src/main/java/com/qf/lenovo/qingqiqiu/storage/StorageFileName.java | 248 | package com.qf.lenovo.qingqiqiu.storage;
/**
 * Created by 31098 on 9/21/2016.
 *
 * Storage-related string constants shared across the app.
 * NOTE(review): constant-interface pattern -- a final class with a private
 * constructor would be more idiomatic, but implementors of this interface
 * may exist, so the shape is left unchanged.
 */
public interface StorageFileName {
// Internal-storage file name, presumably caching the user's history tags.
String INTERSTORAGE_HISTORY_TAGS_FILE_NAME = "history_tags";
// Preference key, presumably flagging whether the app has been started before.
String PREFERENCE_KEY_FIRST_START = "FIRST_START_FLAG";
}
| apache-2.0 |
kave-cc/csharp-commons | KaVE.Commons/Utils/Json/EnumToStringConverter.cs | 1427 | /*
* Copyright 2014 Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using KaVE.Commons.Utils.Assertion;
using Newtonsoft.Json;
namespace KaVE.Commons.Utils.Json
{
/// <summary>
///     Serializes enum values as their name string and parses them back on read.
/// </summary>
public class EnumToStringConverter : JsonConverter
{
    public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
    {
        // Enum.ToString() yields the member name, which is what we persist.
        var serialized = value.ToString();
        writer.WriteValue(serialized);
    }

    public override object ReadJson(JsonReader reader,
        Type objectType,
        object existingValue,
        JsonSerializer serializer)
    {
        // Only string tokens are valid input; anything else is a programming error.
        Asserts.That(reader.TokenType == JsonToken.String);
        return Enum.Parse(objectType, (string) reader.Value);
    }

    public override bool CanConvert(Type objectType)
    {
        return objectType.IsEnum;
    }
}
} | apache-2.0 |
hobinyoon/apache-cassandra-3.0.5-src | src/java/org/apache/cassandra/db/compaction/DateTieredCompactionStrategy.java | 19412 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.compaction;
import java.util.*;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Predicate;
import com.google.common.collect.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.lifecycle.LifecycleTransaction;
import org.apache.cassandra.db.lifecycle.SSTableSet;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.schema.CompactionParams;
import org.apache.cassandra.utils.Pair;
import static com.google.common.collect.Iterables.filter;
public class DateTieredCompactionStrategy extends AbstractCompactionStrategy
{
private static final Logger logger = LoggerFactory.getLogger(DateTieredCompactionStrategy.class);
// DTCS-specific options parsed from the strategy's option map.
private final DateTieredCompactionStrategyOptions options;
// Estimate refreshed by updateEstimatedCompactionsByTasks during candidate selection.
protected volatile int estimatedRemainingTasks;
// SSTables this strategy instance tracks; used to filter the table's uncompacting set.
private final Set<SSTableReader> sstables = new HashSet<>();
// Millis timestamp of the last fully-expired-sstable scan; the scan is throttled
// because it is expensive (see getNextBackgroundSSTables).
private long lastExpiredCheck;
// STCS options forwarded to newestBucket -- presumably for size-tiered
// behaviour within a time window; confirm against newestBucket.
private final SizeTieredCompactionStrategyOptions stcsOptions;
/**
 * @param cfs the column family this strategy compacts
 * @param options raw strategy options from the table schema
 */
public DateTieredCompactionStrategy(ColumnFamilyStore cfs, Map<String, String> options)
{
super(cfs, options);
this.estimatedRemainingTasks = 0;
this.options = new DateTieredCompactionStrategyOptions(options);
// Tombstone compactions default to off for DTCS unless the user explicitly
// configured either tombstone option.
if (!options.containsKey(AbstractCompactionStrategy.TOMBSTONE_COMPACTION_INTERVAL_OPTION) && !options.containsKey(AbstractCompactionStrategy.TOMBSTONE_THRESHOLD_OPTION))
{
disableTombstoneCompactions = true;
logger.trace("Disabling tombstone compactions for DTCS");
}
else
logger.trace("Enabling tombstone compactions for DTCS");
this.stcsOptions = new SizeTieredCompactionStrategyOptions(options);
}
/**
 * Picks the next bucket of sstables and wraps it in a compaction task.
 * Retries while another thread races us for the same sstables; returns null
 * when there is nothing to compact.
 */
@Override
@SuppressWarnings("resource") // transaction ownership passes to the returned CompactionTask
public synchronized AbstractCompactionTask getNextBackgroundTask(int gcBefore)
{
while (true)
{
List<SSTableReader> latestBucket = getNextBackgroundSSTables(gcBefore);
if (latestBucket.isEmpty())
return null;
// tryModify returns null when the sstables were grabbed concurrently; loop and re-pick.
LifecycleTransaction modifier = cfs.getTracker().tryModify(latestBucket, OperationType.COMPACTION);
if (modifier != null)
return new CompactionTask(cfs, modifier, gcBefore);
}
}
/**
 * Selects the sstables for the next background compaction: the best DTCS
 * bucket of non-expired sstables, plus (at most once per
 * expiredSSTableCheckFrequency) any fully expired sstables, which are always
 * included when found.
 *
 * @param gcBefore gc horizon used to identify fully expired sstables
 * @return sstables to compact next; empty list when there is nothing to do
 */
private List<SSTableReader> getNextBackgroundSSTables(final int gcBefore)
{
if (Iterables.isEmpty(cfs.getSSTables(SSTableSet.LIVE)))
return Collections.emptyList();
// Only consider sstables tracked by this strategy that are not already compacting.
Set<SSTableReader> uncompacting = ImmutableSet.copyOf(filter(cfs.getUncompactingSSTables(), sstables::contains));
Set<SSTableReader> expired = Collections.emptySet();
// we only check for expired sstables every 10 minutes (by default) due to it being an expensive operation
if (System.currentTimeMillis() - lastExpiredCheck > options.expiredSSTableCheckFrequency)
{
// Find fully expired SSTables. Those will be included no matter what.
expired = CompactionController.getFullyExpiredSSTables(cfs, uncompacting, cfs.getOverlappingSSTables(SSTableSet.CANONICAL, uncompacting), gcBefore);
lastExpiredCheck = System.currentTimeMillis();
}
Set<SSTableReader> candidates = Sets.newHashSet(filterSuspectSSTables(uncompacting));
List<SSTableReader> compactionCandidates = new ArrayList<>(getNextNonExpiredSSTables(Sets.difference(candidates, expired), gcBefore));
if (!expired.isEmpty())
{
logger.trace("Including expired sstables: {}", expired);
compactionCandidates.addAll(expired);
}
return compactionCandidates;
}
/**
 * Returns the next compaction candidates among non-expired sstables: a normal
 * DTCS bucket when one qualifies, otherwise a single-sstable tombstone
 * compaction on the smallest sstable worth purging, otherwise nothing.
 */
private List<SSTableReader> getNextNonExpiredSSTables(Iterable<SSTableReader> nonExpiringSSTables, final int gcBefore)
{
    // First preference: a regular bucketed DTCS compaction.
    final int base = cfs.getMinimumCompactionThreshold();
    final long now = getNow();
    final List<SSTableReader> mostInteresting = getCompactionCandidates(nonExpiringSSTables, now, base);
    if (mostInteresting != null)
        return mostInteresting;

    // Fallback: single sstables whose droppable tombstone ratio exceeds the threshold.
    final List<SSTableReader> tombstoneCandidates = Lists.newArrayList();
    for (SSTableReader sstable : nonExpiringSSTables)
    {
        if (worthDroppingTombstones(sstable, gcBefore))
            tombstoneCandidates.add(sstable);
    }
    if (tombstoneCandidates.isEmpty())
        return Collections.emptyList();
    // Compact the smallest qualifying sstable first.
    return Collections.singletonList(Collections.min(tombstoneCandidates, new SSTableReader.SizeComparator()));
}
    /**
     * Runs the DTCS time-window bucketing over the given candidates.
     *
     * @param candidateSSTables sstables eligible for compaction
     * @param now the strategy's notion of "current time" (max timestamp across sstables)
     * @param base number of same-sized windows before windows coalesce
     * @return the most interesting bucket of sstables, or {@code null} when no bucket meets
     *         the thresholds (callers treat null as "nothing to do")
     */
    private List<SSTableReader> getCompactionCandidates(Iterable<SSTableReader> candidateSSTables, long now, int base)
    {
        // Exclude sstables older than maxSSTableAge; those no longer participate in compaction.
        Iterable<SSTableReader> candidates = filterOldSSTables(Lists.newArrayList(candidateSSTables), options.maxSSTableAge, now);

        List<List<SSTableReader>> buckets = getBuckets(createSSTableAndMinTimestampPairs(candidates), options.baseTime, base, now, options.maxWindowSize);
        logger.debug("Compaction buckets are {}", buckets);
        // Side effect: refresh the estimated-remaining-tasks gauge from this bucketing pass.
        updateEstimatedCompactionsByTasks(buckets);
        List<SSTableReader> mostInteresting = newestBucket(buckets,
                                                          cfs.getMinimumCompactionThreshold(),
                                                          cfs.getMaximumCompactionThreshold(),
                                                          now,
                                                          options.baseTime,
                                                          options.maxWindowSize,
                                                          stcsOptions);
        if (!mostInteresting.isEmpty())
            return mostInteresting;
        return null;
    }
    /**
     * Gets the timestamp that DateTieredCompactionStrategy considers to be the "current time".
     * @return the maximum timestamp across all SSTables.
     * @throws java.util.NoSuchElementException if there are no SSTables.
     */
    private long getNow()
    {
        // no need to convert to collection if had an Iterables.max(), but not present in standard toolkit, and not worth adding
        List<SSTableReader> list = new ArrayList<>();
        Iterables.addAll(list, cfs.getSSTables(SSTableSet.LIVE));
        // Collections.max on an empty list is what raises the documented NoSuchElementException.
        return Collections.max(list, (o1, o2) -> Long.compare(o1.getMaxTimestamp(), o2.getMaxTimestamp()))
                          .getMaxTimestamp();
    }
/**
* Removes all sstables with max timestamp older than maxSSTableAge.
* @param sstables all sstables to consider
* @param maxSSTableAge the age in milliseconds when an SSTable stops participating in compactions
* @param now current time. SSTables with max timestamp less than (now - maxSSTableAge) are filtered.
* @return a list of sstables with the oldest sstables excluded
*/
@VisibleForTesting
static Iterable<SSTableReader> filterOldSSTables(List<SSTableReader> sstables, long maxSSTableAge, long now)
{
if (maxSSTableAge == 0)
return sstables;
final long cutoff = now - maxSSTableAge;
return filter(sstables, new Predicate<SSTableReader>()
{
@Override
public boolean apply(SSTableReader sstable)
{
return sstable.getMaxTimestamp() >= cutoff;
}
});
}
    /**
     * Pairs each sstable with its minimum timestamp, which is the value DTCS buckets on.
     *
     * @param sstables the sstables to pair up
     * @return a list of (sstable, min timestamp) pairs, in iteration order
     */
    public static List<Pair<SSTableReader, Long>> createSSTableAndMinTimestampPairs(Iterable<SSTableReader> sstables)
    {
        List<Pair<SSTableReader, Long>> sstableMinTimestampPairs = Lists.newArrayListWithCapacity(Iterables.size(sstables));
        for (SSTableReader sstable : sstables)
            sstableMinTimestampPairs.add(Pair.create(sstable, sstable.getMinTimestamp()));
        return sstableMinTimestampPairs;
    }
    /** Starts tracking the given sstable in this strategy instance. */
    @Override
    public void addSSTable(SSTableReader sstable)
    {
        sstables.add(sstable);
    }

    /** Stops tracking the given sstable (e.g. once it has been compacted away). */
    @Override
    public void removeSSTable(SSTableReader sstable)
    {
        sstables.remove(sstable);
    }
    /**
     * A target time span used for bucketing SSTables based on timestamps.
     */
    private static class Target
    {
        // How big a range of timestamps fit inside the target.
        public final long size;
        // A timestamp t hits the target iff t / size == divPosition.
        public final long divPosition;
        // Upper bound on how large a single window may grow when targets coalesce.
        public final long maxWindowSize;

        public Target(long size, long divPosition, long maxWindowSize)
        {
            this.size = size;
            this.divPosition = divPosition;
            this.maxWindowSize = maxWindowSize;
        }

        /**
         * Compares the target to a timestamp.
         * @param timestamp the timestamp to compare.
         * @return a negative integer, zero, or a positive integer as the target lies before, covering, or after than the timestamp.
         */
        public int compareToTimestamp(long timestamp)
        {
            return Long.compare(divPosition, timestamp / size);
        }

        /**
         * Tells if the timestamp hits the target.
         * @param timestamp the timestamp to test.
         * @return <code>true</code> iff timestamp / size == divPosition.
         */
        public boolean onTarget(long timestamp)
        {
            return compareToTimestamp(timestamp) == 0;
        }

        /**
         * Gets the next target, which represents an earlier time span.
         * @param base The number of contiguous targets that will have the same size. Targets following those will be <code>base</code> times as big.
         * @return the target covering the time span immediately before this one.
         */
        public Target nextTarget(int base)
        {
            // Keep the current window size while this group of `base` windows is not yet
            // complete, or when growing would exceed maxWindowSize; otherwise coalesce into
            // a window `base` times larger.
            if (divPosition % base > 0 || size * base > maxWindowSize)
                return new Target(size, divPosition - 1, maxWindowSize);
            else
                return new Target(size * base, divPosition / base - 1, maxWindowSize);
        }
    }
    /**
     * Group files with similar min timestamp into buckets. Files with recent min timestamps are grouped together into
     * buckets designated to short timespans while files with older timestamps are grouped into buckets representing
     * longer timespans.
     * @param files pairs consisting of a file and its min timestamp
     * @param timeUnit size (same units as the timestamps) of the initial, most recent window
     * @param base number of contiguous same-sized windows before windows become {@code base} times bigger
     * @param now the current time, anchoring the initial window
     * @param maxWindowSize upper bound on how large a window may grow
     * @return a list of buckets of files. The list is ordered such that the files with newest timestamps come first.
     *         Each bucket is also a list of files ordered from newest to oldest.
     */
    @VisibleForTesting
    static <T> List<List<T>> getBuckets(Collection<Pair<T, Long>> files, long timeUnit, int base, long now, long maxWindowSize)
    {
        // Sort files by age. Newest first.
        final List<Pair<T, Long>> sortedFiles = Lists.newArrayList(files);
        Collections.sort(sortedFiles, Collections.reverseOrder(new Comparator<Pair<T, Long>>()
        {
            public int compare(Pair<T, Long> p1, Pair<T, Long> p2)
            {
                return p1.right.compareTo(p2.right);
            }
        }));

        List<List<T>> buckets = Lists.newArrayList();
        Target target = getInitialTarget(now, timeUnit, maxWindowSize);
        PeekingIterator<Pair<T, Long>> it = Iterators.peekingIterator(sortedFiles.iterator());

        outerLoop:
        while (it.hasNext())
        {
            // Align the current target with the next file: skip files newer than the target,
            // widen (step back) the target while it is newer than the file.
            while (!target.onTarget(it.peek().right))
            {
                // If the file is too new for the target, skip it.
                if (target.compareToTimestamp(it.peek().right) < 0)
                {
                    it.next();
                    if (!it.hasNext())
                        break outerLoop;
                }
                else // If the file is too old for the target, switch targets.
                    target = target.nextTarget(base);
            }

            // Collect every consecutive file that falls inside the current target into one bucket.
            List<T> bucket = Lists.newArrayList();
            while (target.onTarget(it.peek().right))
            {
                bucket.add(it.next().left);
                if (!it.hasNext())
                    break;
            }
            buckets.add(bucket);
        }
        return buckets;
    }
    /** Builds the target covering {@code now}, i.e. the incoming window of size {@code timeUnit}. */
    @VisibleForTesting
    static Target getInitialTarget(long now, long timeUnit, long maxWindowSize)
    {
        return new Target(timeUnit, now / timeUnit, maxWindowSize);
    }
private void updateEstimatedCompactionsByTasks(List<List<SSTableReader>> tasks)
{
int n = 0;
for (List<SSTableReader> bucket : tasks)
{
for (List<SSTableReader> stcsBucket : getSTCSBuckets(bucket, stcsOptions))
if (stcsBucket.size() >= cfs.getMinimumCompactionThreshold())
n += Math.ceil((double)stcsBucket.size() / cfs.getMaximumCompactionThreshold());
}
estimatedRemainingTasks = n;
}
    /**
     * @param buckets list of buckets, sorted from newest to oldest, from which to return the newest bucket within thresholds.
     * @param minThreshold minimum number of sstables in a bucket to qualify.
     * @param maxThreshold maximum number of sstables to compact at once (the returned bucket will be trimmed down to this).
     * @param now the current time, used to identify the incoming (most recent) window
     * @param baseTime size of the incoming window
     * @param maxWindowSize upper bound on window growth
     * @param stcsOptions options used when sub-bucketing a window with STCS
     * @return a bucket (list) of sstables to compact.
     */
    @VisibleForTesting
    static List<SSTableReader> newestBucket(List<List<SSTableReader>> buckets, int minThreshold, int maxThreshold, long now, long baseTime, long maxWindowSize, SizeTieredCompactionStrategyOptions stcsOptions)
    {
        // If the "incoming window" has at least minThreshold SSTables, choose that one.
        // For any other bucket, at least 2 SSTables is enough.
        // In any case, limit to maxThreshold SSTables.
        Target incomingWindow = getInitialTarget(now, baseTime, maxWindowSize);
        for (List<SSTableReader> bucket : buckets)
        {
            // The bucket's first sstable is its newest; it determines the bucket's window.
            boolean inFirstWindow = incomingWindow.onTarget(bucket.get(0).getMinTimestamp());
            if (bucket.size() >= minThreshold || (bucket.size() >= 2 && !inFirstWindow))
            {
                List<SSTableReader> stcsSSTables = getSSTablesForSTCS(bucket, inFirstWindow ? minThreshold : 2, maxThreshold, stcsOptions);
                if (!stcsSSTables.isEmpty())
                    return stcsSSTables;
            }
        }
        return Collections.emptyList();
    }
private static List<SSTableReader> getSSTablesForSTCS(Collection<SSTableReader> sstables, int minThreshold, int maxThreshold, SizeTieredCompactionStrategyOptions stcsOptions)
{
List<SSTableReader> s = SizeTieredCompactionStrategy.mostInterestingBucket(getSTCSBuckets(sstables, stcsOptions), minThreshold, maxThreshold);
logger.debug("Got sstables {} for STCS from {}", s, sstables);
return s;
}
    /** Buckets the given sstables by size (suspect ones excluded) using standard STCS bucketing. */
    private static List<List<SSTableReader>> getSTCSBuckets(Collection<SSTableReader> sstables, SizeTieredCompactionStrategyOptions stcsOptions)
    {
        List<Pair<SSTableReader,Long>> pairs = SizeTieredCompactionStrategy.createSSTableAndLengthPairs(AbstractCompactionStrategy.filterSuspectSSTables(sstables));
        return SizeTieredCompactionStrategy.getBuckets(pairs,
                                                       stcsOptions.bucketHigh,
                                                       stcsOptions.bucketLow,
                                                       stcsOptions.minSSTableSize);
    }
    /**
     * Builds a single compaction task covering every non-suspect sstable of this strategy.
     *
     * @return a singleton task collection, or {@code null} when there is nothing to compact
     *         or the sstables could not be marked for compaction.
     */
    @Override
    @SuppressWarnings("resource") // txn is handed to the CompactionTask, which presumably closes it -- confirm
    public synchronized Collection<AbstractCompactionTask> getMaximalTask(int gcBefore, boolean splitOutput)
    {
        Iterable<SSTableReader> filteredSSTables = filterSuspectSSTables(sstables);
        if (Iterables.isEmpty(filteredSSTables))
            return null;
        LifecycleTransaction txn = cfs.getTracker().tryModify(filteredSSTables, OperationType.COMPACTION);
        if (txn == null)
            return null;
        return Collections.<AbstractCompactionTask>singleton(new CompactionTask(cfs, txn, gcBefore));
    }
    /**
     * Builds a compaction task for an explicit, user-supplied set of sstables.
     *
     * @return the task, or {@code null} when the sstables could not be marked for compaction
     *         (e.g. a background compaction already claimed them).
     */
    @Override
    @SuppressWarnings("resource") // modifier is handed to the CompactionTask, which presumably closes it -- confirm
    public synchronized AbstractCompactionTask getUserDefinedTask(Collection<SSTableReader> sstables, int gcBefore)
    {
        assert !sstables.isEmpty(); // checked for by CM.submitUserDefined
        LifecycleTransaction modifier = cfs.getTracker().tryModify(sstables, OperationType.COMPACTION);
        if (modifier == null)
        {
            logger.trace("Unable to mark {} for compaction; probably a background compaction got to it first. You can disable background compactions temporarily if this is a problem", sstables);
            return null;
        }
        return new CompactionTask(cfs, modifier, gcBefore).setUserDefined(true);
    }
    /** @return the task count computed by the most recent bucketing pass. */
    public int getEstimatedRemainingTasks()
    {
        return estimatedRemainingTasks;
    }

    /** DTCS imposes no per-sstable size limit. */
    public long getMaxSSTableBytes()
    {
        return Long.MAX_VALUE;
    }
/**
* DTCS should not group sstables for anticompaction - this can mix new and old data
*/
@Override
public Collection<Collection<SSTableReader>> groupSSTablesForAntiCompaction(Collection<SSTableReader> sstablesToGroup)
{
Collection<Collection<SSTableReader>> groups = new ArrayList<>();
for (SSTableReader sstable : sstablesToGroup)
{
groups.add(Collections.singleton(sstable));
}
return groups;
}
    /**
     * Validates the user-supplied compaction options for this strategy.
     *
     * @param options the raw option map from the schema
     * @return the options not consumed by any validator in the chain
     * @throws ConfigurationException if an option value is invalid
     */
    public static Map<String, String> validateOptions(Map<String, String> options) throws ConfigurationException
    {
        Map<String, String> uncheckedOptions = AbstractCompactionStrategy.validateOptions(options);
        uncheckedOptions = DateTieredCompactionStrategyOptions.validateOptions(options, uncheckedOptions);
        // min/max threshold are removed from the unchecked set before the STCS validator runs,
        // presumably because they are consumed elsewhere -- confirm.
        uncheckedOptions.remove(CompactionParams.Option.MIN_THRESHOLD.toString());
        uncheckedOptions.remove(CompactionParams.Option.MAX_THRESHOLD.toString());
        uncheckedOptions = SizeTieredCompactionStrategyOptions.validateOptions(options, uncheckedOptions);
        return uncheckedOptions;
    }
public String toString()
{
return String.format("DateTieredCompactionStrategy[%s/%s]",
cfs.getMinimumCompactionThreshold(),
cfs.getMaximumCompactionThreshold());
}
}
| apache-2.0 |
flashscope/testJavaWebServer | public_html/ex07_cli.py | 443 | # Echo client program
# Echo/HTTP client script (Python 2 syntax: `print` statement, `xrange`).
# Sends a raw GET to a local test server and then reads the response very
# slowly -- after each chunk it counts to 100 with 1-second sleeps, presumably
# to exercise the server's handling of slow clients. TODO confirm intent.
import socket
import time

HOST = 'localhost'  # test server host
PORT = 8000         # test server port

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((HOST, PORT))
s.sendall('GET /\r\n\r\n')  # minimal HTTP-like request

resp = ''
while 1:
    data = s.recv(8192)
    resp += data
    if not data:
        # Empty read: the server closed the connection.
        break
    else:
        print len(data), 'bytes read'
    # Deliberate ~100 s stall between reads (see header note).
    for i in xrange(100):
        print i
        time.sleep(1)
s.close()
print 'Received:', resp
#print 'Received', repr(data)
| apache-2.0 |
kvr000/zbynek-java-exp | gradle/gradle-generate-exp/src/main/java/X.java | 18 | public class X {}
| apache-2.0 |
rrpgfirecast/firecast | Plugins/Sheets/Ficha Tormenta 20/output/rdkObjs/Ficha Tormenta 20/MagiaForm.lfm.lua | 10528 | require("firecast.lua");
local __o_rrpgObjs = require("rrpgObjs.lua");
require("rrpgGUI.lua");
require("rrpgDialogs.lua");
require("rrpgLFM.lua");
require("ndb.lua");
require("locale.lua");
local __o_Utils = require("utils.lua");
-- Builds the "frmMagiaForm" spell sheet form. Machine-generated from
-- MagiaForm.lfm: a header rectangle holding seven labelled edit fields bound
-- to the spell's data node, a delete ("X") button, and a rich-text editor for
-- the spell's effect description.
local function constructNew_frmMagiaForm()
    local obj = GUI.fromHandle(_obj_newObject("form"));
    local self = obj;
    local sheet = nil;  -- the bound data node, captured for the delete handler

    -- Wrap the inherited setNodeObject so the bound node is also cached in
    -- `sheet` and exposed as obj.sheet.
    rawset(obj, "_oldSetNodeObjectFunction", rawget(obj, "setNodeObject"));

    function obj:setNodeObject(nodeObject)
        sheet = nodeObject;
        self.sheet = nodeObject;
        self:_oldSetNodeObjectFunction(nodeObject);
    end;

    function obj:setNodeDatabase(nodeObject)
        self:setNodeObject(nodeObject);
    end;

    _gui_assignInitialParentForForm(obj.handle);
    obj:beginUpdate();
    obj:setName("frmMagiaForm");
    obj:setWidth(550);
    obj:setHeight(150);
    obj:setMargins({right=5,bottom=5});
    obj:setTheme("light");

    -- Header strip containing the labelled fields.
    obj.rectangle1 = GUI.fromHandle(_obj_newObject("rectangle"));
    obj.rectangle1:setParent(obj);
    obj.rectangle1:setWidth(550);
    obj.rectangle1:setHeight(50);
    obj.rectangle1:setColor("#F0FFFF");
    obj.rectangle1:setName("rectangle1");

    -- Field: spell name ("nome").
    obj.layout1 = GUI.fromHandle(_obj_newObject("layout"));
    obj.layout1:setParent(obj.rectangle1);
    obj.layout1:setAlign("left");
    obj.layout1:setWidth(150);
    obj.layout1:setMargins({right=5});
    obj.layout1:setName("layout1");
    obj.label1 = GUI.fromHandle(_obj_newObject("label"));
    obj.label1:setParent(obj.layout1);
    obj.label1:setText("Magia");
    obj.label1:setWidth(150);
    obj.label1:setHorzTextAlign("leading");
    obj.label1:setFontSize(13);
    obj.label1:setName("label1");
    obj.edit1 = GUI.fromHandle(_obj_newObject("edit"));
    obj.edit1:setParent(obj.layout1);
    obj.edit1:setTop(20);
    obj.edit1:setField("nome");
    obj.edit1:setWidth(150);
    obj.edit1:setHeight(25);
    obj.edit1:setName("edit1");

    -- Field: school ("escola").
    obj.layout2 = GUI.fromHandle(_obj_newObject("layout"));
    obj.layout2:setParent(obj.rectangle1);
    obj.layout2:setAlign("left");
    obj.layout2:setWidth(75);
    obj.layout2:setMargins({right=5});
    obj.layout2:setName("layout2");
    obj.label2 = GUI.fromHandle(_obj_newObject("label"));
    obj.label2:setParent(obj.layout2);
    obj.label2:setText("Escola");
    obj.label2:setWidth(75);
    obj.label2:setHorzTextAlign("leading");
    obj.label2:setFontSize(13);
    obj.label2:setName("label2");
    obj.edit2 = GUI.fromHandle(_obj_newObject("edit"));
    obj.edit2:setParent(obj.layout2);
    obj.edit2:setTop(20);
    obj.edit2:setField("escola");
    obj.edit2:setWidth(75);
    obj.edit2:setHeight(25);
    obj.edit2:setName("edit2");

    -- Field: casting time ("execucao").
    obj.layout3 = GUI.fromHandle(_obj_newObject("layout"));
    obj.layout3:setParent(obj.rectangle1);
    obj.layout3:setAlign("left");
    obj.layout3:setWidth(50);
    obj.layout3:setMargins({right=5});
    obj.layout3:setName("layout3");
    obj.label3 = GUI.fromHandle(_obj_newObject("label"));
    obj.label3:setParent(obj.layout3);
    obj.label3:setText("Execução");
    obj.label3:setWidth(50);
    obj.label3:setHorzTextAlign("leading");
    obj.label3:setFontSize(11);
    obj.label3:setName("label3");
    obj.edit3 = GUI.fromHandle(_obj_newObject("edit"));
    obj.edit3:setParent(obj.layout3);
    obj.edit3:setTop(20);
    obj.edit3:setField("execucao");
    obj.edit3:setWidth(50);
    obj.edit3:setHeight(25);
    obj.edit3:setName("edit3");

    -- Field: range ("alcance").
    obj.layout4 = GUI.fromHandle(_obj_newObject("layout"));
    obj.layout4:setParent(obj.rectangle1);
    obj.layout4:setAlign("left");
    obj.layout4:setWidth(50);
    obj.layout4:setMargins({right=5});
    obj.layout4:setName("layout4");
    obj.label4 = GUI.fromHandle(_obj_newObject("label"));
    obj.label4:setParent(obj.layout4);
    obj.label4:setText("Alcance");
    obj.label4:setWidth(50);
    obj.label4:setHorzTextAlign("leading");
    obj.label4:setFontSize(13);
    obj.label4:setName("label4");
    obj.edit4 = GUI.fromHandle(_obj_newObject("edit"));
    obj.edit4:setParent(obj.layout4);
    obj.edit4:setTop(20);
    obj.edit4:setField("alcance");
    obj.edit4:setWidth(50);
    obj.edit4:setHeight(25);
    obj.edit4:setName("edit4");

    -- Field: area ("area").
    obj.layout5 = GUI.fromHandle(_obj_newObject("layout"));
    obj.layout5:setParent(obj.rectangle1);
    obj.layout5:setAlign("left");
    obj.layout5:setWidth(50);
    obj.layout5:setMargins({right=5});
    obj.layout5:setName("layout5");
    obj.label5 = GUI.fromHandle(_obj_newObject("label"));
    obj.label5:setParent(obj.layout5);
    obj.label5:setText("Área");
    obj.label5:setWidth(50);
    obj.label5:setHorzTextAlign("leading");
    obj.label5:setFontSize(13);
    obj.label5:setName("label5");
    obj.edit5 = GUI.fromHandle(_obj_newObject("edit"));
    obj.edit5:setParent(obj.layout5);
    obj.edit5:setTop(20);
    obj.edit5:setField("area");
    obj.edit5:setWidth(50);
    obj.edit5:setHeight(25);
    obj.edit5:setName("edit5");

    -- Field: duration ("duracao").
    obj.layout6 = GUI.fromHandle(_obj_newObject("layout"));
    obj.layout6:setParent(obj.rectangle1);
    obj.layout6:setAlign("left");
    obj.layout6:setWidth(50);
    obj.layout6:setMargins({right=5});
    obj.layout6:setName("layout6");
    obj.label6 = GUI.fromHandle(_obj_newObject("label"));
    obj.label6:setParent(obj.layout6);
    obj.label6:setText("Duração");
    obj.label6:setWidth(50);
    obj.label6:setHorzTextAlign("leading");
    obj.label6:setFontSize(13);
    obj.label6:setName("label6");
    obj.edit6 = GUI.fromHandle(_obj_newObject("edit"));
    obj.edit6:setParent(obj.layout6);
    obj.edit6:setTop(20);
    obj.edit6:setField("duracao");
    obj.edit6:setWidth(50);
    obj.edit6:setHeight(25);
    obj.edit6:setName("edit6");

    -- Field: saving throw ("resistencia").
    obj.layout7 = GUI.fromHandle(_obj_newObject("layout"));
    obj.layout7:setParent(obj.rectangle1);
    obj.layout7:setAlign("left");
    obj.layout7:setWidth(50);
    obj.layout7:setMargins({right=5});
    obj.layout7:setName("layout7");
    obj.label7 = GUI.fromHandle(_obj_newObject("label"));
    obj.label7:setParent(obj.layout7);
    obj.label7:setText("Resistência");
    obj.label7:setWidth(50);
    obj.label7:setHorzTextAlign("leading");
    obj.label7:setFontSize(9);
    obj.label7:setName("label7");
    obj.edit7 = GUI.fromHandle(_obj_newObject("edit"));
    obj.edit7:setParent(obj.layout7);
    obj.edit7:setTop(20);
    obj.edit7:setField("resistencia");
    obj.edit7:setWidth(50);
    obj.edit7:setHeight(25);
    obj.edit7:setName("edit7");

    -- Delete button.
    obj.button1 = GUI.fromHandle(_obj_newObject("button"));
    obj.button1:setParent(obj.rectangle1);
    obj.button1:setAlign("left");
    obj.button1:setWidth(25);
    obj.button1:setText("X");
    obj.button1:setMargins({left=5});
    obj.button1:setName("button1");

    -- Rich-text editor bound to the spell's effect description ("efeito").
    obj.textEditor1 = GUI.fromHandle(_obj_newObject("textEditor"));
    obj.textEditor1:setParent(obj);
    obj.textEditor1:setTop(50);
    obj.textEditor1:setWidth(550);
    obj.textEditor1:setHeight(100);
    obj.textEditor1:setField("efeito");
    obj.textEditor1:setMargins({left=5,right=5,bottom=5});
    obj.textEditor1:setName("textEditor1");

    -- Delete button: confirm with the user, then remove the bound data node.
    obj._e_event0 = obj.button1:addEventListener("onClick",
        function (_)
            dialogs.confirmOkCancel("Tem certeza que quer apagar essa magia?",
                function (confirmado)
                    if confirmado then
                        ndb.deleteNode(sheet);
                    end;
                end);
        end, obj);

    function obj:_releaseEvents()
        __o_rrpgObjs.removeEventListenerById(self._e_event0);
    end;

    -- Teardown: release listeners, unbind the data node, destroy every child
    -- control (generated order), then chain to the inherited destroy.
    obj._oldLFMDestroy = obj.destroy;

    function obj:destroy()
        self:_releaseEvents();

        if (self.handle ~= 0) and (self.setNodeDatabase ~= nil) then
            self:setNodeDatabase(nil);
        end;

        if self.label7 ~= nil then self.label7:destroy(); self.label7 = nil; end;
        if self.edit3 ~= nil then self.edit3:destroy(); self.edit3 = nil; end;
        if self.edit7 ~= nil then self.edit7:destroy(); self.edit7 = nil; end;
        if self.label5 ~= nil then self.label5:destroy(); self.label5 = nil; end;
        if self.layout6 ~= nil then self.layout6:destroy(); self.layout6 = nil; end;
        if self.button1 ~= nil then self.button1:destroy(); self.button1 = nil; end;
        if self.label1 ~= nil then self.label1:destroy(); self.label1 = nil; end;
        if self.layout4 ~= nil then self.layout4:destroy(); self.layout4 = nil; end;
        if self.edit4 ~= nil then self.edit4:destroy(); self.edit4 = nil; end;
        if self.label3 ~= nil then self.label3:destroy(); self.label3 = nil; end;
        if self.label4 ~= nil then self.label4:destroy(); self.label4 = nil; end;
        if self.label6 ~= nil then self.label6:destroy(); self.label6 = nil; end;
        if self.textEditor1 ~= nil then self.textEditor1:destroy(); self.textEditor1 = nil; end;
        if self.layout3 ~= nil then self.layout3:destroy(); self.layout3 = nil; end;
        if self.edit5 ~= nil then self.edit5:destroy(); self.edit5 = nil; end;
        if self.edit2 ~= nil then self.edit2:destroy(); self.edit2 = nil; end;
        if self.edit6 ~= nil then self.edit6:destroy(); self.edit6 = nil; end;
        if self.layout1 ~= nil then self.layout1:destroy(); self.layout1 = nil; end;
        if self.rectangle1 ~= nil then self.rectangle1:destroy(); self.rectangle1 = nil; end;
        if self.edit1 ~= nil then self.edit1:destroy(); self.edit1 = nil; end;
        if self.layout2 ~= nil then self.layout2:destroy(); self.layout2 = nil; end;
        if self.layout5 ~= nil then self.layout5:destroy(); self.layout5 = nil; end;
        if self.layout7 ~= nil then self.layout7:destroy(); self.layout7 = nil; end;
        if self.label2 ~= nil then self.label2:destroy(); self.label2 = nil; end;
        self:_oldLFMDestroy();
    end;

    obj:endUpdate();
    return obj;
end;
--- Factory for frmMagiaForm. Brackets the construction with the global
--- object-loading guards so the GUI runtime defers layout work, and fails
--- loudly if construction produced nothing.
function newfrmMagiaForm()
    local form = nil;
    __o_rrpgObjs.beginObjectsLoading();
    __o_Utils.tryFinally(
        function()
            form = constructNew_frmMagiaForm();
        end,
        function()
            __o_rrpgObjs.endObjectsLoading();
        end);
    assert(form ~= nil);
    return form;
end;
-- Form descriptor registered with Firecast so the sheet system can look up
-- and instantiate this editor by name.
local _frmMagiaForm = {
    newEditor = newfrmMagiaForm, 
    new = newfrmMagiaForm, 
    name = "frmMagiaForm", 
    dataType = "", 
    formType = "undefined", 
    formComponentName = "form", 
    title = "", 
    description=""};

frmMagiaForm = _frmMagiaForm;
Firecast.registrarForm(_frmMagiaForm);
return _frmMagiaForm;
Yiiinsh/x-pipe | redis/redis-keeper/src/main/java/com/ctrip/xpipe/redis/keeper/config/DefaultKeeperContainerConfig.java | 807 | package com.ctrip.xpipe.redis.keeper.config;
import com.ctrip.xpipe.api.config.Config;
import javax.annotation.PostConstruct;
/**
 * Default implementation of {@link KeeperContainerConfig} that reads values from the global
 * {@link Config} source, falling back to built-in defaults.
 *
 * @author Jason Song(song_s@ctrip.com)
 */
public class DefaultKeeperContainerConfig implements KeeperContainerConfig {

    public static final String REPLICATION_STORE_DIR = "replication.store.dir";

    private Config config;

    @PostConstruct
    private void init() {
        // Resolve the shared Config implementation once the bean is constructed.
        config = Config.DEFAULT;
    }

    @Override
    public String getReplicationStoreDir() {
        // Falls back to the JVM working directory when no store dir is configured.
        return config.get(REPLICATION_STORE_DIR, getDefaultRdsDir());
    }

    @Override
    public String getMetaServerUrl() {
        // NOTE(review): META_SERVER_URL is not declared in this class; presumably a constant
        // inherited from KeeperContainerConfig -- confirm.
        return config.get(META_SERVER_URL, "http://127.0.0.1:9747");
    }

    private String getDefaultRdsDir() {
        return System.getProperty("user.dir");
    }
}
| apache-2.0 |
dennybaa/mistral | mistral/tests/unit/test_expressions.py | 9515 | # Copyright 2013 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mistral import exceptions as exc
from mistral import expressions as expr
from mistral.tests import base
# Evaluation context fixture: one server record plus a top-level status.
DATA = {
    "server": {
        "id": "03ea824a-aa24-4105-9131-66c48ae54acf",
        "name": "cloud-fedora",
        "status": "ACTIVE"
    },
    "status": "OK"
}

# Evaluation context fixture with a list, for YAQL collection queries.
SERVERS = {
    "servers": [
        {'name': 'centos'},
        {'name': 'ubuntu'},
        {'name': 'fedora'}
    ]
}
class YaqlEvaluatorTest(base.BaseTest):
    """Tests for the plain (non-inline) YAQL expression evaluator."""

    def setUp(self):
        super(YaqlEvaluatorTest, self).setUp()
        self._evaluator = expr.YAQLEvaluator()

    def test_expression_result(self):
        """Basic lookups and comparisons against the DATA fixture."""
        res = self._evaluator.evaluate('$.server', DATA)
        self.assertEqual(res, {
            'id': "03ea824a-aa24-4105-9131-66c48ae54acf",
            'name': 'cloud-fedora',
            'status': 'ACTIVE'
        })

        res = self._evaluator.evaluate('$.server.id', DATA)
        self.assertEqual(res, '03ea824a-aa24-4105-9131-66c48ae54acf')

        res = self._evaluator.evaluate("$.server.status = 'ACTIVE'", DATA)
        self.assertTrue(res)

    def test_wrong_expression(self):
        """Bad input: false comparisons, missing keys, and non-YAQL strings."""
        res = self._evaluator.evaluate("$.status = 'Invalid value'", DATA)
        self.assertFalse(res)

        # Accessing a missing key raises rather than returning None.
        self.assertRaises(
            exc.YaqlEvaluationException,
            self._evaluator.evaluate,
            '$.wrong_key',
            DATA
        )

        # A string that is not a YAQL expression is returned unchanged.
        expression_str = 'invalid_expression_string'
        res = self._evaluator.evaluate(expression_str, DATA)
        self.assertEqual(res, expression_str)

    def test_select_result(self):
        """where() filtering over a collection in the context."""
        res = self._evaluator.evaluate(
            '$.servers.where($.name = ubuntu)',
            SERVERS
        )
        item = list(res)[0]
        self.assertEqual(item, {'name': 'ubuntu'})

    def test_function_string(self):
        """str() coerces both strings and numbers to strings."""
        self.assertEqual('3', self._evaluator.evaluate('str($)', '3'))
        self.assertEqual('3', self._evaluator.evaluate('str($)', 3))

    def test_function_len(self):
        """len() on strings and on filtered collections."""
        self.assertEqual(3, self._evaluator.evaluate('len($)', 'hey'))
        data = [{'some': 'thing'}]

        self.assertEqual(
            1,
            self._evaluator.evaluate('$.where($.some = thing).len()', data)
        )

    def test_validate(self):
        """Syntactically valid expressions pass validation without raising."""
        self._evaluator.validate('abc')
        self._evaluator.validate('1')
        self._evaluator.validate('1 + 2')
        self._evaluator.validate('$.a1')
        self._evaluator.validate('$.a1 * $.a2')

    def test_validate_failed(self):
        """Malformed expressions and non-string inputs are rejected."""
        self.assertRaises(exc.YaqlEvaluationException,
                          self._evaluator.validate,
                          '*')

        self.assertRaises(exc.YaqlEvaluationException,
                          self._evaluator.validate,
                          [1, 2, 3])

        self.assertRaises(exc.YaqlEvaluationException,
                          self._evaluator.validate,
                          {'a': 1})
class InlineYAQLEvaluatorTest(base.BaseTest):
    """Tests for the inline evaluator, which expands <% ... %> inside text."""

    def setUp(self):
        super(InlineYAQLEvaluatorTest, self).setUp()
        self._evaluator = expr.InlineYAQLEvaluator()

    def test_multiple_placeholders(self):
        """Several <% %> placeholders in one template are all substituted."""
        expr_str = """
            Statistics for tenant "<% $.project_id %>"

            Number of virtual machines: <% $.vm_count %>
            Number of active virtual machines: <% $.active_vm_count %>
            Number of networks: <% $.net_count %>

            -- Sincerely, Mistral Team.
        """

        result = self._evaluator.evaluate(
            expr_str,
            {
                'project_id': '1-2-3-4',
                'vm_count': 28,
                'active_vm_count': 0,
                'net_count': 1
            }
        )

        expected_result = """
            Statistics for tenant "1-2-3-4"

            Number of virtual machines: 28
            Number of active virtual machines: 0
            Number of networks: 1

            -- Sincerely, Mistral Team.
        """

        self.assertEqual(expected_result, result)

    def test_function_string(self):
        """str() works the same inside inline markers."""
        self.assertEqual('3', self._evaluator.evaluate('<% str($) %>', '3'))
        self.assertEqual('3', self._evaluator.evaluate('<% str($) %>', 3))

    def test_validate(self):
        """Plain text, bare markers, and mixed text/markers all validate."""
        self._evaluator.validate('There is no expression.')
        self._evaluator.validate('<% abc %>')
        self._evaluator.validate('<% 1 %>')
        self._evaluator.validate('<% 1 + 2 %>')
        self._evaluator.validate('<% $.a1 %>')
        self._evaluator.validate('<% $.a1 * $.a2 %>')
        self._evaluator.validate('<% $.a1 %> is <% $.a2 %>')
        self._evaluator.validate('The value is <% $.a1 %>.')

    def test_validate_failed(self):
        """Malformed embedded expressions and non-string inputs are rejected."""
        self.assertRaises(exc.YaqlEvaluationException,
                          self._evaluator.validate,
                          'The value is <% * %>.')

        self.assertRaises(exc.YaqlEvaluationException,
                          self._evaluator.validate,
                          [1, 2, 3])

        self.assertRaises(exc.YaqlEvaluationException,
                          self._evaluator.validate,
                          {'a': 1})
class ExpressionsTest(base.BaseTest):
    """Tests for the module-level helpers, mainly evaluate_recursively()."""

    def test_evaluate_complex_expressions(self):
        """Arithmetic, boolean, membership and mixed-text expressions."""
        data = {
            'a': 1,
            'b': 2,
            'c': 3,
            'd': True,
            'e': False,
            'f': 10.1,
            'g': 10,
            'h': [1, 2, 3, 4, 5],
            'i': 'We are OpenStack!',
            'j': 'World',
            'k': 'Mistral',
            'l': 'awesome',
            'm': 'the way we roll'
        }

        # (expression, expected result) pairs evaluated against `data`.
        test_cases = [
            ('<% $.a + $.b * $.c %>', 7),
            ('<%($.a + $.b) * $.c %>', 9),
            ('<% $.d and $.e %>', False),
            ('<% $.f > $.g %>', True),
            ('<% $.h.len() >= 5 %>', True),
            ('<% $.h.len() >= $.b + $.c %>', True),
            ('<% 100 in $.h %>', False),
            ('<% $.a in $.h%>', True),
            ('<% ''OpenStack'' in $.i %>', True),
            ('Hello, <% $.j %>!', 'Hello, World!'),
            ('<% $.k %> is <% $.l %>!', 'Mistral is awesome!'),
            ('This is <% $.m %>.', 'This is the way we roll.'),
            ('<% 1 + 1 = 3 %>', False)
        ]

        for expression, expected in test_cases:
            actual = expr.evaluate_recursively(expression, data)

            self.assertEqual(actual, expected)

    def test_evaluate_recursively(self):
        """Placeholders nested inside dict values are substituted in place."""
        task_spec_dict = {
            'parameters': {
                'p1': 'My string',
                'p2': '<% $.param2 %>',
                'p3': ''
            },
            'publish': {
                'new_key11': 'new_key1'
            }
        }

        modified_task = expr.evaluate_recursively(
            task_spec_dict,
            {'param2': 'val32'}
        )

        self.assertDictEqual(
            {
                'parameters': {
                    'p1': 'My string',
                    'p2': 'val32',
                    'p3': ''
                },
                'publish': {
                    'new_key11': 'new_key1'
                }
            },
            modified_task
        )

    def test_evaluate_recursively_arbitrary_dict(self):
        """Substitution also reaches lists and deeply nested structures."""
        context = {
            "auth_token": "123",
            "project_id": "mistral"
        }
        data = {
            "parameters": {
                "parameter1": {
                    "name1": "<% $.auth_token %>",
                    "name2": "val_name2"
                },
                "param2": [
                    "var1",
                    "var2",
                    "/servers/<% $.project_id %>/bla"
                ]
            },
            "token": "<% $.auth_token %>"
        }

        applied = expr.evaluate_recursively(data, context)

        self.assertDictEqual(
            {
                "parameters": {
                    "parameter1": {
                        "name1": "123",
                        "name2": "val_name2"
                    },
                    "param2": ["var1", "var2", "/servers/mistral/bla"]
                },
                "token": "123"
            },
            applied
        )

    def test_evaluate_recursively_environment(self):
        """env() placeholders resolve against the __env section of the context."""
        environment = {
            'host': 'vm1234.example.com',
            'db': 'test',
            'timeout': 600,
            'verbose': True,
            '__actions': {
                'std.sql': {
                    'conn': 'mysql://admin:secrete@<% env().host %>'
                            '/<% env().db %>'
                }
            }
        }

        context = {
            '__env': environment
        }

        defaults = context['__env']['__actions']['std.sql']
        applied = expr.evaluate_recursively(defaults, context)
        expected = 'mysql://admin:secrete@vm1234.example.com/test'

        self.assertEqual(applied['conn'], expected)
| apache-2.0 |
drlove/opentele-server | src/java/org/opentele/server/cpr/stamdatalookup/generated/PersonCivilRegistrationStatusStructureType.java | 3182 |
package org.opentele.server.cpr.stamdatalookup.generated;
import java.math.BigInteger;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.datatype.XMLGregorianCalendar;
/**
* <p>Java class for PersonCivilRegistrationStatusStructureType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="PersonCivilRegistrationStatusStructureType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{http://rep.oio.dk/cpr.dk/xml/schemas/core/2005/11/24/}PersonCivilRegistrationStatusCode"/>
* <element ref="{http://rep.oio.dk/cpr.dk/xml/schemas/core/2006/01/17/}PersonCivilRegistrationStatusStartDate"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// JAXB-generated value holder: mutable fields with bean-style accessors, per
// the JAXB binding convention. Do not hand-edit behavior; regenerate instead.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "PersonCivilRegistrationStatusStructureType", namespace = "http://rep.oio.dk/cpr.dk/xml/schemas/core/2006/01/17/", propOrder = {
    "personCivilRegistrationStatusCode",
    "personCivilRegistrationStatusStructureType".equals("") ? "" : "personCivilRegistrationStatusStartDate"
})
public class PersonCivilRegistrationStatusStructureType {

    // CPR civil registration status code (schema: 2005/11/24 namespace).
    @XmlElement(name = "PersonCivilRegistrationStatusCode", namespace = "http://rep.oio.dk/cpr.dk/xml/schemas/core/2005/11/24/", required = true)
    protected BigInteger personCivilRegistrationStatusCode;
    // Date from which the status applies (xsd:date).
    @XmlElement(name = "PersonCivilRegistrationStatusStartDate", required = true)
    @XmlSchemaType(name = "date")
    protected XMLGregorianCalendar personCivilRegistrationStatusStartDate;

    /**
     * Gets the value of the personCivilRegistrationStatusCode property.
     * 
     * @return
     *     possible object is
     *     {@link BigInteger }
     *     
     */
    public BigInteger getPersonCivilRegistrationStatusCode() {
        return personCivilRegistrationStatusCode;
    }

    /**
     * Sets the value of the personCivilRegistrationStatusCode property.
     * 
     * @param value
     *     allowed object is
     *     {@link BigInteger }
     *     
     */
    public void setPersonCivilRegistrationStatusCode(BigInteger value) {
        this.personCivilRegistrationStatusCode = value;
    }

    /**
     * Gets the value of the personCivilRegistrationStatusStartDate property.
     * 
     * @return
     *     possible object is
     *     {@link XMLGregorianCalendar }
     *     
     */
    public XMLGregorianCalendar getPersonCivilRegistrationStatusStartDate() {
        return personCivilRegistrationStatusStartDate;
    }

    /**
     * Sets the value of the personCivilRegistrationStatusStartDate property.
     * 
     * @param value
     *     allowed object is
     *     {@link XMLGregorianCalendar }
     *     
     */
    public void setPersonCivilRegistrationStatusStartDate(XMLGregorianCalendar value) {
        this.personCivilRegistrationStatusStartDate = value;
    }

}
dagnir/aws-sdk-java | aws-java-sdk-lexmodelbuilding/src/main/java/com/amazonaws/services/lexmodelbuilding/model/transform/GetBotChannelAssociationsRequestMarshaller.java | 3440 | /*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.lexmodelbuilding.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.lexmodelbuilding.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * Marshals a {@link GetBotChannelAssociationsRequest} onto the wire protocol:
 * {@code botName} and {@code aliasName} go into the request path, while
 * {@code nextToken}, {@code maxResults} and {@code nameContains} become query
 * parameters. Generated code -- do not hand-edit binding names or call order.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class GetBotChannelAssociationsRequestMarshaller {

    // Path parameter bindings.
    private static final MarshallingInfo<String> BOTNAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PATH)
            .marshallLocationName("botName").build();
    private static final MarshallingInfo<String> BOTALIAS_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PATH)
            .marshallLocationName("aliasName").build();
    // Query-string parameter bindings (pagination and name filtering).
    private static final MarshallingInfo<String> NEXTTOKEN_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.QUERY_PARAM).marshallLocationName("nextToken").build();
    private static final MarshallingInfo<Integer> MAXRESULTS_BINDING = MarshallingInfo.builder(MarshallingType.INTEGER)
            .marshallLocation(MarshallLocation.QUERY_PARAM).marshallLocationName("maxResults").build();
    private static final MarshallingInfo<String> NAMECONTAINS_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.QUERY_PARAM).marshallLocationName("nameContains").build();

    // Stateless, so a single shared instance suffices.
    private static final GetBotChannelAssociationsRequestMarshaller instance = new GetBotChannelAssociationsRequestMarshaller();

    public static GetBotChannelAssociationsRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * @param getBotChannelAssociationsRequest the request to marshal; must not be {@code null}
     * @param protocolMarshaller the protocol-level marshaller that receives each field
     * @throws SdkClientException if the request is {@code null} or any field fails to marshal
     */
    public void marshall(GetBotChannelAssociationsRequest getBotChannelAssociationsRequest, ProtocolMarshaller protocolMarshaller) {
        if (getBotChannelAssociationsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getBotChannelAssociationsRequest.getBotName(), BOTNAME_BINDING);
            protocolMarshaller.marshall(getBotChannelAssociationsRequest.getBotAlias(), BOTALIAS_BINDING);
            protocolMarshaller.marshall(getBotChannelAssociationsRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(getBotChannelAssociationsRequest.getMaxResults(), MAXRESULTS_BINDING);
            protocolMarshaller.marshall(getBotChannelAssociationsRequest.getNameContains(), NAMECONTAINS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type,
            // preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
| apache-2.0 |
jcheron/phalcon-jquery | Ajax/bootstrap/html/HtmlBadge.php | 437 | <?php
namespace Ajax\bootstrap\html;
/**
* Twitter Bootstrap Badge component
* @see http://getbootstrap.com/components/#badges
* @author jc
* @version 1.001
*/
use Ajax\bootstrap\html\base\HtmlBsDoubleElement;
/**
 * Bootstrap badge element: a <span> carrying the "badge" CSS class whose
 * body is the supplied caption.
 */
class HtmlBadge extends HtmlBsDoubleElement {

	/**
	 * @param string $identifier unique identifier for the element
	 * @param string $caption    text shown inside the badge (empty by default)
	 */
	public function __construct($identifier, $caption="") {
		parent::__construct($identifier, "span");
		$this->setProperty("class", "badge");
		$this->content=$caption;
	}
}
Jkoza/ConUHacks | app/src/main/java/com/macadamian/smartpantry/ui/activities/AboutActivity.java | 1536 | package com.macadamian.smartpantry.ui.activities;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.res.Resources;
import android.os.Bundle;
import android.text.SpannableString;
import android.text.method.LinkMovementMethod;
import android.text.util.Linkify;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.macadamian.smartpantry.R;
/**
 * Static "About" screen. Uses a cross-fade transition both when the activity
 * is entered and when it finishes.
 */
public class AboutActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // Cross-fade in instead of the platform's default activity transition.
        overridePendingTransition(android.R.anim.fade_in, android.R.anim.fade_out);
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_about);
        // NOTE(review): the original body also read R.array.about_url_array and
        // built LayoutParams with a dp->px top margin, but never attached either
        // to any view -- dead code removed. Restore it if URL rows were meant to
        // be added programmatically here.
    }

    @Override
    public void finish() {
        super.finish();
        // Mirror the entry animation when leaving the screen.
        overridePendingTransition(android.R.anim.fade_in, android.R.anim.fade_out);
    }
}
| apache-2.0 |
timofeevda/seismic-beachballs-demo | app/src/components/helpopup/helpopup.component.ts | 228 | import {Component, Input} from '@angular/core'
// Reusable help popup. The markup lives in helpopup.component.html and is
// bundled via require() at build time.
@Component({
    template: require('./helpopup.component.html'),
    selector: 'helpopup'
})
export class HelpPopupComponent {
    // Text supplied by the host component; presumably shown as the popup's
    // heading -- TODO(review): confirm against helpopup.component.html.
    @Input() header: string
    // Identifier supplied by the host component; presumably used as the DOM id
    // so callers can target/toggle the popup -- confirm against the template.
    @Input() id: string
}
datawire/quark | setup.py | 2400 | # Copyright 2015 datawire. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os

# Directory containing this setup.py; all data files are resolved against it.
ROOT_DIR = os.path.dirname(__file__)

from setuptools import setup


def _load_metadata():
    """Execute quarkc/_metadata.py and return its namespace as a dict.

    The metadata module defines dunder strings (__title__, __version__, ...)
    that feed the setup() call below.
    """
    namespace = {}
    with open(os.path.join(ROOT_DIR, "quarkc/_metadata.py")) as fp:
        exec(fp.read(), namespace)
    return namespace


def _load_requirements():
    """Return the non-blank, non-comment lines of install-requirements.txt."""
    with open(os.path.join(ROOT_DIR, "install-requirements.txt")) as fp:
        return [line.strip() for line in fp
                if line.strip() and not line.strip().startswith("#")]


metadata = _load_metadata()
install_requirements = _load_requirements()

setup(name=metadata["__title__"],
      version=metadata["__version__"],
      description=metadata["__summary__"],
      author=metadata["__author__"],
      author_email=metadata["__email__"],
      url=metadata["__uri__"],
      license=metadata["__license__"],
      packages=['quarkc', 'quarkc.test'],
      package_data={'': ['*.q', "lib/*.q", "lib/*.py", "lib/*.js", "lib/*.rb",
                         "lib/io/datawire/quark/netty/*.java",
                         "lib/io/datawire/quark/runtime/*.java",
                         "apidoc/*.css", "apidoc/*.html", "apidoc/*.js"]},
      include_package_data=True,
      install_requires=install_requirements,
      entry_points={"console_scripts": ["quark = quarkc.command:call_main",
                                        "quark-grammar = quarkc.parser:rules"]},
      keywords=['IDL', 'service', 'microservice', 'RPC', 'async'],
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Environment :: Console',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: Apache Software License',
          'Operating System :: MacOS',
          'Operating System :: OS Independent',
          'Operating System :: POSIX',
          'Programming Language :: Java',
          'Programming Language :: JavaScript',
          'Programming Language :: Python',
          'Topic :: Software Development'
      ]
      )
| apache-2.0 |
census-instrumentation/opencensus-csharp | src/OpenCensus.Abstractions/Stats/Aggregations/ILastValueDataLong.cs | 1042 | // <copyright file="ILastValueDataLong.cs" company="OpenCensus Authors">
// Copyright 2018, OpenCensus Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
namespace OpenCensus.Stats.Aggregations
{
    /// <summary>
    /// Data accumulated by the last-value aggregation over long-valued
    /// measurements: only the most recently recorded value is retained.
    /// </summary>
    public interface ILastValueDataLong : IAggregationData
    {
        /// <summary>
        /// Gets the most recently recorded measurement as a long value.
        /// </summary>
        long LastValue { get; }
    }
}
| apache-2.0 |
PheMA/phema-executer | src/main/java/org/phema/executer/Runner.java | 2348 | package org.phema.executer;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.io.FileUtils;
import org.phema.executer.hqmf.IDocument;
import org.phema.executer.hqmf.Parser;
import org.phema.executer.interfaces.IValueSetRepository;
import org.phema.executer.valueSets.FileValueSetRepository;
import java.io.File;
import java.nio.charset.Charset;
import java.util.HashMap;
/**
* Created by Luke Rasmussen on 7/19/17.
*/
/**
 * Command-line entry point for the PhEMA Executer. Expects a single argument:
 * the path of a .conf file describing the phenotype to run. The configuration
 * is handed to {@link org.phema.executer.translator.HqmfToI2b2}, progress is
 * echoed to the console, and wall-clock execution time is reported at the end.
 */
public class Runner {
    public static void main(String[] args) {
        org.phema.executer.translator.HqmfToI2b2 translator = null;
        try {
            Version version = new Version();
            System.out.println(String.format("PhEMA Executer v%s", version.toString()));

            // No argument at all -> show usage and bail out.
            if (args == null || args.length == 0) {
                printUsage();
                return;
            }

            // isFile() is false both for directories and for paths that do not
            // exist, so this rejects either kind of bad input up front (the
            // original only checked isDirectory(), letting missing files
            // through to fail deep inside execute()).
            File configFile = new File(args[0]);
            if (!configFile.isFile()) {
                printUsage();
                return;
            }

            ConsoleProgressObserver consoleLogger = new ConsoleProgressObserver();

            // Time the full translation run.
            long startTime = System.nanoTime();
            translator = new org.phema.executer.translator.HqmfToI2b2();
            translator.setLogger(consoleLogger);
            translator.execute(configFile);
            long endTime = System.nanoTime();
            System.out.printf("\r\nElapsed execution time in seconds: %.2f\r\n", ((endTime - startTime) / 1000000000.0));
        } catch (Exception e) {
            // Surface the full failure to the console; this is a CLI tool, so
            // stderr output is the expected reporting channel.
            e.printStackTrace();
        }
        finally {
            // Release any resources (e.g. connections) the translator holds.
            if (translator != null) {
                translator.close();
            }
        }
    }

    /** Prints command-line usage, including an invocation example. */
    private static void printUsage() {
        System.out.println();
        System.out.println("java -jar phema-executer-lib.jar [config_file]");
        System.out.println();
        System.out.println("Options:");
        System.out.println("config_file - The .conf file (including the relative or absolute path)\r\n  which specifies the phenotype to run");
        System.out.println();
        System.out.println();
        System.out.println("Example: java -jar phema-executer-lib.jar ./test/test-phenotype.conf");
        System.out.println();
    }
}
| apache-2.0 |
Nandiwalesamahj/nandi3 | test/controllers/shaskiyayojanas_controller_test.rb | 147 | require 'test_helper'
# Scaffold-generated integration test for ShaskiyayojanasController.
# No controller behavior is exercised yet; the sample test below is the
# Rails generator's placeholder.
class ShaskiyayojanasControllerTest < ActionDispatch::IntegrationTest
  # test "the truth" do
  #   assert true
  # end
end
| apache-2.0 |
leppa/home-assistant | tests/components/binary_sensor/test_init.py | 871 | """The tests for the Binary sensor component."""
import unittest
from unittest import mock
from homeassistant.components import binary_sensor
from homeassistant.const import STATE_OFF, STATE_ON
class TestBinarySensor(unittest.TestCase):
    """Unit tests for the BinarySensorDevice base class."""

    def test_state(self):
        """state must mirror is_on: off by default, on when is_on is True."""
        # Default device reports off.
        assert binary_sensor.BinarySensorDevice().state == STATE_OFF

        target = "homeassistant.components.binary_sensor.BinarySensorDevice.is_on"
        for is_on, expected in ((False, STATE_OFF), (True, STATE_ON)):
            with mock.patch(target, new=is_on):
                assert binary_sensor.BinarySensorDevice().state == expected
| apache-2.0 |
kubernetes-client/java | fluent/src/main/java/io/kubernetes/client/openapi/models/V2beta1ExternalMetricStatusFluent.java | 3354 | /*
Copyright 2022 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package io.kubernetes.client.openapi.models;
/** Generated */
/**
 * Generated fluent builder interface for V2beta1ExternalMetricStatus.
 * Exposes get/with/has accessors for currentAverageValue, currentValue,
 * metricName and metricSelector, plus nested-builder entry points for the
 * label selector. Generated code -- regenerate rather than hand-edit.
 */
public interface V2beta1ExternalMetricStatusFluent<
        A extends io.kubernetes.client.openapi.models.V2beta1ExternalMetricStatusFluent<A>>
    extends io.kubernetes.client.fluent.Fluent<A> {

  // --- currentAverageValue accessors ---
  public io.kubernetes.client.custom.Quantity getCurrentAverageValue();

  public A withCurrentAverageValue(io.kubernetes.client.custom.Quantity currentAverageValue);

  public java.lang.Boolean hasCurrentAverageValue();

  // Convenience overload: builds the Quantity from its string form.
  public A withNewCurrentAverageValue(java.lang.String value);

  // --- currentValue accessors ---
  public io.kubernetes.client.custom.Quantity getCurrentValue();

  public A withCurrentValue(io.kubernetes.client.custom.Quantity currentValue);

  public java.lang.Boolean hasCurrentValue();

  public A withNewCurrentValue(java.lang.String value);

  // --- metricName accessors ---
  public java.lang.String getMetricName();

  public A withMetricName(java.lang.String metricName);

  public java.lang.Boolean hasMetricName();

  /** Method is deprecated. use withMetricName instead. */
  @java.lang.Deprecated
  public A withNewMetricName(java.lang.String original);

  // --- metricSelector accessors and nested builder ---
  /**
   * This method has been deprecated, please use method buildMetricSelector instead.
   *
   * @return The buildable object.
   */
  @java.lang.Deprecated
  public io.kubernetes.client.openapi.models.V1LabelSelector getMetricSelector();

  public io.kubernetes.client.openapi.models.V1LabelSelector buildMetricSelector();

  public A withMetricSelector(io.kubernetes.client.openapi.models.V1LabelSelector metricSelector);

  public java.lang.Boolean hasMetricSelector();

  public io.kubernetes.client.openapi.models.V2beta1ExternalMetricStatusFluent.MetricSelectorNested<
          A>
      withNewMetricSelector();

  public io.kubernetes.client.openapi.models.V2beta1ExternalMetricStatusFluent.MetricSelectorNested<
          A>
      withNewMetricSelectorLike(io.kubernetes.client.openapi.models.V1LabelSelector item);

  public io.kubernetes.client.openapi.models.V2beta1ExternalMetricStatusFluent.MetricSelectorNested<
          A>
      editMetricSelector();

  public io.kubernetes.client.openapi.models.V2beta1ExternalMetricStatusFluent.MetricSelectorNested<
          A>
      editOrNewMetricSelector();

  public io.kubernetes.client.openapi.models.V2beta1ExternalMetricStatusFluent.MetricSelectorNested<
          A>
      editOrNewMetricSelectorLike(io.kubernetes.client.openapi.models.V1LabelSelector item);

  /**
   * Nested builder for the metric selector; and()/endMetricSelector() return
   * to the enclosing fluent context.
   */
  public interface MetricSelectorNested<N>
      extends io.kubernetes.client.fluent.Nested<N>,
          io.kubernetes.client.openapi.models.V1LabelSelectorFluent<
              io.kubernetes.client.openapi.models.V2beta1ExternalMetricStatusFluent
                      .MetricSelectorNested<
                  N>> {
    public N and();

    public N endMetricSelector();
  }
}
| apache-2.0 |
prometheus/promdash | db/migrate/20160609130416_add_redirect_checkbox_to_dashboard.rb | 168 | class AddRedirectCheckboxToDashboard < ActiveRecord::Migration
def change
add_column :dashboards, :hard_redirect, :boolean, default: false, null: false
end
end
| apache-2.0 |
axeolotl/wsrp4cxf | commons/src/java/oasis/names/tc/wsrp/v1/types/Contact.java | 3587 |
package oasis.names.tc.wsrp.v1.types;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for Contact complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="Contact">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="postal" type="{urn:oasis:names:tc:wsrp:v1:types}Postal" minOccurs="0"/>
* <element name="telecom" type="{urn:oasis:names:tc:wsrp:v1:types}Telecom" minOccurs="0"/>
* <element name="online" type="{urn:oasis:names:tc:wsrp:v1:types}Online" minOccurs="0"/>
* <element name="extensions" type="{urn:oasis:names:tc:wsrp:v1:types}Extension" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "Contact", propOrder = {
    "postal",
    "telecom",
    "online",
    "extensions"
})
public class Contact {

    // Optional postal, telecom and online sub-records, plus an open-ended
    // list of vendor extensions, per the WSRP 1.0 Contact schema type.
    protected Postal postal;
    protected Telecom telecom;
    protected Online online;
    protected List<Extension> extensions;

    /**
     * Returns the postal details.
     *
     * @return the {@link Postal} value, or {@code null} if absent
     */
    public Postal getPostal() {
        return this.postal;
    }

    /**
     * Sets the postal details.
     *
     * @param value the new {@link Postal} value; {@code null} clears it
     */
    public void setPostal(Postal value) {
        this.postal = value;
    }

    /**
     * Returns the telecom details.
     *
     * @return the {@link Telecom} value, or {@code null} if absent
     */
    public Telecom getTelecom() {
        return this.telecom;
    }

    /**
     * Sets the telecom details.
     *
     * @param value the new {@link Telecom} value; {@code null} clears it
     */
    public void setTelecom(Telecom value) {
        this.telecom = value;
    }

    /**
     * Returns the online details.
     *
     * @return the {@link Online} value, or {@code null} if absent
     */
    public Online getOnline() {
        return this.online;
    }

    /**
     * Sets the online details.
     *
     * @param value the new {@link Online} value; {@code null} clears it
     */
    public void setOnline(Online value) {
        this.online = value;
    }

    /**
     * Returns the live, lazily created list of {@link Extension} elements.
     *
     * <p>The list is the object's internal state, not a snapshot: mutations
     * made by the caller are reflected in this JAXB object, which is why no
     * corresponding setter exists.
     *
     * @return the (never {@code null}) extension list
     */
    public List<Extension> getExtensions() {
        if (this.extensions == null) {
            this.extensions = new ArrayList<Extension>();
        }
        return this.extensions;
    }

}
| apache-2.0 |
mike10004/appengine-imaging | gaecompat-awt-imaging/src/swing/com/gaecompat/javax/swing/text/AbstractDocumentI.java | 16678 | package com.gaecompat.javax.swing.text;
import java.io.PrintStream;
import java.util.Dictionary;
import java.util.EventListener;
import com.gaecompat.javax.swing.event.DocumentListener;
public interface AbstractDocumentI {
/**
* Supports managing a set of properties. Callers
* can use the <code>documentProperties</code> dictionary
* to annotate the document with document-wide properties.
*
* @return a non-<code>null</code> <code>Dictionary</code>
* @see #setDocumentProperties
*/
public abstract Dictionary<Object, Object> getDocumentProperties();
/**
* Replaces the document properties dictionary for this document.
*
* @param x the new dictionary
* @see #getDocumentProperties
*/
public abstract void setDocumentProperties(Dictionary<Object, Object> x);
/**
* Returns an array of all the objects currently registered
* as <code><em>Foo</em>Listener</code>s
* upon this document.
* <code><em>Foo</em>Listener</code>s are registered using the
* <code>add<em>Foo</em>Listener</code> method.
*
* <p>
* You can specify the <code>listenerType</code> argument
* with a class literal, such as
* <code><em>Foo</em>Listener.class</code>.
* For example, you can query a
* document <code>d</code>
* for its document listeners with the following code:
*
* <pre>DocumentListener[] mls = (DocumentListener[])(d.getListeners(DocumentListener.class));</pre>
*
* If no such listeners exist, this method returns an empty array.
*
* @param listenerType the type of listeners requested; this parameter
* should specify an interface that descends from
* <code>java.util.EventListener</code>
* @return an array of all objects registered as
* <code><em>Foo</em>Listener</code>s on this component,
* or an empty array if no such
* listeners have been added
* @exception ClassCastException if <code>listenerType</code>
* doesn't specify a class or interface that implements
* <code>java.util.EventListener</code>
*
* @see #getDocumentListeners
* @see #getUndoableEditListeners
*
* @since 1.3
*/
public abstract <T extends EventListener> T[] getListeners(
Class<T> listenerType);
/**
* Gets the asynchronous loading priority. If less than zero,
* the document should not be loaded asynchronously.
*
* @return the asynchronous loading priority, or <code>-1</code>
* if the document should not be loaded asynchronously
*/
public abstract int getAsynchronousLoadPriority();
/**
* Sets the asynchronous loading priority.
* @param p the new asynchronous loading priority; a value
* less than zero indicates that the document should not be
* loaded asynchronously
*/
public abstract void setAsynchronousLoadPriority(int p);
/**
* Sets the <code>DocumentFilter</code>. The <code>DocumentFilter</code>
* is passed <code>insert</code> and <code>remove</code> to conditionally
* allow inserting/deleting of the text. A <code>null</code> value
* indicates that no filtering will occur.
*
* @param filter the <code>DocumentFilter</code> used to constrain text
* @see #getDocumentFilter
* @since 1.4
*/
public abstract void setDocumentFilter(DocumentFilter filter);
/**
* Returns the <code>DocumentFilter</code> that is responsible for
* filtering of insertion/removal. A <code>null</code> return value
* implies no filtering is to occur.
*
* @since 1.4
* @see #setDocumentFilter
* @return the DocumentFilter
*/
public abstract DocumentFilter getDocumentFilter();
/**
* This allows the model to be safely rendered in the presence
* of currency, if the model supports being updated asynchronously.
* The given runnable will be executed in a way that allows it
* to safely read the model with no changes while the runnable
* is being executed. The runnable itself may <em>not</em>
* make any mutations.
* <p>
* This is implemented to aquire a read lock for the duration
* of the runnables execution. There may be multiple runnables
* executing at the same time, and all writers will be blocked
* while there are active rendering runnables. If the runnable
* throws an exception, its lock will be safely released.
* There is no protection against a runnable that never exits,
* which will effectively leave the document locked for it's
* lifetime.
* <p>
* If the given runnable attempts to make any mutations in
* this implementation, a deadlock will occur. There is
* no tracking of individual rendering threads to enable
* detecting this situation, but a subclass could incur
* the overhead of tracking them and throwing an error.
* <p>
* This method is thread safe, although most Swing methods
* are not. Please see
* <A HREF="http://java.sun.com/docs/books/tutorial/uiswing/misc/threads.html">How
* to Use Threads</A> for more information.
*
* @param r the renderer to execute
*/
public abstract void render(Runnable r);
/**
* Returns the length of the data. This is the number of
* characters of content that represents the users data.
*
* @return the length >= 0
* @see Document#getLength
*/
public abstract int getLength();
/**
* Adds a document listener for notification of any changes.
*
* @param listener the <code>DocumentListener</code> to add
* @see Document#addDocumentListener
*/
public abstract void addDocumentListener(DocumentListener listener);
/**
* Removes a document listener.
*
* @param listener the <code>DocumentListener</code> to remove
* @see Document#removeDocumentListener
*/
public abstract void removeDocumentListener(DocumentListener listener);
/**
* Returns an array of all the document listeners
* registered on this document.
*
* @return all of this document's <code>DocumentListener</code>s
* or an empty array if no document listeners are
* currently registered
*
* @see #addDocumentListener
* @see #removeDocumentListener
* @since 1.4
*/
public abstract DocumentListener[] getDocumentListeners();
/**
* Adds an undo listener for notification of any changes.
* Undo/Redo operations performed on the <code>UndoableEdit</code>
* will cause the appropriate DocumentEvent to be fired to keep
* the view(s) in sync with the model.
*
* @param listener the <code>UndoableEditListener</code> to add
* @see Document#addUndoableEditListener
*/
public abstract void addUndoableEditListener(UndoableEditListener listener);
/**
* Removes an undo listener.
*
* @param listener the <code>UndoableEditListener</code> to remove
* @see Document#removeDocumentListener
*/
public abstract void removeUndoableEditListener(
UndoableEditListener listener);
/**
* Returns an array of all the undoable edit listeners
* registered on this document.
*
* @return all of this document's <code>UndoableEditListener</code>s
* or an empty array if no undoable edit listeners are
* currently registered
*
* @see #addUndoableEditListener
* @see #removeUndoableEditListener
*
* @since 1.4
*/
public abstract UndoableEditListener[] getUndoableEditListeners();
/**
* A convenience method for looking up a property value. It is
* equivalent to:
* <pre>
* getDocumentProperties().get(key);
* </pre>
*
* @param key the non-<code>null</code> property key
* @return the value of this property or <code>null</code>
* @see #getDocumentProperties
*/
public abstract Object getProperty(Object key);
/**
* A convenience method for storing up a property value. It is
* equivalent to:
* <pre>
* getDocumentProperties().put(key, value);
* </pre>
* If <code>value</code> is <code>null</code> this method will
* remove the property.
*
* @param key the non-<code>null</code> key
* @param value the property value
* @see #getDocumentProperties
*/
public abstract void putProperty(Object key, Object value);
/**
* Removes some content from the document.
* Removing content causes a write lock to be held while the
* actual changes are taking place. Observers are notified
* of the change on the thread that called this method.
* <p>
* This method is thread safe, although most Swing methods
* are not. Please see
* <A HREF="http://java.sun.com/docs/books/tutorial/uiswing/misc/threads.html">How
* to Use Threads</A> for more information.
*
* @param offs the starting offset >= 0
* @param len the number of characters to remove >= 0
* @exception BadLocationException the given remove position is not a valid
* position within the document
* @see Document#remove
*/
public abstract void remove(int offs, int len) throws BadLocationException;
/**
* Deletes the region of text from <code>offset</code> to
* <code>offset + length</code>, and replaces it with <code>text</code>.
* It is up to the implementation as to how this is implemented, some
* implementations may treat this as two distinct operations: a remove
* followed by an insert, others may treat the replace as one atomic
* operation.
*
* @param offset index of child element
* @param length length of text to delete, may be 0 indicating don't
* delete anything
* @param text text to insert, <code>null</code> indicates no text to insert
* @param attrs AttributeSet indicating attributes of inserted text,
* <code>null</code>
* is legal, and typically treated as an empty attributeset,
* but exact interpretation is left to the subclass
* @exception BadLocationException the given position is not a valid
* position within the document
* @since 1.4
*/
public abstract void replace(int offset, int length, String text,
AttributeSet attrs) throws BadLocationException;
/**
* Inserts some content into the document.
* Inserting content causes a write lock to be held while the
* actual changes are taking place, followed by notification
* to the observers on the thread that grabbed the write lock.
* <p>
* This method is thread safe, although most Swing methods
* are not. Please see
* <A HREF="http://java.sun.com/docs/books/tutorial/uiswing/misc/threads.html">How
* to Use Threads</A> for more information.
*
* @param offs the starting offset >= 0
* @param str the string to insert; does nothing with null/empty strings
* @param a the attributes for the inserted content
* @exception BadLocationException the given insert position is not a valid
* position within the document
* @see Document#insertString
*/
public abstract void insertString(int offs, String str, AttributeSet a)
throws BadLocationException;
/**
* Gets a sequence of text from the document.
*
* @param offset the starting offset >= 0
* @param length the number of characters to retrieve >= 0
* @return the text
* @exception BadLocationException the range given includes a position
* that is not a valid position within the document
* @see Document#getText
*/
public abstract String getText(int offset, int length)
throws BadLocationException;
/**
* Fetches the text contained within the given portion
* of the document.
* <p>
* If the partialReturn property on the txt parameter is false, the
* data returned in the Segment will be the entire length requested and
* may or may not be a copy depending upon how the data was stored.
* If the partialReturn property is true, only the amount of text that
* can be returned without creating a copy is returned. Using partial
* returns will give better performance for situations where large
* parts of the document are being scanned. The following is an example
* of using the partial return to access the entire document:
* <p>
* <pre>
* int nleft = doc.getDocumentLength();
* Segment text = new Segment();
* int offs = 0;
* text.setPartialReturn(true);
* while (nleft > 0) {
* doc.getText(offs, nleft, text);
* // do something with text
* nleft -= text.count;
* offs += text.count;
* }
* </pre>
*
* @param offset the starting offset >= 0
* @param length the number of characters to retrieve >= 0
* @param txt the Segment object to retrieve the text into
* @exception BadLocationException the range given includes a position
* that is not a valid position within the document
*/
public abstract void getText(int offset, int length, Segment txt)
throws BadLocationException;
/**
* Returns a position that will track change as the document
* is altered.
* <p>
* This method is thread safe, although most Swing methods
* are not. Please see
* <A HREF="http://java.sun.com/docs/books/tutorial/uiswing/misc/threads.html">How
* to Use Threads</A> for more information.
*
* @param offs the position in the model >= 0
* @return the position
* @exception BadLocationException if the given position does not
* represent a valid location in the associated document
* @see Document#createPosition
*/
public abstract Position createPosition(int offs)
throws BadLocationException;
/**
* Returns a position that represents the start of the document. The
* position returned can be counted on to track change and stay
* located at the beginning of the document.
*
* @return the position
*/
public abstract Position getStartPosition();
/**
* Returns a position that represents the end of the document. The
* position returned can be counted on to track change and stay
* located at the end of the document.
*
* @return the position
*/
public abstract Position getEndPosition();
/**
* Gets all root elements defined. Typically, there
* will only be one so the default implementation
* is to return the default root element.
*
* @return the root element
*/
public abstract Element[] getRootElements();
/**
* Returns the root element that views should be based upon
* unless some other mechanism for assigning views to element
* structures is provided.
*
* @return the root element
* @see Document#getDefaultRootElement
*/
public abstract Element getDefaultRootElement();
/**
* Returns the root element of the bidirectional structure for this
* document. Its children represent character runs with a given
* Unicode bidi level.
*/
public abstract Element getBidiRootElement();
/**
* Get the paragraph element containing the given position. Sub-classes
* must define for themselves what exactly constitutes a paragraph. They
* should keep in mind however that a paragraph should at least be the
* unit of text over which to run the Unicode bidirectional algorithm.
*
* @param pos the starting offset >= 0
* @return the element */
public abstract Element getParagraphElement(int pos);
/**
 * Gives a diagnostic dump of the document structure, for debugging.
 *
 * @param out the output stream the dump is written to
 */
public abstract void dump(PrintStream out);
/**
 * Acquires a lock to begin reading some state from the
 * document. There can be multiple readers at the same time.
 * Writing blocks the readers until notification of the change
 * to the listeners has been completed. This method should
 * be used very carefully to avoid unintended compromise
 * of the document. It should always be balanced with a
 * <code>readUnlock</code>, ideally in a {@code finally} block.
 *
 * @see #readUnlock
 */
public abstract void readLock();
/**
 * Does a read unlock. This signals that one
 * of the readers is done. If there are no more readers
 * then writing can begin again. This should be balanced
 * with a readLock (one unlock per prior lock), and should occur
 * in a finally statement so that the balance is guaranteed.
 * The following is an example.
 * <pre><code>
 * &nbsp;   readLock();
 * &nbsp;   try {
 * &nbsp;       // do something
 * &nbsp;   } finally {
 * &nbsp;       readUnlock();
 * &nbsp;   }
 * </code></pre>
 *
 * @see #readLock
 */
public abstract void readUnlock();
/**
 * Name of elements used to represent paragraphs
 */
public static final String ParagraphElementName = "paragraph";

/**
 * Name of elements used to represent content (leaf runs of text)
 */
public static final String ContentElementName = "content";

/**
 * Name of elements used to hold sections (lines/paragraphs).
 */
public static final String SectionElementName = "section";

/**
 * Name of elements used to hold a unidirectional run of text
 */
public static final String BidiElementName = "bidi level";

/**
 * Name of the attribute used to specify element
 * names.
 */
public static final String ElementNameAttribute = "$ename";
} | apache-2.0 |
HarryMMR/Woodmin | app/src/main/java/app/bennsandoval/com/woodmin/data/WoodminProvider.java | 14261 | package app.bennsandoval.com.woodmin.data;
import android.content.ContentProvider;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.net.Uri;
/**
* Created by Mackbook on 12/23/14.
*/
public class WoodminProvider extends ContentProvider {
private static final UriMatcher sUriMatcher = buildUriMatcher();
private WoodminDbHelper mOpenHelper;
private static final int SHOP = 100;
private static final int SHOP_ID = 101;
private static final int ORDER = 200;
private static final int ORDER_ID = 201;
private static final int PRODUCT = 300;
private static final int PRODUCT_ID = 301;
private static final int CONSUMER = 400;
private static final int CONSUMER_ID = 401;
@Override
public boolean onCreate() {
mOpenHelper = new WoodminDbHelper(getContext());
return false;
}
@Override
public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) {
Cursor retCursor;
switch (sUriMatcher.match(uri)) {
// "shop/#"
case SHOP_ID:{
retCursor = mOpenHelper.getReadableDatabase().query(
WoodminContract.ShopEntry.TABLE_NAME,
projection,
WoodminContract.ShopEntry._ID + " = '" + ContentUris.parseId(uri) + "'",
null,
null,
null,
sortOrder
);
break;
}
// "shop"
case SHOP: {
retCursor = mOpenHelper.getReadableDatabase().query(
WoodminContract.ShopEntry.TABLE_NAME,
projection,
selection,
selectionArgs,
null,
null,
sortOrder
);
break;
}
// "order/#"
case ORDER_ID:{
retCursor = mOpenHelper.getReadableDatabase().query(
WoodminContract.OrdersEntry.TABLE_NAME,
projection,
WoodminContract.OrdersEntry._ID + " = '" + ContentUris.parseId(uri) + "'",
null,
null,
null,
sortOrder
);
break;
}
// "order"
case ORDER: {
retCursor = mOpenHelper.getReadableDatabase().query(
WoodminContract.OrdersEntry.TABLE_NAME,
projection,
selection,
selectionArgs,
null,
null,
sortOrder
);
break;
}
// "product/#"
case PRODUCT_ID:{
retCursor = mOpenHelper.getReadableDatabase().query(
WoodminContract.ProductEntry.TABLE_NAME,
projection,
WoodminContract.ProductEntry._ID + " = '" + ContentUris.parseId(uri) + "'",
null,
null,
null,
sortOrder
);
break;
}
// "product"
case PRODUCT: {
retCursor = mOpenHelper.getReadableDatabase().query(
WoodminContract.ProductEntry.TABLE_NAME,
projection,
selection,
selectionArgs,
null,
null,
sortOrder
);
break;
}
// "consumer/#"
case CONSUMER_ID:{
retCursor = mOpenHelper.getReadableDatabase().query(
WoodminContract.CostumerEntry.TABLE_NAME,
projection,
WoodminContract.CostumerEntry._ID + " = '" + ContentUris.parseId(uri) + "'",
null,
null,
null,
sortOrder
);
break;
}
// "consumer"
case CONSUMER: {
retCursor = mOpenHelper.getReadableDatabase().query(
WoodminContract.CostumerEntry.TABLE_NAME,
projection,
selection,
selectionArgs,
null,
null,
sortOrder
);
break;
}
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
retCursor.setNotificationUri(getContext().getContentResolver(), uri);
return retCursor;
}
@Override
public String getType(Uri uri) {
final int match = sUriMatcher.match(uri);
switch (match) {
case SHOP:
return WoodminContract.ShopEntry.CONTENT_TYPE;
case SHOP_ID:
return WoodminContract.ShopEntry.CONTENT_ITEM_TYPE;
case ORDER:
return WoodminContract.OrdersEntry.CONTENT_TYPE;
case ORDER_ID:
return WoodminContract.OrdersEntry.CONTENT_ITEM_TYPE;
case PRODUCT:
return WoodminContract.ProductEntry.CONTENT_TYPE;
case PRODUCT_ID:
return WoodminContract.ProductEntry.CONTENT_ITEM_TYPE;
case CONSUMER:
return WoodminContract.CostumerEntry.CONTENT_TYPE;
case CONSUMER_ID:
return WoodminContract.CostumerEntry.CONTENT_ITEM_TYPE;
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
}
@Override
public Uri insert(Uri uri, ContentValues contentValues) {
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
Uri returnUri;
switch (match) {
case SHOP: {
db.beginTransaction();
try {
long _id = db.insert(WoodminContract.ShopEntry.TABLE_NAME, null, contentValues);
if ( _id > 0 )
returnUri = WoodminContract.ShopEntry.buildShopUri(_id);
else
throw new android.database.SQLException("Failed to insert row into " + uri);
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
break;
}
case ORDER: {
db.beginTransaction();
try {
long _id = db.insert(WoodminContract.OrdersEntry.TABLE_NAME, null, contentValues);
if ( _id > 0 )
returnUri = WoodminContract.OrdersEntry.buildOrderUri(_id);
else
throw new android.database.SQLException("Failed to insert row into " + uri);
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
break;
}
case PRODUCT: {
db.beginTransaction();
try {
long _id = db.insert(WoodminContract.ProductEntry.TABLE_NAME, null, contentValues);
if ( _id > 0 )
returnUri = WoodminContract.ProductEntry.buildOrderUri(_id);
else
throw new android.database.SQLException("Failed to insert row into " + uri);
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
break;
}
case CONSUMER: {
db.beginTransaction();
try {
long _id = db.insert(WoodminContract.CostumerEntry.TABLE_NAME, null, contentValues);
if ( _id > 0 )
returnUri = WoodminContract.CostumerEntry.buildOrderUri(_id);
else
throw new android.database.SQLException("Failed to insert row into " + uri);
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
break;
}
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
getContext().getContentResolver().notifyChange(uri, null, false);
return returnUri;
}
@Override
public int delete(Uri uri, String selection, String[] selectionArgs) {
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
int rowsDeleted;
switch (match) {
case SHOP:
db.beginTransaction();
try {
rowsDeleted = db.delete(
WoodminContract.ShopEntry.TABLE_NAME, selection, selectionArgs);
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
break;
case ORDER:
db.beginTransaction();
try {
rowsDeleted = db.delete(
WoodminContract.OrdersEntry.TABLE_NAME, selection, selectionArgs);
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
break;
case PRODUCT:
db.beginTransaction();
try {
rowsDeleted = db.delete(
WoodminContract.ProductEntry.TABLE_NAME, selection, selectionArgs);
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
break;
case CONSUMER:
db.beginTransaction();
try {
rowsDeleted = db.delete(
WoodminContract.CostumerEntry.TABLE_NAME, selection, selectionArgs);
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
break;
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
// Because a null deletes all rows
if (selection == null || rowsDeleted != 0) {
getContext().getContentResolver().notifyChange(uri, null, false);
}
return rowsDeleted;
}
@Override
public int update(Uri uri, ContentValues contentValues, String selection, String[] selectionArgs) {
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
int rowsUpdated;
switch (match) {
case SHOP:
db.beginTransaction();
try {
rowsUpdated = db.update(WoodminContract.ShopEntry.TABLE_NAME, contentValues, selection,
selectionArgs);
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
break;
case ORDER:
db.beginTransaction();
try {
rowsUpdated = db.update(WoodminContract.OrdersEntry.TABLE_NAME, contentValues, selection,
selectionArgs);
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
break;
case PRODUCT:
db.beginTransaction();
try {
rowsUpdated = db.update(WoodminContract.ProductEntry.TABLE_NAME, contentValues, selection,
selectionArgs);
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
break;
case CONSUMER:
db.beginTransaction();
try {
rowsUpdated = db.update(WoodminContract.CostumerEntry.TABLE_NAME, contentValues, selection,
selectionArgs);
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
break;
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
if (rowsUpdated != 0) {
getContext().getContentResolver().notifyChange(uri, null, false);
}
return rowsUpdated;
}
private static UriMatcher buildUriMatcher() {
final UriMatcher matcher = new UriMatcher(UriMatcher.NO_MATCH);
final String authority = WoodminContract.CONTENT_AUTHORITY;
matcher.addURI(authority, WoodminContract.PATH_SHOP, SHOP);
matcher.addURI(authority, WoodminContract.PATH_SHOP + "/#", SHOP_ID);
matcher.addURI(authority, WoodminContract.PATH_ORDER, ORDER);
matcher.addURI(authority, WoodminContract.PATH_ORDER + "/#", ORDER_ID);
matcher.addURI(authority, WoodminContract.PATH_PRODUCT, PRODUCT);
matcher.addURI(authority, WoodminContract.PATH_PRODUCT + "/#", PRODUCT_ID);
matcher.addURI(authority, WoodminContract.PATH_COSTUMER, CONSUMER);
matcher.addURI(authority, WoodminContract.PATH_COSTUMER + "/#", CONSUMER_ID);
return matcher;
}
}
| apache-2.0 |
tudway/Qart | Src/src/Qart.Testing/ActionPipeline/Actions/AssertContentDiffAction.cs | 2304 | using Newtonsoft.Json.Linq;
using Qart.Testing.Diff;
using Qart.Testing.Framework;
using Qart.Testing.Framework.Json;
using System.Linq;
namespace Qart.Testing.ActionPipeline.Actions
{
/// <summary>
/// Pipeline action that compares a JSON item from the test case context against a
/// baseline file and throws when unexpected token differences are found.
/// </summary>
public class AssertContentDiffAction : IPipelineAction
{
    private readonly string _baseFile;
    private readonly string _jsonPath;
    private readonly string _itemKey;
    private readonly string _diffName;
    private readonly string _categoriesFile;
    private readonly ITokenSelectorProvider _tokenSelectorProvider;

    public AssertContentDiffAction(ITokenSelectorProvider tokenSelectorProvider, string baseFile, string diffName, string jsonPath = null, string itemKey = null, string categoriesFile = null)
    {
        _tokenSelectorProvider = tokenSelectorProvider;
        _baseFile = baseFile;
        _diffName = diffName;
        _jsonPath = jsonPath;
        _itemKey = itemKey;
        _categoriesFile = categoriesFile;
    }

    public void Execute(TestCaseContext testCaseContext)
    {
        var effectiveItemKey = testCaseContext.GetEffectiveItemKey(_itemKey);
        testCaseContext.DescriptionWriter.AddNote("AssertContentDiff", $"{effectiveItemKey} => file: {_diffName}, base: {_baseFile}");

        // Resolve the actual content, optionally narrowed by a JSONPath expression.
        var actual = testCaseContext.GetRequiredItemAsJToken(effectiveItemKey);
        if (!string.IsNullOrEmpty(_jsonPath))
        {
            actual = new JArray(actual.SelectTokens(_jsonPath));
        }

        var testCase = testCaseContext.TestCase;
        var expectedBase = testCase.GetObjectFromJson<JToken>(_baseFile);
        var diffs = JsonPatchCreator.Compare(expectedBase, actual, _tokenSelectorProvider);
        (var mismatches, var expected) = testCase.CompareAndRebase(actual, expectedBase, diffs, _diffName, _tokenSelectorProvider, testCaseContext.Options.IsRebaseline());

        // No mismatches means the content matches the (possibly rebased) baseline.
        if (mismatches.Count == 0)
        {
            return;
        }

        var matchedDiffCategories = testCaseContext.GetDiffCategories(actual, expected, _categoriesFile).ToList();
        throw new AssertException("Unexpected token changes:" + string.Join("\n", mismatches.Select(d => d.JsonPath)), matchedDiffCategories);
    }
}
}
| apache-2.0 |
BigData-Abgabe/abgabe | Abgabe04/Aufgabe2/bigdata/ThreeLetterCodeMapper.java | 3157 | package bigdata;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.StringUtils;
import java.io.IOException;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
public class ThreeLetterCodeMapper extends Mapper<Object, Text, Text, Text> {
private Text prot = new Text();
private Text aa = new Text();
private Configuration conf;
private BufferedReader fis;
Map<String, String> letterCode = new HashMap<String, String>();
static enum CountersEnum{ AA, Proteins }
@Override
public void setup(Context context)
throws IOException, InterruptedException {
conf= context.getConfiguration();
URI[] codeURIs= Job.getInstance(conf).getCacheFiles();
for(URI codeURI: codeURIs) {
Path patternsPath= new Path(codeURI.getPath());
String patternsFileName= patternsPath.getName().toString();
parseAcidFile(patternsFileName);
}
}
//parses Aminosäure File
private void parseAcidFile(String fileName) {
try{
fis= new BufferedReader(new FileReader(fileName));
String line= null;
String[] map = new String[2];
int counter;
while((line= fis.readLine()) != null) {
StringTokenizer itr = new StringTokenizer(line);
counter = 0;
while (itr.hasMoreTokens()) {
if (counter < 2){
map[counter] = itr.nextToken();
counter++;
}
else
break;
}
// System.out.println(line);
// System.out.println("Here is map : "+map[0]+"_"+map[1]);
letterCode.put(map[0],map[1]);
}
} catch (IOException e) {
System.err.println("Caughtexceptionwhileparsingthecachedfile'"+ StringUtils.stringifyException(e));
}
}
@Override
public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
String protein = value.toString();;
String threeCode= "";
Character c;
//Counts proteins
Counter proteins= context.getCounter(CountersEnum.class.getName(),CountersEnum.Proteins.toString());
proteins.increment(1);
for (int i = 0; i < protein.length();i++){
//counts aminoacids
Counter amino = context.getCounter(CountersEnum.class.getName(),CountersEnum.AA.toString());
amino.increment(1);
// converts one letter code in three letter code if pattern is given
c=protein.charAt(i);
String tmp = letterCode.get(c.toString() );
if ( tmp != null ){
threeCode += tmp;
}
else
System.out.println( protein.charAt(i)+" is not inplemented in Aminosäuredatei" );
}
//System.out.println( "New: "+threeCode );
aa.set(key.toString());
prot.set(threeCode);
context.write(aa, prot);
}
} | apache-2.0 |
ricardojsanchez/nanite-deployer | init.rb | 97 | register Rabbit.new
register Monit.new
register Initd.new
register Deployer.new
#register God.new | apache-2.0 |
g8os/core0 | apps/core0/bootstrap/network/protocol.go | 323 | package network
import "fmt"
// Protocol configures a network interface according to a specific
// network protocol. Implementations are looked up by name via GetProtocol.
type Protocol interface {
	Configure(mgr NetworkManager, inf string) error
}
var (
	// protocols maps a protocol name to its registered implementation.
	// NOTE(review): no registration helper is visible in this chunk; presumably
	// implementations add themselves to this map from an init() elsewhere.
	protocols = map[string]Protocol{}
)
// GetProtocol returns the protocol registered under name, or an error
// if no protocol with that name is known.
func GetProtocol(name string) (Protocol, error) {
	proto, ok := protocols[name]
	if !ok {
		return nil, fmt.Errorf("unknown protocol '%s'", name)
	}
	return proto, nil
}
| apache-2.0 |
uronce-cc/alluxio | shell/src/main/java/alluxio/shell/command/ChownCommand.java | 2834 | /*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.shell.command;
import alluxio.AlluxioURI;
import alluxio.client.file.FileSystem;
import alluxio.client.file.options.SetAttributeOptions;
import alluxio.exception.AlluxioException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import java.io.IOException;
import javax.annotation.concurrent.ThreadSafe;
/**
 * Shell command that changes the owner of a file or directory.
 */
@ThreadSafe
public final class ChownCommand extends AbstractShellCommand {

    /** Flag enabling recursive owner changes ({@code -R}). */
    private static final Option RECURSIVE_OPTION = Option.builder("R")
            .required(false)
            .hasArg(false)
            .desc("change owner recursively")
            .build();

    /**
     * Creates a new instance of {@link ChownCommand}.
     *
     * @param fs an Alluxio file system handle
     */
    public ChownCommand(FileSystem fs) {
        super(fs);
    }

    @Override
    public String getCommandName() {
        return "chown";
    }

    @Override
    protected int getNumOfArgs() {
        return 2;
    }

    @Override
    public Options getOptions() {
        return new Options().addOption(RECURSIVE_OPTION);
    }

    /**
     * Sets the owner of the file or directory at the given path.
     *
     * @param path the {@link AlluxioURI} to update
     * @param owner the new owner
     * @param recursive whether to apply the change to all descendants
     */
    private void chown(AlluxioURI path, String owner, boolean recursive)
            throws AlluxioException, IOException {
        SetAttributeOptions attributes = SetAttributeOptions.defaults()
                .setOwner(owner)
                .setRecursive(recursive);
        mFileSystem.setAttribute(path, attributes);
        System.out.println("Changed owner of " + path + " to " + owner);
    }

    @Override
    public int run(CommandLine cl) throws AlluxioException, IOException {
        String[] arguments = cl.getArgs();
        String newOwner = arguments[0];
        AlluxioURI targetPath = new AlluxioURI(arguments[1]);
        chown(targetPath, newOwner, cl.hasOption("R"));
        return 0;
    }

    @Override
    public String getUsage() {
        return "chown [-R] <owner> <path>";
    }

    @Override
    public String getDescription() {
        return "Changes the owner of a file or directory specified by args."
                + " Specify -R to change the owner recursively.";
    }
}
| apache-2.0 |
joewalnes/idea-community | plugins/groovy/src/org/jetbrains/plugins/groovy/lang/psi/impl/statements/expressions/types/GrInstanceofExpressionImpl.java | 2031 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.types;
import com.intellij.lang.ASTNode;
import com.intellij.psi.JavaPsiFacade;
import com.intellij.psi.PsiType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.plugins.groovy.debugger.GroovyPositionManager;
import org.jetbrains.plugins.groovy.lang.psi.GroovyElementVisitor;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrInstanceOfExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.types.GrTypeElement;
import org.jetbrains.plugins.groovy.lang.psi.impl.GroovyPsiManager;
import org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.GrExpressionImpl;
/**
* @author ven
*/
public class GrInstanceofExpressionImpl extends GrExpressionImpl implements GrInstanceOfExpression {
public GrInstanceofExpressionImpl(@NotNull ASTNode node) {
super(node);
}
public void accept(GroovyElementVisitor visitor) {
visitor.visitInstanceofExpression(this);
}
public String toString() {
return "Instanceof expression";
}
public PsiType getType() {
return getTypeByFQName("java.lang.Boolean");
}
public GrTypeElement getTypeElement() {
return findChildByClass(GrTypeElement.class);
}
public GrExpression getOperand() {
return findChildByClass(GrExpression.class);
}
} | apache-2.0 |
m3db/m3db | src/query/generated/mocks/generate.go | 3541 | // Copyright (c) 2018 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
// mockgen rules for generating mocks for exported interfaces (reflection mode).
//go:generate sh -c "mockgen -package=downsample $PACKAGE/src/cmd/services/m3coordinator/downsample Downsampler,MetricsAppender,SamplesAppender | genclean -pkg $PACKAGE/src/cmd/services/m3coordinator/downsample -out $GOPATH/src/$PACKAGE/src/cmd/services/m3coordinator/downsample/downsample_mock.go"
//go:generate sh -c "mockgen -package=storage -destination=$GOPATH/src/$PACKAGE/src/query/storage/storage_mock.go $PACKAGE/src/query/storage Storage"
//go:generate sh -c "mockgen -package=m3 -destination=$GOPATH/src/$PACKAGE/src/query/storage/m3/m3_mock.go $PACKAGE/src/query/storage/m3 Storage,ClusterNamespace"
//go:generate sh -c "mockgen -package=ts -destination=$GOPATH/src/$PACKAGE/src/query/ts/ts_mock.go $PACKAGE/src/query/ts Values"
//go:generate sh -c "mockgen -package=block -destination=$GOPATH/src/$PACKAGE/src/query/block/block_mock.go $PACKAGE/src/query/block Block,StepIter,Builder,Step,SeriesIter"
//go:generate sh -c "mockgen -package=ingest -destination=$GOPATH/src/$PACKAGE/src/cmd/services/m3coordinator/ingest/write_mock.go $PACKAGE/src/cmd/services/m3coordinator/ingest DownsamplerAndWriter"
//go:generate sh -c "mockgen -package=transform -destination=$GOPATH/src/$PACKAGE/src/query/executor/transform/types_mock.go $PACKAGE/src/query/executor/transform OpNode"
//go:generate sh -c "mockgen -package=executor -destination=$GOPATH/src/$PACKAGE/src/query/executor/types_mock.go $PACKAGE/src/query/executor Engine"
//go:generate sh -c "mockgen -package=storage -destination=$GOPATH/src/$PACKAGE/src/query/graphite/storage/storage_mock.go $PACKAGE/src/query/graphite/storage Storage"
// mockgen rules for generating mocks for unexported interfaces (file mode).
//go:generate sh -c "mockgen -package=m3ql -destination=$GOPATH/src/github.com/m3db/m3/src/query/parser/m3ql/types_mock.go -source=$GOPATH/src/github.com/m3db/m3/src/query/parser/m3ql/types.go"
//go:generate sh -c "mockgen -package=transform -destination=$GOPATH/src/github.com/m3db/m3/src/query/executor/transform/exec_mock.go -source=$GOPATH/src/github.com/m3db/m3/src/query/executor/transform/exec.go"
//go:generate sh -c "mockgen -package=temporal -destination=$GOPATH/src/github.com/m3db/m3/src/query/functions/temporal/dependencies_mock.go -source=$GOPATH/src/github.com/m3db/m3/src/query/functions/temporal/dependencies.go" controller
package mocks
| apache-2.0 |
open-orchestra/open-orchestra-theme-bundle | ThemeBundle/Tests/Form/Type/ThemeChoiceTypeTest.php | 1433 | <?php
namespace OpenOrchestra\ThemeBundle\Tests\Form\Type;
use OpenOrchestra\BaseBundle\Tests\AbstractTest\AbstractBaseTestCase;
use OpenOrchestra\ThemeBundle\Form\Type\ThemeChoiceType;
use Phake;
/**
 * Unit tests for the ThemeChoiceType form type.
 */
class ThemeChoiceTypeTest extends AbstractBaseTestCase
{
    protected $themeChoiceType;

    /**
     * Builds the form type under test with two dummy themes.
     */
    public function setUp()
    {
        $availableThemes = array(
            'themeId1' => array('name' => 'Dummy theme #1'),
            'themeId2' => array('name' => 'Dummy theme #2'),
        );

        $this->themeChoiceType = new ThemeChoiceType($availableThemes);
    }

    /**
     * The resolver must receive the theme names keyed by theme id.
     */
    public function testSetDefaultOptions()
    {
        $resolverMock = Phake::mock('Symfony\Component\OptionsResolver\OptionsResolver');

        $this->themeChoiceType->configureOptions($resolverMock);

        $expectedChoices = array(
            'themeId1' => 'Dummy theme #1',
            'themeId2' => 'Dummy theme #2',
        );
        Phake::verify($resolverMock)->setDefaults(array('choices' => $expectedChoices));
    }

    /**
     * The form type extends the core "choice" type.
     */
    public function testGetParent()
    {
        $this->assertEquals('choice', $this->themeChoiceType->getParent());
    }

    /**
     * The form type is registered under its orchestra alias.
     */
    public function testGetName()
    {
        $this->assertEquals('orchestra_theme_choice', $this->themeChoiceType->getName());
    }
}
| apache-2.0 |
SE-Group1/HookedUp | HookedUp/api/company/posts/index.php | 483 | <?php
require $_SERVER['DOCUMENT_ROOT'] . '/api/tools.php';
$method = htmlspecialchars($_SERVER['REQUEST_METHOD']);
switch ($method) {
case 'GET':
if (!$companyId = getGETSafe('companyId')) {
failure("companyId required arg");
}
$query = "SELECT id FROM post WHERE originCompanyId = ? ORDER BY createdAt DESC";
$posts = exec_stmt($query, "s", $companyId);
success($posts);
}
?> | apache-2.0 |
wimsymons/sling | testing/http/clients/src/main/java/org/apache/sling/testing/clients/html/MicrodataClient.java | 3281 | /*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
******************************************************************************/
package org.apache.sling.testing.clients.html;
import org.apache.http.HttpEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.sling.hapi.client.HtmlClient;
import org.apache.sling.hapi.client.microdata.MicrodataDocument;
import org.apache.sling.testing.clients.ClientException;
import org.apache.sling.testing.clients.SlingClient;
import org.apache.sling.testing.clients.SlingClientConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.URI;
/**
 * {@link HtmlClient} backed by a {@link SlingClient}, producing
 * {@link MicrodataDocument} instances from fetched HTML.
 */
public class MicrodataClient extends SlingClient implements HtmlClient {
    protected static final Logger LOG = LoggerFactory.getLogger(MicrodataClient.class);

    public MicrodataClient(CloseableHttpClient http, SlingClientConfig config) throws ClientException {
        super(http, config);
    }

    public MicrodataClient(URI url, String user, String password) throws ClientException {
        super(url, user, password);
    }

    /** Entering a URL is equivalent to fetching it with GET. */
    @Override
    public MicrodataDocument enter(String url) throws org.apache.sling.hapi.client.ClientException {
        return get(url);
    }

    @Override
    public MicrodataDocument get(String url) throws org.apache.sling.hapi.client.ClientException {
        try {
            String content = doGet(url).getContent();
            return newDocument(content);
        } catch (ClientException e) {
            throw new org.apache.sling.hapi.client.ClientException("Cannot create Microdata document", e);
        }
    }

    @Override
    public MicrodataDocument post(String url, HttpEntity entity) throws org.apache.sling.hapi.client.ClientException {
        try {
            String content = doPost(url, entity).getContent();
            return newDocument(content);
        } catch (ClientException e) {
            throw new org.apache.sling.hapi.client.ClientException("Cannot create Microdata document", e);
        }
    }

    @Override
    public MicrodataDocument delete(String url) throws org.apache.sling.hapi.client.ClientException {
        try {
            String content = doDelete(url, null, null).getContent();
            return newDocument(content);
        } catch (ClientException e) {
            throw new org.apache.sling.hapi.client.ClientException("Cannot create Microdata document", e);
        }
    }

    @Override
    public MicrodataDocument newDocument(String html) {
        return new MicrodataDocument(html, this, this.getUrl().toString());
    }
}
| apache-2.0 |
wlod4r/playground | src/test/java/com/gft/mlwz/AppTest.java | 640 | package com.gft.mlwz;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
 * Unit test for simple App.
 */
public class AppTest extends TestCase {

    /**
     * Creates the test case.
     *
     * @param testName name of the test case
     */
    public AppTest(String testName) {
        super(testName);
    }

    /**
     * @return the suite of tests being tested
     */
    public static Test suite() {
        return new TestSuite(AppTest.class);
    }

    /**
     * Trivial smoke test that always passes.
     */
    public void testApp() {
        assertTrue(true);
    }
}
| apache-2.0 |
Azure/azure-mobile-apps-net-client | e2etests/DeviceTests.Shared/Helpers/Models/OfflineReadyItem.cs | 2749 | // ----------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// ----------------------------------------------------------------------------
using Microsoft.WindowsAzure.MobileServices;
using Newtonsoft.Json;
using System;
using System.Globalization;
namespace DeviceTests.Shared.Helpers.Models
{
[DataTable("OfflineReady")]
public class OfflineReadyItem
{
    [JsonProperty("id")]
    public string Id { get; set; }

    [JsonProperty("version")]
    public string Version { get; set; }

    [JsonProperty("name")]
    public string Name { get; set; }

    [JsonProperty("age")]
    public int Age { get; set; }

    [JsonProperty("float")]
    public double FloatingNumber { get; set; }

    [JsonProperty("date")]
    public DateTimeOffset Date { get; set; }

    [JsonProperty("bool")]
    public bool Flag { get; set; }

    public OfflineReadyItem() { }

    /// <summary>Populates the item with pseudo-random values.</summary>
    public OfflineReadyItem(Random rndGen)
    {
        // NOTE: the order of rndGen calls is significant for reproducibility.
        this.Name = Utilities.CreateSimpleRandomString(rndGen, 10);
        this.Age = rndGen.Next();
        this.FloatingNumber = rndGen.Next() * rndGen.NextDouble();
        this.Date = new DateTime(rndGen.Next(1980, 2000), rndGen.Next(1, 12), rndGen.Next(1, 25), rndGen.Next(0, 24), rndGen.Next(0, 60), rndGen.Next(0, 60), DateTimeKind.Utc);
        this.Flag = rndGen.Next(2) == 0;
    }

    public override string ToString()
    {
        return string.Format(CultureInfo.InvariantCulture, "OfflineItem[Id={0},Name={1},Age={2},FloatingNumber={3},Date={4},Flag={5},Version={6}",
            this.Id, this.Name, this.Age, this.FloatingNumber,
            this.Date.ToString("o", CultureInfo.InvariantCulture), this.Flag, this.Version);
    }

    /// <summary>
    /// Equality ignores Id and Version and tolerates a small floating point
    /// difference; GetHashCode therefore excludes FloatingNumber on purpose.
    /// </summary>
    public override bool Equals(object obj)
    {
        const double acceptableDifference = 1e-6;
        var other = obj as OfflineReadyItem;
        if (other == null)
        {
            return false;
        }
        return this.Age == other.Age
            && this.Date.ToUniversalTime().Equals(other.Date.ToUniversalTime())
            && this.Flag == other.Flag
            && this.Name == other.Name
            && Math.Abs(this.FloatingNumber - other.FloatingNumber) <= acceptableDifference;
    }

    public override int GetHashCode()
    {
        return this.Age.GetHashCode()
            ^ this.Date.ToUniversalTime().GetHashCode()
            ^ this.Flag.GetHashCode()
            ^ this.Name.GetHashCode();
    }
}
} | apache-2.0 |
slothOn/whuassistant | Whuassist1/src/com/example/whuassist/test/NewsParserTest.java | 909 | package com.example.whuassist.test;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
public class NewsParserTest {

    /**
     * Reads a saved HTML page from a hard-coded local path, parses it with
     * jsoup and prints the text content of every {@code <table>} element.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        File f = new File("E:\\³ÌÐò2\\android_work1\\indexzihuan.txt");
        StringBuilder sb = new StringBuilder();
        String line;
        // try-with-resources closes the reader (and its underlying stream)
        // on every path; the original leaked two FileInputStreams, one of
        // which ("input") was opened and never used at all.
        try (BufferedReader reader =
                new BufferedReader(new InputStreamReader(new FileInputStream(f)))) {
            while ((line = reader.readLine()) != null) {
                sb.append(line);
            }
            Document doc = Jsoup.parse(sb.toString());
            Elements tables = doc.getElementsByTag("table");
            for (Element e : tables) {
                System.out.println(e.text());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
| apache-2.0 |
torakiki/sambox | src/main/java/org/sejda/sambox/pdmodel/interactive/annotation/PDAnnotationLink.java | 7853 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sejda.sambox.pdmodel.interactive.annotation;
import static java.util.Optional.ofNullable;
import java.io.IOException;
import org.sejda.sambox.cos.COSArray;
import org.sejda.sambox.cos.COSBase;
import org.sejda.sambox.cos.COSDictionary;
import org.sejda.sambox.cos.COSName;
import org.sejda.sambox.pdmodel.interactive.action.PDAction;
import org.sejda.sambox.pdmodel.interactive.action.PDActionFactory;
import org.sejda.sambox.pdmodel.interactive.action.PDActionURI;
import org.sejda.sambox.pdmodel.interactive.annotation.handlers.PDAppearanceHandler;
import org.sejda.sambox.pdmodel.interactive.annotation.handlers.PDLinkAppearanceHandler;
import org.sejda.sambox.pdmodel.interactive.documentnavigation.destination.PDDestination;
/**
* This is the class that represents a link annotation.
*
* @author Ben Litchfield
* @author Paul King
*/
public class PDAnnotationLink extends PDAnnotation
{
    private PDAppearanceHandler customAppearanceHandler;

    /**
     * Constant values of the Text as defined in the PDF 1.6 reference Table 8.19.
     */
    public static final String HIGHLIGHT_MODE_NONE = "N";
    /**
     * Constant values of the Text as defined in the PDF 1.6 reference Table 8.19.
     */
    public static final String HIGHLIGHT_MODE_INVERT = "I";
    /**
     * Constant values of the Text as defined in the PDF 1.6 reference Table 8.19.
     */
    public static final String HIGHLIGHT_MODE_OUTLINE = "O";
    /**
     * Constant values of the Text as defined in the PDF 1.6 reference Table 8.19.
     */
    public static final String HIGHLIGHT_MODE_PUSH = "P";
    /**
     * The type of annotation.
     */
    public static final String SUB_TYPE = "Link";

    /**
     * Constructor.
     */
    public PDAnnotationLink()
    {
        getCOSObject().setName(COSName.SUBTYPE, SUB_TYPE);
    }

    /**
     * Creates a Link annotation from a COSDictionary, expected to be a correct object definition.
     *
     * @param field the PDF objet to represent as a field.
     */
    public PDAnnotationLink(COSDictionary field)
    {
        super(field);
    }

    /**
     * Get the action to be performed when this annotation is to be activated.
     *
     * @return The action to be performed when this annotation is activated, or null if the A entry
     * is missing or is not a dictionary.
     *
     * TODO not all annotations have an A entry
     */
    public PDAction getAction()
    {
        // Guard the cast so a malformed /A entry (non-dictionary) yields null
        // instead of a ClassCastException, consistent with getBorderStyle().
        COSBase action = this.getCOSObject().getDictionaryObject(COSName.A);
        if (action instanceof COSDictionary)
        {
            return PDActionFactory.createAction((COSDictionary) action);
        }
        return null;
    }

    /**
     * Set the annotation action. As of PDF 1.6 this is only used for Widget Annotations
     *
     * @param action The annotation action. TODO not all annotations have an A entry
     */
    public void setAction(PDAction action)
    {
        this.getCOSObject().setItem(COSName.A, action);
    }

    /**
     * This will set the border style dictionary, specifying the width and dash pattern used in drawing the line.
     *
     * @param bs the border style dictionary to set. TODO not all annotations may have a BS entry
     *
     */
    public void setBorderStyle(PDBorderStyleDictionary bs)
    {
        this.getCOSObject().setItem(COSName.BS, bs);
    }

    /**
     * This will retrieve the border style dictionary, specifying the width and dash pattern used in drawing the line.
     *
     * @return the border style dictionary, or null if absent or malformed.
     */
    public PDBorderStyleDictionary getBorderStyle()
    {
        COSBase bs = this.getCOSObject().getDictionaryObject(COSName.BS);
        if (bs instanceof COSDictionary)
        {
            return new PDBorderStyleDictionary((COSDictionary) bs);
        }
        return null;
    }

    /**
     * Get the destination to be displayed when the annotation is activated. Either this or the A should be set but not
     * both.
     *
     * @return The destination for this annotation.
     *
     * @throws IOException If there is an error creating the destination.
     */
    public PDDestination getDestination() throws IOException
    {
        return PDDestination.create(getCOSObject().getDictionaryObject(COSName.DEST));
    }

    /**
     * The new destination value.
     *
     * @param dest The updated destination.
     */
    public void setDestination(PDDestination dest)
    {
        getCOSObject().setItem(COSName.DEST, dest);
    }

    /**
     * Get the highlight mode for when the mouse is depressed. See the HIGHLIGHT_MODE_XXX constants.
     *
     * @return The string representation of the highlight mode (defaults to invert).
     */
    public String getHighlightMode()
    {
        return getCOSObject().getNameAsString(COSName.H, HIGHLIGHT_MODE_INVERT);
    }

    /**
     * Set the highlight mode. See the HIGHLIGHT_MODE_XXX constants.
     *
     * @param mode The new highlight mode.
     */
    public void setHighlightMode(String mode)
    {
        getCOSObject().setName(COSName.H, mode);
    }

    /**
     * This will set the previous URI action, in case it needs to be retrieved at later date.
     *
     * @param pa The previous URI.
     */
    public void setPreviousURI(PDActionURI pa)
    {
        getCOSObject().setItem("PA", pa);
    }

    /**
     * This will retrieve the previous URI action, in case it's needed.
     *
     * @return The previous URI, or null if absent or malformed.
     */
    public PDActionURI getPreviousURI()
    {
        // Guard the cast so a malformed /PA entry yields null instead of a
        // ClassCastException, consistent with getBorderStyle().
        COSBase pa = getCOSObject().getDictionaryObject("PA");
        if (pa instanceof COSDictionary)
        {
            return new PDActionURI((COSDictionary) pa);
        }
        return null;
    }

    /**
     * This will set the set of quadpoints which encompass the areas of this annotation which will activate.
     *
     * @param quadPoints an array representing the set of area covered.
     */
    public void setQuadPoints(float[] quadPoints)
    {
        COSArray newQuadPoints = new COSArray();
        newQuadPoints.setFloatArray(quadPoints);
        getCOSObject().setItem(COSName.QUADPOINTS, newQuadPoints);
    }

    /**
     * This will retrieve the set of quadpoints which encompass the areas of this annotation which will activate.
     *
     * @return An array of floats representing the quad points, or null if absent.
     */
    public float[] getQuadPoints()
    {
        return ofNullable(getCOSObject().getDictionaryObject(COSName.QUADPOINTS, COSArray.class))
                .map(COSArray::toFloatArray).orElse(null);
    }

    /**
     * Set a custom appearance handler for generating the annotations appearance streams.
     *
     * @param appearanceHandler the handler to use instead of the default link handler.
     */
    public void setCustomAppearanceHandler(PDAppearanceHandler appearanceHandler)
    {
        customAppearanceHandler = appearanceHandler;
    }

    @Override
    public void constructAppearances()
    {
        // Fall back to the default link appearance handler unless a custom
        // handler was installed via setCustomAppearanceHandler().
        if (customAppearanceHandler == null)
        {
            PDLinkAppearanceHandler appearanceHandler = new PDLinkAppearanceHandler(this);
            appearanceHandler.generateAppearanceStreams();
        }
        else
        {
            customAppearanceHandler.generateAppearanceStreams();
        }
    }
}
| apache-2.0 |
Richard-Linsdale/createmesh | src/main/java/uk/theretiredprogrammer/createmesh/ObjWriter.java | 7455 | /*
* Copyright 2015-2017 Richard Linsdale.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.theretiredprogrammer.createmesh;
import uk.theretiredprogrammer.createmesh.Point.PointType;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
/**
*
* @author Richard Linsdale - richard at theretiredprogrammer.uk
*/
public class ObjWriter {

    private final PrintWriter out;
    // 1-based index of the next vertex written; .obj faces reference vertices
    // by their 1-based position in the file.
    private int nextvertex = 1;

    /**
     * Opens the .obj output file and writes the header: object name plus the
     * reference to the material library.
     *
     * @param filepath path of the .obj file to create
     * @param modelname name recorded in the "o" statement
     * @param mtlpath path recorded in the "mtllib" statement
     * @throws IOException if the file cannot be created
     */
    public ObjWriter(String filepath, String modelname, String mtlpath) throws IOException {
        out = new PrintWriter(new BufferedWriter(new FileWriter(filepath)));
        out.println("#\n# .obj file (3D mesh for input to blender)\n#");
        out.println("o " + modelname + "\nmtllib " + mtlpath);
    }

    /** Writes the trailer comment and closes the underlying file. */
    public void close() {
        out.println("# end");
        out.close();
    }

    /**
     * Writes one "v" statement per point, scaling only the z (height) value.
     */
    public void writePoints(List<Point> points, double heightScaling) {
        out.println("# vertices");
        for (Point point : points) {
            writeV(point.x, point.y, point.z * heightScaling);
        }
    }

    /**
     * Emits each track segment as a six-vertex face approximating a line of
     * the given thickness between consecutive points of each range.
     *
     * NOTE(review): unlike writePoints, the z values written here are the raw
     * point depths with no heightScaling applied even though the parameter is
     * accepted -- confirm whether that is intentional.
     */
    public void writeTracks(List<Point> points, List<Range> trackranges,
            String groupname, String material,
            boolean altitudeMode, double heightScaling, double lineThickness) {
        out.println("# tracks\ng " + groupname + "\nusemtl " + material);
        for (Range range : trackranges) {
            for (int i = range.min; i < range.max; i++) {
                Point p = points.get(i);
                Point n = points.get(i + 1);
                // Offsets derived from the segment bearing; used to fatten the
                // line into a polygon of width lineThickness.
                Polar a = new Polar(p, n);
                double rad = Math.toRadians(a.angle);
                double xdelta = Math.cos(rad) * lineThickness / 2;
                double ydelta = Math.sin(rad) * lineThickness / 2;
                //
                int nv = nextvertex;
                writeV(p.x - xdelta, p.y - ydelta, altitudeMode ? p.z : 0);
                writeV(p.x - ydelta, p.y - xdelta, altitudeMode ? p.z : 0);
                writeV(p.x + xdelta, p.y + ydelta, altitudeMode ? p.z : 0);
                writeV(n.x + xdelta, n.y + ydelta, altitudeMode ? n.z : 0);
                // NOTE(review): this vertex near n uses p.z (not n.z) -- looks
                // like a possible typo; confirm against the intended geometry.
                writeV(n.x + ydelta, n.y + xdelta, altitudeMode ? p.z : 0);
                writeV(n.x - xdelta, n.y - ydelta, altitudeMode ? n.z : 0);
                writeF(new int[]{nv, nv + 1, nv + 2, nv + 3, nv + 4, nv + 5});
            }
        }
    }

    // Writes a single "v" statement and advances the running vertex counter,
    // echoing the assigned index as a trailing comment.
    private void writeV(double x, double y, double z) {
        out.println(String.format("v %9.2f %9.2f %9.2f # %5d", x, y, z, nextvertex++));
    }

    /** Writes all faces under one group with a single material. */
    public void writeFaces(List<Face> faces, String groupname, String material) {
        out.println("# faces\ng " + groupname + "\nusemtl " + material);
        for (Face face : faces) {
            writeFplus(face.facepoints);
        }
    }

    /**
     * Writes all faces, choosing a depth-gradient material per face (or the
     * land material for faces above the reduced waterline).
     */
    public void writeFaces(List<Face> faces, String groupname, String[] materials,
            double colouredstepsize, String landmaterials, double depthreduction) {
        out.println("# faces\ng " + groupname);
        for (Face face : faces) {
            writeMaterial(materials, landmaterials, face.depth, colouredstepsize, depthreduction);
            writeFplus(face.facepoints);
        }
    }

    // Selects a "usemtl" statement by bucketing the (negative) depth into
    // colouredstepsize-wide bands; depths above zero map to the land material.
    private void writeMaterial(String[] materials, String landmaterials, double mindepth,
            double colouredstepsize, double depthreduction) {
        // note depth is negative (z) and depth reduction is positive for a reduction
        mindepth += depthreduction;
        if (mindepth > 0) {
            out.println("usemtl " + landmaterials); // set the material to landcolour
            return;
        }
        int depths = materials.length - 1;
        for (int i = 0; i < depths; i++){
            if (mindepth > -colouredstepsize) {
                out.println("usemtl " + materials[i]); // set the material to the gradient colour
                return;
            }
            mindepth += colouredstepsize;
        }
        out.println("usemtl " + materials[depths]); // set the material to the final gradient colour
    }

    /**
     * Writes MARK points into one group and REFERENCE points into another,
     * rendering each as a small tetrahedron via drawMark.
     */
    public void writeMarks(List<Point> marks, String markgroupname, String markmaterial, Double marksize,
            String marktailmaterial, double lineThickness,
            String refmarkgroupname, String refmarkmaterial, Double refmarksize,
            boolean altitudeMode, double heightScaling) {
        out.println("# marks\ng " + markgroupname);
        for (Point mark : marks) {
            if (mark.type == PointType.MARK) {
                drawMark(mark, marksize, markmaterial, marktailmaterial,
                        altitudeMode, heightScaling, lineThickness);
            }
        }
        out.println("# refmarks\ng " + refmarkgroupname);
        for (Point mark : marks) {
            if (mark.type == PointType.REFERENCE) {
                drawMark(mark, refmarksize, refmarkmaterial, marktailmaterial,
                        altitudeMode, heightScaling, lineThickness);
            }
        }
    }

    // Emits a tetrahedron (apex above a triangular base) for one mark; when
    // not in altitude mode and the mark has a depth, also draws two crossed
    // vertical quads ("tail") from the scaled depth up to z=0.
    private void drawMark(Point mark, double size, String mainmaterial, String tailmaterial,
            boolean altitudeMode, double heightScaling, double lineThickness) {
        int nv = nextvertex;
        writeV(mark.x, mark.y, size * 1.414); // top point
        writeV(mark.x, mark.y + size, 0);
        writeV(mark.x - size * 0.866, mark.y - size * 0.5, 0);
        writeV(mark.x + size * 0.866, mark.y - size * 0.5, 0);
        if ((!altitudeMode) && (mark.z != 0)) {
            // draw the vertical line to depth
            out.println("usemtl " + tailmaterial);
            int lv = nextvertex;
            writeV(mark.x-lineThickness/2, mark.y, mark.z * heightScaling);
            writeV(mark.x - lineThickness/2, mark.y, 0.0);
            writeV(mark.x + lineThickness/2, mark.y, 0.0);
            writeV(mark.x+lineThickness/2, mark.y, mark.z * heightScaling);
            writeF(new int[] {lv, lv+1, lv+2, lv+3});
            lv = nextvertex;
            writeV(mark.x, mark.y-lineThickness/2, mark.z * heightScaling);
            writeV(mark.x, mark.y - lineThickness/2, 0.0);
            writeV(mark.x, mark.y + lineThickness/2, 0.0);
            writeV(mark.x, mark.y+lineThickness/2, mark.z * heightScaling);
            writeF(new int[] {lv, lv+1, lv+2, lv+3});
        }
        out.println("usemtl " + mainmaterial);
        writeF(new int[]{nv, nv + 1, nv + 2});
        writeF(new int[]{nv, nv + 2, nv + 3});
        writeF(new int[]{nv, nv + 3, nv + 1});
    }

    // Writes an "f" statement from already 1-based vertex indices.
    private void writeF(int[] vs) {
        out.print("f ");
        for (int i = 0; i < vs.length; i++) {
            out.print(vs[i]);
            out.print(" ");
        }
        out.println();
    }

    // Writes an "f" statement from 0-based indices, converting to the 1-based
    // numbering .obj requires (hence the "+1").
    private void writeFplus(int[] vs) {
        out.print("f ");
        for (int i = 0; i < vs.length; i++) {
            out.print(vs[i]+1);
            out.print(" ");
        }
        out.println();
    }
}
| apache-2.0 |
sonarwhal/sonar | packages/utils/src/misc/normalize-includes.ts | 288 | import { normalizeString } from './normalize-string';
/** Checks whether the normalized `source` string contains the normalized `included` string. */
export const normalizeIncludes = (source: string, included: string) => {
    const normalizedSource = normalizeString(source)!;
    const normalizedIncluded = normalizeString(included)!;

    return normalizedSource.includes(normalizedIncluded);
};
| apache-2.0 |
google-research/google-research | homophonous_logography/neural/transformer_model.py | 21315 | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple sequence-to-sequence transformer model.
Loosely based on:
https://blog.tensorflow.org/2019/05/transformer-chatbot-tutorial-with-tensorflow-2.html
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import time
import numpy as np
import tensorflow as tf # tf
import homophonous_logography.neural.corpus as data
import homophonous_logography.neural.utils as utils
# Force graph-mode execution: this module uses TF1-style behavior (explicit
# tf.function tracing, no eager tensors at module level).
tf.config.run_functions_eagerly(False)
tf.compat.v1.disable_eager_execution()
def _create_padding_mask(x):
  """Marks zero-valued (padding) token ids with 1.0 for use as an attention mask."""
  pad = tf.cast(tf.math.equal(x, 0), tf.float32)
  # Shape: (batch_size, 1, 1, sequence_length), broadcastable across heads
  # and query positions.
  return pad[:, tf.newaxis, tf.newaxis, :]
def _create_look_ahead_mask(x):
  """Causal mask combined with the padding mask, for decoder self-attention."""
  length = tf.shape(x)[1]
  # Strictly-upper-triangular ones mask out future positions.
  causal_mask = 1 - tf.linalg.band_part(tf.ones((length, length)), -1, 0)
  # Element-wise max merges "is future" with "is padding".
  return tf.maximum(causal_mask, _create_padding_mask(x))
def _scaled_dot_product_attention(query, key, value, mask):
  """Computes softmax(Q K^T / sqrt(d_k) + mask * -1e9) V.

  Returns a (context, attention_weights) pair.
  """
  scores = tf.matmul(query, key, transpose_b=True)
  key_depth = tf.cast(tf.shape(key)[-1], tf.float32)
  scaled_scores = scores / tf.math.sqrt(key_depth)
  # Push masked positions towards -inf so they vanish after the softmax.
  if mask is not None:
    scaled_scores += (mask * -1e9)
  weights = tf.nn.softmax(scaled_scores, axis=-1)
  return tf.matmul(weights, value), weights
class MultiHeadAttention(tf.keras.layers.Layer):
  """Multi-head attention implementation.

  Projects query/key/value to `d_model`, splits into `num_heads` heads of
  size d_model // num_heads, applies scaled dot-product attention per head,
  then concatenates and projects back to `d_model`.
  """

  def __init__(self, d_model, num_heads, name="multi_head_attention"):
    super(MultiHeadAttention, self).__init__(name=name)
    self.num_heads = num_heads
    self.d_model = d_model

    # Head dimension must divide the model dimension evenly.
    assert d_model % self.num_heads == 0

    self.depth = d_model // self.num_heads

    self.query_dense = tf.keras.layers.Dense(units=d_model)
    self.key_dense = tf.keras.layers.Dense(units=d_model)
    self.value_dense = tf.keras.layers.Dense(units=d_model)

    self.dense = tf.keras.layers.Dense(units=d_model)

  def split_heads(self, inputs, batch_size):
    # (batch, seq, d_model) -> (batch, num_heads, seq, depth).
    inputs = tf.reshape(
        inputs, shape=(batch_size, -1, self.num_heads, self.depth))
    return tf.transpose(inputs, perm=[0, 2, 1, 3])

  def call(self, inputs):
    """Runs attention; `inputs` is a dict with query/key/value/mask tensors.

    Returns (outputs, attention_weights).
    """
    query, key, value, mask = inputs["query"], inputs["key"], inputs[
        "value"], inputs["mask"]
    batch_size = tf.shape(query)[0]

    # linear layers
    query = self.query_dense(query)
    key = self.key_dense(key)
    value = self.value_dense(value)

    # split heads
    query = self.split_heads(query, batch_size)
    key = self.split_heads(key, batch_size)
    value = self.split_heads(value, batch_size)

    scaled_attention, attention_weights = _scaled_dot_product_attention(
        query, key, value, mask)
    # Merge heads back: (batch, seq, num_heads, depth) -> (batch, seq, d_model).
    scaled_attention = tf.transpose(scaled_attention, perm=[0, 2, 1, 3])
    concat_attention = tf.reshape(scaled_attention,
                                  (batch_size, -1, self.d_model))
    outputs = self.dense(concat_attention)
    return outputs, attention_weights
class PositionalEncoding(tf.keras.layers.Layer):
  """Trigonometric positional encoding."""

  def __init__(self, position, d_model):
    super(PositionalEncoding, self).__init__()
    # Precompute the full (1, position, d_model) table once at build time.
    self.pos_encoding = self.positional_encoding(position, d_model)

  def get_angles(self, position, i, d_model):
    # Per-dimension angular frequency: 1 / 10000^(2*(i//2)/d_model).
    angles = 1 / tf.pow(10000, (2 * (i // 2)) / tf.cast(d_model, tf.float32))
    return position * angles

  def positional_encoding(self, position, d_model):
    """Builds the sinusoidal table of shape (1, position, d_model)."""
    angle_rads = self.get_angles(
        position=tf.range(position, dtype=tf.float32)[:, tf.newaxis],
        i=tf.range(d_model, dtype=tf.float32)[tf.newaxis, :],
        d_model=d_model)
    # apply sin to even index in the array
    sines = tf.math.sin(angle_rads[:, 0::2])
    # apply cos to odd index in the array
    cosines = tf.math.cos(angle_rads[:, 1::2])
    # Note: the sin and cos halves are concatenated rather than interleaved.
    pos_encoding = tf.concat([sines, cosines], axis=-1)
    pos_encoding = pos_encoding[tf.newaxis, Ellipsis]
    return tf.cast(pos_encoding, tf.float32)

  def call(self, inputs):
    # Add the table, truncated to the actual sequence length, to the
    # (already scaled) token embeddings.
    return inputs + self.pos_encoding[:, :tf.shape(inputs)[1], :]
def _encoder_layer(units, d_model, num_heads, dropout, name="encoder_layer"):
  """One layer of the encoder: self-attention + position-wise feed-forward,
  each followed by dropout, a residual connection and layer normalization."""
  inputs = tf.keras.Input(shape=(None, d_model), name="inputs")
  padding_mask = tf.keras.Input(shape=(1, 1, None), name="padding_mask")

  # Self-attention over the encoder inputs; padded positions are masked out.
  attention, _ = MultiHeadAttention(
      d_model, num_heads, name="attention")({
          "query": inputs,
          "key": inputs,
          "value": inputs,
          "mask": padding_mask
      })
  attention = tf.keras.layers.Dropout(rate=dropout)(attention)
  # Residual connection + layer norm.
  attention = tf.keras.layers.LayerNormalization(
      epsilon=1e-6)(inputs + attention)

  # Position-wise feed-forward network (expand to `units`, project back).
  outputs = tf.keras.layers.Dense(units=units, activation="relu")(attention)
  outputs = tf.keras.layers.Dense(units=d_model)(outputs)
  outputs = tf.keras.layers.Dropout(rate=dropout)(outputs)
  outputs = tf.keras.layers.LayerNormalization(
      epsilon=1e-6)(attention + outputs)

  return tf.keras.Model(
      inputs=[inputs, padding_mask], outputs=outputs, name=name)
# Limit the lengths of input sequences.
_MAX_SEQUENCE_LENGTH = 500
def _encoder(vocab_size,
             num_layers,
             units,
             d_model,
             num_heads,
             dropout,
             name="encoder"):
  """Encoder component: embedding + positional encoding + N encoder layers."""
  inputs = tf.keras.Input(shape=(None,), name="inputs")
  padding_mask = tf.keras.Input(shape=(1, 1, None), name="padding_mask")

  embeddings = tf.keras.layers.Embedding(vocab_size, d_model)(inputs)
  # Scale embeddings by sqrt(d_model) before adding positional encodings.
  embeddings *= tf.math.sqrt(tf.cast(d_model, tf.float32))
  embeddings = PositionalEncoding(_MAX_SEQUENCE_LENGTH, d_model)(embeddings)

  outputs = tf.keras.layers.Dropout(rate=dropout)(embeddings)

  # Stack `num_layers` identical encoder layers.
  for i in range(num_layers):
    outputs = _encoder_layer(
        units=units,
        d_model=d_model,
        num_heads=num_heads,
        dropout=dropout,
        name="encoder_layer_{}".format(i),
    )([outputs, padding_mask])

  return tf.keras.Model(
      inputs=[inputs, padding_mask], outputs=outputs, name=name)
def _decoder_layer(units, d_model, num_heads, dropout, name="decoder_layer"):
  """Single decoder layer: masked self-attention, encoder-decoder attention,
  then a position-wise feed-forward network, each with residual + layer norm.

  Returns the layer outputs plus both attention-weight tensors.
  """
  inputs = tf.keras.Input(shape=(None, d_model), name="inputs")
  enc_outputs = tf.keras.Input(shape=(None, d_model), name="encoder_outputs")
  look_ahead_mask = tf.keras.Input(
      shape=(1, None, None), name="look_ahead_mask")
  padding_mask = tf.keras.Input(shape=(1, 1, None), name="padding_mask")

  # Block 1: masked self-attention over the (shifted) decoder inputs.
  attention1, attention_weights_block1 = MultiHeadAttention(
      d_model, num_heads, name="attention_1")(inputs={
          "query": inputs,
          "key": inputs,
          "value": inputs,
          "mask": look_ahead_mask
      })
  attention1 = tf.keras.layers.LayerNormalization(
      epsilon=1e-6)(attention1 + inputs)

  # Block 2: attention over the encoder outputs (queries from block 1).
  attention2, attention_weights_block2 = MultiHeadAttention(
      d_model, num_heads, name="attention_2")(inputs={
          "query": attention1,
          "key": enc_outputs,
          "value": enc_outputs,
          "mask": padding_mask
      })
  attention2 = tf.keras.layers.Dropout(rate=dropout)(attention2)
  attention2 = tf.keras.layers.LayerNormalization(
      epsilon=1e-6)(attention2 + attention1)

  # Position-wise feed-forward network.
  outputs = tf.keras.layers.Dense(units=units, activation="relu")(attention2)
  outputs = tf.keras.layers.Dense(units=d_model)(outputs)
  outputs = tf.keras.layers.Dropout(rate=dropout)(outputs)
  outputs = tf.keras.layers.LayerNormalization(
      epsilon=1e-6)(outputs + attention2)

  return tf.keras.Model(
      inputs=[inputs, enc_outputs, look_ahead_mask, padding_mask],
      outputs=[outputs, attention_weights_block1, attention_weights_block2],
      name=name)
def _decoder(vocab_size,
             num_layers,
             units,
             d_model,
             num_heads,
             dropout,
             name="decoder"):
  """Decoder component: embedding + positional encoding + N decoder layers.

  Also collects the per-layer attention weights into a dict keyed by
  "decoder_layer{i}_block{1,2}".
  """
  inputs = tf.keras.Input(shape=(None,), name="inputs")
  enc_outputs = tf.keras.Input(shape=(None, d_model), name="encoder_outputs")
  look_ahead_mask = tf.keras.Input(
      shape=(1, None, None), name="look_ahead_mask")
  padding_mask = tf.keras.Input(shape=(1, 1, None), name="padding_mask")

  embeddings = tf.keras.layers.Embedding(vocab_size, d_model)(inputs)
  # Scale embeddings by sqrt(d_model) before adding positional encodings.
  embeddings *= tf.math.sqrt(tf.cast(d_model, tf.float32))
  embeddings = PositionalEncoding(_MAX_SEQUENCE_LENGTH, d_model)(embeddings)

  outputs = tf.keras.layers.Dropout(rate=dropout)(embeddings)

  attention_weights = {}
  for i in range(num_layers):
    outputs, attn_w_block1, attn_w_block2 = _decoder_layer(
        units=units,
        d_model=d_model,
        num_heads=num_heads,
        dropout=dropout,
        name="decoder_layer_{}".format(i),
    )(inputs=[outputs, enc_outputs, look_ahead_mask, padding_mask])
    attention_weights["decoder_layer{}_block1".format(i+1)] = attn_w_block1
    attention_weights["decoder_layer{}_block2".format(i+1)] = attn_w_block2

  return tf.keras.Model(
      inputs=[inputs, enc_outputs, look_ahead_mask, padding_mask],
      outputs=[outputs, attention_weights],
      name=name)
def _transformer(input_vocab_size,
                 target_vocab_size,
                 num_layers,
                 units,
                 d_model,
                 num_heads,
                 dropout,
                 name="transformer"):
  """Transformer network: wires masks, encoder, decoder and the output
  projection into a single Keras model returning (logits, attention_weights)."""
  inputs = tf.keras.Input(shape=(None,), name="inputs")
  dec_inputs = tf.keras.Input(shape=(None,), name="dec_inputs")

  enc_padding_mask = tf.keras.layers.Lambda(
      _create_padding_mask, output_shape=(1, 1, None),
      name="enc_padding_mask")(inputs)
  # mask the future tokens for decoder inputs at the 1st attention block
  look_ahead_mask = tf.keras.layers.Lambda(
      _create_look_ahead_mask,
      output_shape=(1, None, None),
      name="look_ahead_mask")(dec_inputs)
  # mask the encoder outputs for the 2nd attention block
  dec_padding_mask = tf.keras.layers.Lambda(
      _create_padding_mask, output_shape=(1, 1, None),
      name="dec_padding_mask")(inputs)

  enc_outputs = _encoder(
      vocab_size=input_vocab_size,
      num_layers=num_layers,
      units=units,
      d_model=d_model,
      num_heads=num_heads,
      dropout=dropout,
  )(inputs=[inputs, enc_padding_mask])

  dec_outputs, attention_weights = _decoder(
      vocab_size=target_vocab_size,
      num_layers=num_layers,
      units=units,
      d_model=d_model,
      num_heads=num_heads,
      dropout=dropout,
  )(inputs=[dec_inputs, enc_outputs, look_ahead_mask, dec_padding_mask])

  # Final projection to target-vocabulary logits.
  outputs = tf.keras.layers.Dense(units=target_vocab_size, name="outputs")(
      dec_outputs)

  model = tf.keras.Model(inputs=[inputs, dec_inputs],
                         outputs=[outputs, attention_weights], name=name)
  model.summary()
  return model
class CustomSchedule(tf.keras.optimizers.schedules.LearningRateSchedule):
  """Learning rate schedule from "Attention Is All You Need":

  lr(step) = d_model^{-0.5} * min(step^{-0.5}, step * warmup_steps^{-1.5})
  """

  def __init__(self, d_model, warmup_steps=4000):
    super(CustomSchedule, self).__init__()
    # Keep the raw python value so the schedule can be serialized; the cast
    # tensor is what the per-step computation uses (the original assigned
    # d_model twice, the first assignment being immediately overwritten).
    self._d_model_value = d_model
    self.d_model = tf.cast(d_model, tf.float32)

    self.warmup_steps = warmup_steps

  def __call__(self, step):
    arg1 = tf.math.rsqrt(step)
    arg2 = step * (self.warmup_steps ** -1.5)

    return tf.math.rsqrt(self.d_model) * tf.math.minimum(arg1, arg2)

  def get_config(self):
    # Required by tf.keras for (de)serializing custom schedules, e.g. when
    # saving a model that references this schedule through its optimizer.
    return {"d_model": self._d_model_value, "warmup_steps": self.warmup_steps}
# Fixed input signature for _train_step: (inputs, targets) batches of int32
# token ids with dynamic (batch, time) shapes, so the tf.function is not
# retraced for every new sequence length.
_TRAIN_STEP_SIGNATURE = [
    tf.TensorSpec(shape=(None, None), dtype=tf.int32),
    tf.TensorSpec(shape=(None, None), dtype=tf.int32),
]
class Seq2SeqTransformerModel(object):
"""Full transformer model."""
def __init__(self,
batch_size=64,
num_heads=8,
ff_dim=512,
num_layers=4,
model_dim=128,
input_symbols=None,
output_symbols=None,
multihead_retrieval_strategy="AVERAGE",
model_dir=".",
name="model"):
self._batch_size = batch_size
self._input_symbols = input_symbols
self._input_vocab_size = len(input_symbols)
self._output_symbols = output_symbols
self._output_vocab_size = len(output_symbols)
self._num_heads = num_heads
self._num_layers = num_layers
self._multihead_retrieval = multihead_retrieval_strategy
self._transformer = _transformer(
input_vocab_size=self._input_vocab_size,
target_vocab_size=self._output_vocab_size,
num_layers=num_layers,
units=ff_dim,
d_model=model_dim,
num_heads=num_heads,
dropout=0.1)
self._learning_rate = CustomSchedule(model_dim)
self._optimizer = tf.keras.optimizers.Adam(
self._learning_rate, beta_1=0.9, beta_2=0.98, epsilon=1e-9)
self._loss_object = tf.keras.losses.SparseCategoricalCrossentropy(
from_logits=True, reduction="none")
self._train_accuracy = tf.keras.metrics.Mean(name="train_accuracy")
self._name = name
self._checkpoint_dir = os.path.join(model_dir, self._name)
self._checkpoint_prefix = os.path.join(self._checkpoint_dir, "ckpt")
self._checkpoint = tf.train.Checkpoint(optimizer=self._optimizer,
transformer=self._transformer)
# Length of the current output tensor (for eval).
self._input_length = -1
self._output_length = -1
def _loss_function(self, y_true, y_pred):
loss = self._loss_object(y_true, y_pred)
mask = tf.cast(tf.not_equal(y_true, 0), tf.float32)
loss = tf.multiply(loss, mask)
return tf.reduce_mean(loss)
def _accuracy_function(self, real, pred):
accuracies = tf.equal(real, tf.argmax(pred, output_type=tf.int32, axis=2))
mask = tf.math.logical_not(tf.math.equal(real, 0))
accuracies = tf.math.logical_and(mask, accuracies)
accuracies = tf.cast(accuracies, dtype=tf.float32)
mask = tf.cast(mask, dtype=tf.float32)
return tf.reduce_sum(accuracies) / tf.reduce_sum(mask)
@tf.function(input_signature=_TRAIN_STEP_SIGNATURE)
def _train_step(self, inputs, targets):
"""One step of the training."""
target_inputs = targets[:, :-1]
target_real = targets[:, 1:]
with tf.GradientTape() as tape:
predictions, _ = self._transformer(
inputs=[inputs, target_inputs], training=True)
loss = self._loss_function(target_real, predictions)
gradients = tape.gradient(loss, self._transformer.trainable_variables)
self._optimizer.apply_gradients(zip(gradients,
self._transformer.trainable_variables))
self._train_accuracy(self._accuracy_function(target_real, predictions))
return loss
def train(self, corpus, epochs=10, direction="pronounce", window=-1):
"""Runs training."""
# Create training log that also redirects to stdout.
stdout_file = sys.stdout
logfile = os.path.join(self._checkpoint_dir, "train.log")
print("Training log: {}".format(logfile))
sys.stdout = utils.DualLogger(logfile)
# Dump some parameters.
print(" Direction: {}".format(direction))
print(" # Epochs: {}".format(epochs))
print(" Batch size: {}".format(self._batch_size))
print(" Window size: {}".format(window))
print(" Max written len: {}".format(corpus.max_written_len))
print(" Max pron len: {}".format(corpus.max_pronounce_len))
print("Max written word len: {}".format(corpus.max_written_word_len))
print(" Max pron word len: {}".format(corpus.max_pronounce_word_len))
# Perform training.
best_total_loss = 1000000
nbatches = data.num_batches(corpus, self._batch_size, direction=direction,
window=window)
for epoch in range(epochs):
self._train_accuracy.reset_states()
start = time.time()
total_loss = 0
steps = 0
batches = data.batchify(corpus, self._batch_size, direction,
window=window)
batch, (inputs, targ) = next(batches)
while batch > -1:
bos = np.expand_dims(
[self._output_symbols.find("<s>")] * np.shape(targ)[0], 1)
targets = np.concatenate((bos, targ), axis=-1)
batch_loss = self._train_step(inputs, targets)
total_loss += batch_loss
if batch % 10 == 0:
print("Epoch {} Batch {} (/{}) Loss {:.4f}".format(
epoch + 1,
batch,
nbatches,
batch_loss))
steps += 1
batch, (inputs, targ) = next(batches)
total_loss /= steps
print("Epoch {} Loss {:.4f} Accuracy {:.4f}".format(
epoch + 1, total_loss, self._train_accuracy.result()))
if total_loss < best_total_loss:
self._checkpoint.save(file_prefix=self._checkpoint_prefix)
print("Saved checkpoint to {}".format(self._checkpoint_prefix))
best_total_loss = total_loss
print("Time taken for 1 epoch {} sec\n".format(
time.time() - start))
print("Best total loss: {:.4f}".format(best_total_loss))
# Restore stdout.
sys.stdout = stdout_file
def _get_attention(self, attention_weights):
"""Prepare attention for consumption.
Args:
attention_weights: tensor with shape:
(batch=1, num_heads, seq_len_q, seq_len_k).
Returns:
Accumulated attention.
"""
attention_heads = tf.squeeze( # Remove batch dimension.
attention_weights["decoder_layer%d_block2" % self._num_layers], 0)
# Basic sanity checks.
if len(attention_heads) != self._num_heads:
raise ValueError("Invalid number of attention heads: {}".format(
len(attention_heads)))
if len(attention_heads.shape) != 3:
raise ValueError("Invalid shape of attention weights: {}".format(
len(attention_heads.shape)))
if attention_heads.shape[1] > self._output_length:
raise ValueError("Expected output length <= {} for dim 1, got {}".format(
self._output_length, attention_heads.shape[1]))
elif attention_heads.shape[1] < self._output_length:
output_len_diff = self._output_length - attention_heads.shape[1]
attention_heads = tf.pad(attention_heads,
[[0, 0], [0, output_len_diff], [0, 0]])
if attention_heads.shape[2] != self._input_length:
raise ValueError("Expected input length {} for dim 2, got {}".format(
self._input_length, attention_heads.shape[2]))
# Combine.
if self._multihead_retrieval == "AVERAGE":
attention = tf.reduce_sum(attention_heads, axis=0) / self._num_heads
elif self._multihead_retrieval == "MAX":
attention = tf.reduce_max(attention_heads, axis=0)
else:
raise ValueError("Unknown retrieval strategy: {}".format(
self._multihead_retrieval))
return attention
@tf.function(experimental_relax_shapes=True)
def _predict_step(self, encoder_input, output):
"""One prediction step."""
return self._transformer(
inputs=[encoder_input, output], training=False)
  def decode(self, inputs, joiner=""):
    """Decodes the inputs.

    Greedily decodes one output symbol at a time until an end token is
    produced or the maximum output length is reached.

    Args:
      inputs: sequence of input symbol ids.
      joiner: string used to join the decoded output symbols.

    Returns:
      A (decoded_string, attention) pair, where attention is a numpy array
      combining all attention heads from the final prediction step.
    """
    encoder_input = tf.convert_to_tensor([inputs], dtype=tf.int32)
    # The first input to the transformer will be the start token.
    start = [self._output_symbols.find("<s>")]
    output = tf.convert_to_tensor(start, dtype=tf.int32)
    output = tf.expand_dims(output, 0)
    result = []
    # NOTE(review): assumes self._output_length >= 1, otherwise
    # attention_weights below would be undefined.
    for _ in range(self._output_length):
      # predictions.shape == (batch_size, seq_len, vocab_size)
      predictions, attention_weights = self._predict_step(
          encoder_input, output)
      # select the last word from the seq_len dimension
      predictions = predictions[:, -1:, :]  # (batch_size, 1, vocab_size)
      predicted_id = tf.argmax(predictions, axis=-1, output_type=tf.int32)
      # concatenate the predicted_id to the output which is given to the
      # decoder as its input.
      output = tf.concat([output, predicted_id], axis=-1)
      outsym = self._output_symbols.find(int(predicted_id.numpy()))
      # Stop on either end-of-sentence or end-of-target marker (the marker
      # itself is not appended to the result).
      if outsym == "</s>" or outsym == "</targ>":
        break
      else:
        result.append(outsym)
    # Accumulate attention over all the heads.
    attention = self._get_attention(attention_weights)
    return joiner.join(result), attention.numpy()
  def update_property(self, property_name, value):
    """Sets attribute `property_name` on this object to `value`."""
    setattr(self, property_name, value)
  @property
  def checkpoint(self):
    """The model checkpoint object (used for saving/restoring weights)."""
    return self._checkpoint

  @property
  def checkpoint_dir(self):
    """Directory where checkpoints are written."""
    return self._checkpoint_dir

  @property
  def input_symbols(self):
    """Symbol table mapping input symbols to ids."""
    return self._input_symbols

  @property
  def output_symbols(self):
    """Symbol table mapping output symbols to ids (provides find())."""
    return self._output_symbols

  @property
  def input_length(self):
    """Expected input sequence length."""
    return self._input_length

  @property
  def eval_mode(self):
    """Evaluation-mode suffix derived from the multi-head retrieval
    strategy, e.g. "_average" or "_max"."""
    return "_%s" % self._multihead_retrieval.lower()
| apache-2.0 |
ctr-lang/ctr | __tests__/cases-api/basic/variable/special/private-variable-set.js | 397 | const CTR = require('ctr').js;
// Create a CTR instance with private-variable support enabled.
const ctr = new CTR();
ctr.setOption({
    privateVariable: true
});
// Global variables; the width/height values here are presumably shadowed by
// the private ($$) declarations inside the rule below.
ctr.setVar({
    width: '99999px',
    height: '99999px',
    background: 'red'
});
// `.test` declares private variables via $$; `_$name$_` reads the private
// value while `$name$` falls back to the global variable.
ctr.create('.test', {
    $$: {
        width: '200px',
        height: '400px'
    },
    width: '_$width$_',
    height: '_$height$_',
    background: '$background$'
});
// Export the compiled result for the test harness to snapshot.
const res = ctr.getRes();
module.exports = {
    res: res
};
| apache-2.0 |
tectronics/proteocloud | src/main/java/com/compomics/proteocloud/ui/DenovoResultPanel.java | 39748 | package com.compomics.proteocloud.ui;
import javax.swing.*;
import com.compomics.proteocloud.db.accessor.Pepnovohit;
import com.compomics.proteocloud.io.MascotGenericFile;
import com.compomics.proteocloud.model.DenovoHitSet;
import com.compomics.proteocloud.model.DenovoSearchResult;
import com.compomics.proteocloud.sqs.MessageManager;
import com.compomics.proteocloud.starter.CloudTools;
import com.compomics.proteocloud.util.*;
import com.compomics.util.experiment.biology.Ion;
import com.compomics.util.experiment.biology.IonFactory;
import com.compomics.util.experiment.biology.NeutralLoss;
import com.compomics.util.experiment.biology.Peptide;
import com.compomics.util.experiment.biology.ions.PeptideFragmentIon;
import com.compomics.util.experiment.identification.SpectrumAnnotator;
import com.compomics.util.experiment.identification.matches.IonMatch;
import com.compomics.util.experiment.identification.matches.ModificationMatch;
import com.compomics.util.experiment.identification.matches.PeptideMatch;
import com.compomics.util.experiment.massspectrometry.Charge;
import com.compomics.util.experiment.massspectrometry.MSnSpectrum;
import com.compomics.util.experiment.massspectrometry.Peak;
import com.compomics.util.experiment.massspectrometry.Precursor;
import com.compomics.util.gui.protein.ModificationProfile;
import com.compomics.util.gui.protein.SequenceModificationPanel;
import com.compomics.util.gui.spectrum.SpectrumPanel;
import com.compomics.util.preferences.AnnotationPreferences;
import com.jgoodies.forms.layout.CellConstraints;
import com.jgoodies.forms.layout.FormLayout;
import com.xerox.amazonws.sqs2.SQSException;
import no.uib.jsparklines.renderers.JSparklinesBarChartTableCellRenderer;
import org.jdesktop.swingx.JXTable;
import org.jdesktop.swingx.JXTitledPanel;
import org.jdesktop.swingx.decorator.HighlighterFactory;
import org.jfree.chart.plot.PlotOrientation;
import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.JTableHeader;
import javax.swing.table.TableColumnModel;
import javax.swing.table.TableModel;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.io.IOException;
import java.sql.SQLException;
import java.util.*;
import java.util.List;
public class DenovoResultPanel extends JPanel {
private JXTable spectraTbl;
private JXTable denovoTbl;
private Map<String, DenovoHitSet> titleToSetMap;
private SpectrumPanel spectrumPanel;
private JPanel spectrumJPanel;
private CellConstraints cc;
private ProteoCloudUI mainFrame;
private Peptide peptide;
private List<String> denovoTableToolTips;
private List<String> spectraTableToolTips;
private JMenuBar annotationMenuBar;
private JMenu ionsMenu;
private JCheckBoxMenuItem aIonCheckBoxMenuItem;
private JCheckBoxMenuItem bIonCheckBoxMenuItem;
private JCheckBoxMenuItem cIonCheckBoxMenuItem;
private JCheckBoxMenuItem xIonCheckBoxMenuItem;
private JCheckBoxMenuItem yIonCheckBoxMenuItem;
private JCheckBoxMenuItem zIonCheckBoxMenuItem;
private JMenu splitterMenu1;
private JMenu splitterMenu2;
private JMenu splitterMenu3;
private JMenu splitterMenu4;
private JMenu splitterMenu5;
private JMenu splitterMenu6;
private JMenu splitterMenu7;
private JMenu otherMenu;
private JMenu lossMenu;
private JMenu settingsMenu;
private JMenu chargeMenu;
private JMenu deNovoMenu;
private JCheckBoxMenuItem reporterIonsCheckMenuItem;
private JCheckBoxMenuItem precursorCheckMenuItem;
private JCheckBoxMenuItem immoniumIonsCheckMenuItem;
private JCheckBoxMenuItem forwardIonsDeNovoCheckBoxMenuItem;
private JCheckBoxMenuItem rewindIonsDeNovoCheckBoxMenuItem;
private JCheckBoxMenuItem allCheckBoxMenuItem;
private JCheckBoxMenuItem barsCheckBoxMenuItem;
private ButtonGroup deNovoChargeButtonGroup;
private JCheckBoxMenuItem deNovoChargeOneJRadioButtonMenuItem;
private JCheckBoxMenuItem deNovoChargeTwoJRadioButtonMenuItem;
/**
* The annotation preferences.
*/
private AnnotationPreferences annotationPreferences = new AnnotationPreferences();
/**
* The charge menus.
*/
private HashMap<Integer, JCheckBoxMenuItem> chargeMenus = new HashMap<Integer, JCheckBoxMenuItem>();
/**
* The neutral loss menus.
*/
private HashMap<NeutralLoss, JCheckBoxMenuItem> lossMenus = new HashMap<NeutralLoss, JCheckBoxMenuItem>();
/**
* The selected spectrum.
*/
private MascotGenericFile selectedSpectrum;
    /**
     * Constructs the DenovoResultPanel.
     *
     * @param mainFrame the parent ProteoCloud UI frame
     */
    public DenovoResultPanel(ProteoCloudUI mainFrame) {
        this.mainFrame = mainFrame;
        initComponents();
        // Register NH3 and H2O as default neutral losses so they appear in
        // the Loss annotation menu (see updateAnnotationMenus).
        IonFactory.getInstance().addDefaultNeutralLoss(NeutralLoss.NH3);
        IonFactory.getInstance().addDefaultNeutralLoss(NeutralLoss.H2O);
    }
    /**
     * Initialize the components.
     *
     * Lays out three areas: the spectrum viewer (with the annotation menu bar
     * docked underneath), the query-spectra table and the de novo hits table.
     */
    private void initComponents() {
        cc = new CellConstraints();
        // Setup the annotation menu bar.
        setupAnnotationMenuBar();
        // Build the spectrum overview panel
        JPanel spectrumOverviewPnl = new JPanel(new BorderLayout());
        spectrumJPanel = new JPanel();
        spectrumJPanel.setLayout(new BorderLayout());
        spectrumJPanel.setBorder(BorderFactory.createEmptyBorder(8, 8, 8, 8));
        // Placeholder spectrum shown until the user selects real data.
        spectrumJPanel.add(new SpectrumPanel(new double[]{0.0, 100.0}, new double[]{100.0, 0.0}, 0.0, "", ""));
        spectrumJPanel.setPreferredSize(new Dimension(100, 320));
        spectrumOverviewPnl.add(spectrumJPanel);
        spectrumOverviewPnl.add(annotationMenuBar, BorderLayout.SOUTH);
        spectrumOverviewPnl.setBorder(BorderFactory.createTitledBorder("Spectrum Viewer"));
        // Setup the tables
        setupSpectraTableProperties();
        setupDenovoTableProperties();
        JScrollPane querySpectraTblPane = new JScrollPane(spectraTbl);
        querySpectraTblPane.setPreferredSize(new Dimension(430, 200));
        this.setLayout(new FormLayout("5dlu, p:g, 5dlu, p:g, 5dlu", "5dlu, f:p:g, 5dlu, f:p:g, 5dlu"));
        // Choose your spectra
        JPanel resultsSpectrumPnl = new JPanel();
        resultsSpectrumPnl.setBorder(BorderFactory.createTitledBorder("Query Spectra"));
        resultsSpectrumPnl.setLayout(new FormLayout("5dlu, p:g, 5dlu", "5dlu, t:p:g, 5dlu,"));
        resultsSpectrumPnl.add(querySpectraTblPane, cc.xy(2, 2));
        // De novo results
        JPanel resultHitsPnl = new JPanel();
        resultHitsPnl.setLayout(new FormLayout("5dlu, p:g, 5dlu", "5dlu, t:p:g, 5dlu"));
        JScrollPane denovoHitsPane = new JScrollPane(denovoTbl);
        denovoHitsPane.setPreferredSize(new Dimension(570, 200));
        denovoHitsPane.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS);
        denovoHitsPane.setToolTipText("Select spectra");
        resultHitsPnl.add(denovoHitsPane, cc.xy(2, 2));
        resultHitsPnl.setBorder(BorderFactory.createTitledBorder("De Novo Hits"));
        // Update the annotations preferences once
        annotationPreferences.useAutomaticAnnotation(true);
        updateAnnotationPreferences();
        // Spectrum viewer spans the full width; the two tables share row 4.
        this.add(spectrumOverviewPnl, cc.xyw(2, 2, 3));
        this.add(resultsSpectrumPnl, cc.xy(2, 4));
        this.add(resultHitsPnl, cc.xy(4, 4));
    }
    /**
     * This methods setups the annotation menu bar.
     *
     * Builds the Ions / Other / Loss / Charge / De Novo / Settings menus,
     * separated by disabled "|" splitter menus. Every checkbox action simply
     * re-applies updateAnnotationPreferences(). The Loss and Charge menus are
     * created empty here and populated later by updateAnnotationMenus().
     */
    private void setupAnnotationMenuBar() {
        annotationMenuBar = new JMenuBar();
        // "|" splitter menus act as visual separators between menu groups.
        splitterMenu1 = new JMenu();
        splitterMenu2 = new JMenu();
        splitterMenu3 = new JMenu();
        splitterMenu4 = new JMenu();
        splitterMenu5 = new JMenu();
        splitterMenu6 = new JMenu();
        splitterMenu7 = new JMenu();
        ionsMenu = new JMenu();
        aIonCheckBoxMenuItem = new JCheckBoxMenuItem();
        bIonCheckBoxMenuItem = new JCheckBoxMenuItem();
        cIonCheckBoxMenuItem = new JCheckBoxMenuItem();
        xIonCheckBoxMenuItem = new JCheckBoxMenuItem();
        yIonCheckBoxMenuItem = new JCheckBoxMenuItem();
        zIonCheckBoxMenuItem = new JCheckBoxMenuItem();
        precursorCheckMenuItem = new JCheckBoxMenuItem();
        // NOTE(review): reporterIonsCheckMenuItem and barsCheckBoxMenuItem
        // are created but never added to any menu below.
        reporterIonsCheckMenuItem = new JCheckBoxMenuItem();
        immoniumIonsCheckMenuItem = new JCheckBoxMenuItem();
        forwardIonsDeNovoCheckBoxMenuItem = new JCheckBoxMenuItem();
        rewindIonsDeNovoCheckBoxMenuItem = new JCheckBoxMenuItem();
        deNovoChargeOneJRadioButtonMenuItem = new JCheckBoxMenuItem();
        deNovoChargeTwoJRadioButtonMenuItem = new JCheckBoxMenuItem();
        allCheckBoxMenuItem = new JCheckBoxMenuItem();
        barsCheckBoxMenuItem = new JCheckBoxMenuItem();
        otherMenu = new JMenu();
        lossMenu = new JMenu();
        chargeMenu = new JMenu();
        deNovoMenu = new JMenu();
        settingsMenu = new JMenu();
        deNovoChargeButtonGroup = new ButtonGroup();
        annotationMenuBar.setBorder(javax.swing.BorderFactory.createEtchedBorder());
        annotationMenuBar.setOpaque(false);
        splitterMenu1.setText("|");
        splitterMenu1.setEnabled(false);
        annotationMenuBar.add(splitterMenu1);
        // --- Ions menu: a/b/c and x/y/z fragment ion toggles; b and y start
        // selected. Disabled until results arrive (enableAnnotationMenuBar).
        ionsMenu.setText("Ions");
        ionsMenu.setEnabled(false);
        aIonCheckBoxMenuItem.setText("a");
        aIonCheckBoxMenuItem.setToolTipText("a-ions");
        aIonCheckBoxMenuItem.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                updateAnnotationPreferences();
            }
        });
        ionsMenu.add(aIonCheckBoxMenuItem);
        bIonCheckBoxMenuItem.setText("b");
        bIonCheckBoxMenuItem.setSelected(true);
        bIonCheckBoxMenuItem.setToolTipText("b-ions");
        bIonCheckBoxMenuItem.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                updateAnnotationPreferences();
            }
        });
        ionsMenu.add(bIonCheckBoxMenuItem);
        cIonCheckBoxMenuItem.setText("c");
        cIonCheckBoxMenuItem.setToolTipText("c-ions");
        cIonCheckBoxMenuItem.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                updateAnnotationPreferences();
            }
        });
        ionsMenu.add(cIonCheckBoxMenuItem);
        xIonCheckBoxMenuItem.setText("x");
        xIonCheckBoxMenuItem.setToolTipText("x-ions");
        xIonCheckBoxMenuItem.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                updateAnnotationPreferences();
            }
        });
        ionsMenu.add(xIonCheckBoxMenuItem);
        yIonCheckBoxMenuItem.setText("y");
        yIonCheckBoxMenuItem.setSelected(true);
        yIonCheckBoxMenuItem.setToolTipText("y-ions");
        yIonCheckBoxMenuItem.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                updateAnnotationPreferences();
            }
        });
        ionsMenu.add(yIonCheckBoxMenuItem);
        zIonCheckBoxMenuItem.setText("z");
        zIonCheckBoxMenuItem.setToolTipText("z-ions");
        zIonCheckBoxMenuItem.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                updateAnnotationPreferences();
            }
        });
        ionsMenu.add(zIonCheckBoxMenuItem);
        annotationMenuBar.add(ionsMenu);
        splitterMenu2.setText("|");
        splitterMenu2.setEnabled(false);
        annotationMenuBar.add(splitterMenu2);
        // --- Other menu: precursor and immonium ion annotations (both on).
        otherMenu.setText("Other");
        otherMenu.setEnabled(true);
        precursorCheckMenuItem.setSelected(true);
        precursorCheckMenuItem.setText("Precursor");
        precursorCheckMenuItem.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                updateAnnotationPreferences();
            }
        });
        otherMenu.add(precursorCheckMenuItem);
        immoniumIonsCheckMenuItem.setSelected(true);
        immoniumIonsCheckMenuItem.setText("Immonium");
        immoniumIonsCheckMenuItem.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                updateAnnotationPreferences();
            }
        });
        otherMenu.add(immoniumIonsCheckMenuItem);
        annotationMenuBar.add(otherMenu);
        splitterMenu3.setText("|");
        splitterMenu3.setEnabled(false);
        annotationMenuBar.add(splitterMenu3);
        // --- Loss menu: populated dynamically in updateAnnotationMenus().
        lossMenu.setText("Loss");
        lossMenu.setEnabled(true);
        lossMenu.add(new JSeparator());
        annotationMenuBar.add(lossMenu);
        splitterMenu4.setText("|");
        splitterMenu4.setEnabled(false);
        annotationMenuBar.add(splitterMenu4);
        // --- Charge menu: populated dynamically in updateAnnotationMenus().
        chargeMenu.setText("Charge");
        chargeMenu.setEnabled(true);
        annotationMenuBar.add(chargeMenu);
        splitterMenu5.setText("|");
        splitterMenu5.setEnabled(false);
        annotationMenuBar.add(splitterMenu5);
        // --- De Novo menu: forward/rewind tag display and tag charge choice.
        deNovoMenu.setText("De Novo");
        forwardIonsDeNovoCheckBoxMenuItem.setText("f-ions");
        forwardIonsDeNovoCheckBoxMenuItem.setSelected(true);
        forwardIonsDeNovoCheckBoxMenuItem.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                updateAnnotationPreferences();
            }
        });
        deNovoMenu.add(forwardIonsDeNovoCheckBoxMenuItem);
        rewindIonsDeNovoCheckBoxMenuItem.setText("r-ions");
        rewindIonsDeNovoCheckBoxMenuItem.setSelected(true);
        rewindIonsDeNovoCheckBoxMenuItem.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                updateAnnotationPreferences();
            }
        });
        deNovoMenu.add(rewindIonsDeNovoCheckBoxMenuItem);
        deNovoMenu.add(new JSeparator());
        // Button group keeps the single/double charge items mutually exclusive.
        deNovoChargeButtonGroup.add(deNovoChargeOneJRadioButtonMenuItem);
        deNovoChargeOneJRadioButtonMenuItem.setSelected(true);
        deNovoChargeOneJRadioButtonMenuItem.setText("Single Charge");
        deNovoChargeOneJRadioButtonMenuItem.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                updateAnnotationPreferences();
            }
        });
        deNovoMenu.add(deNovoChargeOneJRadioButtonMenuItem);
        deNovoChargeButtonGroup.add(deNovoChargeTwoJRadioButtonMenuItem);
        deNovoChargeTwoJRadioButtonMenuItem.setText("Double Charge");
        deNovoChargeTwoJRadioButtonMenuItem.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                updateAnnotationPreferences();
            }
        });
        deNovoMenu.add(deNovoChargeTwoJRadioButtonMenuItem);
        annotationMenuBar.add(deNovoMenu);
        splitterMenu6.setText("|");
        splitterMenu6.setEnabled(false);
        annotationMenuBar.add(splitterMenu6);
        // --- Settings menu: show all peaks vs annotated peaks only.
        settingsMenu.setText("Settings");
        settingsMenu.setEnabled(true);
        allCheckBoxMenuItem.setText("Show All Peaks");
        allCheckBoxMenuItem.setToolTipText("Show all peaks or just the annotated peaks");
        allCheckBoxMenuItem.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                updateAnnotationPreferences();
            }
        });
        settingsMenu.add(allCheckBoxMenuItem);
        annotationMenuBar.add(settingsMenu);
        splitterMenu7.setText("|");
        splitterMenu7.setEnabled(false);
        annotationMenuBar.add(splitterMenu7);
        // NOTE(review): this clears the etched border set above.
        annotationMenuBar.setBorder(null);
    }
    /**
     * Enables the annotation menu bar.
     *
     * Only the Ions menu is created disabled (see setupAnnotationMenuBar);
     * the other menus start out enabled, so enabling it activates the bar.
     */
    private void enableAnnotationMenuBar() {
        ionsMenu.setEnabled(true);
    }
    /**
     * This method prepares the denovo hits table.
     *
     * Builds a non-editable model with typed columns, header tooltips, a
     * sparkline renderer for the Score column, and a selection listener that
     * refreshes the spectrum panel.
     */
    private void setupDenovoTableProperties() {
        final DefaultTableModel denovoTblMdl = new DefaultTableModel() {
            // instance initializer block
            {
                setColumnIdentifiers(new Object[]{"#", "Peptide", "RankScore", "Score", "N-Gap", "C-Gap", "m/z", "Charge"});
            }
            public boolean isCellEditable(int row, int col) {
                return false;
            }
            // Column classes drive per-type sorting and rendering.
            public Class<?> getColumnClass(int columnIndex) {
                switch (columnIndex) {
                    case 0:
                    case 7:
                        return Integer.class;
                    case 2:
                    case 3:
                    case 4:
                    case 5:
                    case 6:
                        return Double.class;
                    default:
                        return String.class;
                }
            }
        };
        // Header tooltips, one entry per column (same order as above).
        denovoTableToolTips = new ArrayList<String>();
        denovoTableToolTips.add("Peptide Index");
        denovoTableToolTips.add("Peptide Sequence");
        denovoTableToolTips.add("Ranking Score");
        denovoTableToolTips.add("PepNovo Score of Peptide Sequence");
        denovoTableToolTips.add("Mass Gap from N-terminal to Sequence Start");
        denovoTableToolTips.add("Mass Gap from C-terminal to Sequence End");
        denovoTableToolTips.add("Peptide m/z");
        denovoTableToolTips.add("Peptide Charge");
        // Custom header shows the tooltip of the column under the cursor.
        denovoTbl = new JXTable(denovoTblMdl) {
            protected JTableHeader createDefaultTableHeader() {
                return new JTableHeader(columnModel) {
                    public String getToolTipText(MouseEvent e) {
                        java.awt.Point p = e.getPoint();
                        int index = columnModel.getColumnIndexAtX(p.x);
                        int realIndex = columnModel.getColumn(index).getModelIndex();
                        String tip = (String) denovoTableToolTips.get(realIndex);
                        return tip;
                    }
                };
            }
        };
        // JSparklines for Scoring
        denovoTbl.getColumn("Score").setCellRenderer(new JSparklinesBarChartTableCellRenderer(PlotOrientation.HORIZONTAL, 100.0, new Color(110, 196, 97)));
        ((JSparklinesBarChartTableCellRenderer) denovoTbl.getColumn("Score").getCellRenderer()).showNumberAndChart(true, 50, new Font("Arial", Font.PLAIN, 12), 0);
        TableColumnModel tcm = denovoTbl.getColumnModel();
        tcm.getColumn(0).setCellRenderer(new CustomTableCellRenderer(SwingConstants.RIGHT));
        tcm.getColumn(0).setMinWidth(40);
        tcm.getColumn(0).setMaxWidth(40);
        tcm.getColumn(1).setMinWidth(170);
        tcm.getColumn(1).setMaxWidth(170);
        tcm.getColumn(3).setMinWidth(100);
        tcm.getColumn(3).setMaxWidth(100);
        // Sort the peptide table by the number of peptide hits
        denovoTbl.setAutoCreateRowSorter(true);
        // register list selection listener
        denovoTbl.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
            public void valueChanged(ListSelectionEvent e) {
                refreshSpectrumPanel();
            }
        });
        // Single selection only
        denovoTbl.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
        denovoTbl.setSelectionBackground(new Color(130, 207, 250));
        // Add nice striping effect
        denovoTbl.addHighlighter(HighlighterFactory.createAlternateStriping());
        // Column control
        denovoTbl.setColumnControlVisible(true);
    }
    /**
     * This method sets the spectra table up.
     *
     * Builds a non-editable two-column model (index, spectrum title) with
     * header tooltips, and wires a selection listener that refreshes both the
     * spectrum viewer and the de novo hit table.
     */
    private void setupSpectraTableProperties() {
        // Query table
        final TableModel spectraTableMdl = new DefaultTableModel() {
            // instance initializer block
            {
                setColumnIdentifiers(new Object[]{"#", "Spectrum Title"});
            }
            public boolean isCellEditable(int row, int col) {
                return false;
            }
            public Class<?> getColumnClass(int columnIndex) {
                switch (columnIndex) {
                    case 0:
                        return Integer.class;
                    default:
                        return String.class;
                }
            }
        };
        spectraTableToolTips = new ArrayList<String>();
        spectraTableToolTips.add("Spectrum Index");
        spectraTableToolTips.add("Spectrum Title");
        // Custom header shows the tooltip of the column under the cursor.
        spectraTbl = new JXTable(spectraTableMdl) {
            protected JTableHeader createDefaultTableHeader() {
                return new JTableHeader(columnModel) {
                    public String getToolTipText(MouseEvent e) {
                        java.awt.Point p = e.getPoint();
                        int index = columnModel.getColumnIndexAtX(p.x);
                        int realIndex = columnModel.getColumn(index).getModelIndex();
                        String tip = (String) spectraTableToolTips.get(realIndex);
                        return tip;
                    }
                };
            }
        };
        TableColumnModel tcm = spectraTbl.getColumnModel();
        tcm.getColumn(0).setCellRenderer(new CustomTableCellRenderer(SwingConstants.RIGHT));
        tcm.getColumn(0).setMinWidth(40);
        tcm.getColumn(0).setMaxWidth(40);
        // Sort the peptide table by the number of peptide hits
        spectraTbl.setAutoCreateRowSorter(true);
        // register list selection listener
        spectraTbl.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
            public void valueChanged(ListSelectionEvent e) {
                refreshSpectrumPanel();
                refreshDenovoTable();
            }
        });
        // Single selection only
        spectraTbl.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
        // Add striping effect
        spectraTbl.addHighlighter(HighlighterFactory.createAlternateStriping());
        // Column control
        spectraTbl.setColumnControlVisible(true);
    }
    /**
     * Updates the spectrum panel.
     *
     * Converts the MGF spectrum selected in the spectra table into an
     * MSnSpectrum, rebuilds the SpectrumPanel, and refreshes the dynamic
     * annotation menus and annotations.
     */
    public void refreshSpectrumPanel() {
        int row = spectraTbl.getSelectedRow();
        if (row != -1 && !titleToSetMap.isEmpty()) {
            // The spectrum id.
            try {
                selectedSpectrum = titleToSetMap.get(spectraTbl.getValueAt(row, spectraTbl.convertColumnIndexToView(1))).getMascotGenericFile();
                ArrayList<Charge> precursorCharges = new ArrayList<Charge>();
                precursorCharges.add(new Charge(selectedSpectrum.getCharge(), Charge.PLUS));
                // Convert the MGF peak list (m/z -> intensity) into Peak objects.
                HashMap<Double,Peak> peakMap = new HashMap<Double, Peak>();
                HashMap<Double,Double> mgfPeaks = selectedSpectrum.getPeaks();
                Iterator<Double> iterator = mgfPeaks.keySet().iterator();
                while (iterator.hasNext()) {
                    Double mass = iterator.next();
                    peakMap.put(mass, new Peak(mass, mgfPeaks.get(mass)));
                }
                MSnSpectrum currentSpectrum = new MSnSpectrum(2, new Precursor(0.0, selectedSpectrum.getPrecursorMZ(), precursorCharges), "no title", peakMap, "no filename");
                spectrumPanel = new SpectrumPanel(
                        currentSpectrum.getMzValuesAsArray(), currentSpectrum.getIntensityValuesAsArray(),
                        selectedSpectrum.getPrecursorMZ(), selectedSpectrum.getCharge() + "+",
                        "", 40, false, false, false, 2, false);
                // Rebuild loss/charge menus for this precursor charge.
                updateAnnotationMenus(selectedSpectrum.getCharge());
                // Update the annotations
                updateAnnotations();
                // Swap the new panel in and force a repaint.
                spectrumJPanel.removeAll();
                spectrumJPanel.add(spectrumPanel);
                spectrumJPanel.revalidate();
                spectrumJPanel.repaint();
            } catch (SQLException e) {
                // TODO(review): surface the error to the user instead of
                // only printing the stack trace.
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
/**
* Updates the annotations.
*/
private void updateAnnotations() {
int row = denovoTbl.getSelectedRow();
if (row != -1) {
Pepnovohit hit = titleToSetMap.get(selectedSpectrum.getTitle()).getDenovoHits().get(row);
if (hit != null) {
addAnnotations(hit);
}
}
}
    /**
     * Update the denovo hit table based on the spectrum selected via mouse click.
     *
     * Clears the hit table, repopulates it with the PepNovo hits of the
     * selected spectrum, and selects the first row. Shows a wait cursor while
     * working.
     */
    private void refreshDenovoTable() {
        // Set the cursor into the wait status.
        this.setCursor(new Cursor(Cursor.WAIT_CURSOR));
        ((DefaultTableModel) denovoTbl.getModel()).setRowCount(0);
        int row = spectraTbl.getSelectedRow();
        // Condition if one row is selected.
        if (row != -1 && !titleToSetMap.isEmpty()) {
            // NOTE(review): uses the view column index 1 directly; assumes
            // the Spectrum Title column has not been reordered.
            String spectrumName = spectraTbl.getValueAt(row, 1).toString().trim();
            if (titleToSetMap.containsKey(spectrumName)) {
                List<Pepnovohit> pepnovoList = titleToSetMap.get(spectrumName).getDenovoHits();
                for (int i = 0; i < pepnovoList.size(); i++) {
                    Pepnovohit hit = pepnovoList.get(i);
                    if (hit != null) {
                        ((DefaultTableModel) denovoTbl.getModel()).addRow(new Object[]{
                                i + 1,
                                hit.getSequence(),
                                hit.getRankscore().doubleValue(),
                                hit.getPnvscore().doubleValue(),
                                hit.getN_gap().doubleValue(),
                                hit.getC_gap().doubleValue(),
                                hit.getPrecursor_mh().doubleValue(),
                                hit.getCharge()
                        });
                    }
                }
            }
            // Auto-select the top-ranked hit.
            denovoTbl.getSelectionModel().setSelectionInterval(0, 0);
        }
        // Set the cursor back into the default status.
        this.setCursor(new Cursor(Cursor.DEFAULT_CURSOR));
    }
    /**
     * Adds spectrum annotations based on the selected de novo hit.
     *
     * Rebuilds the MSnSpectrum for the current selection, annotates it with
     * the hit's peptide sequence, and overlays de novo sequencing tags.
     *
     * @param hit the PepNovo hit whose sequence is used for annotation
     */
    private void addAnnotations(Pepnovohit hit) {
        int row = spectraTbl.getSelectedRow();
        if (row != -1) {
            // convert the spectrum
            ArrayList<Charge> precursorCharges = new ArrayList<Charge>();
            precursorCharges.add(new Charge(selectedSpectrum.getCharge(), Charge.PLUS));
            HashMap<Double,Peak> peakMap = new HashMap<Double, Peak>();
            HashMap<Double,Double> mgfPeaks = selectedSpectrum.getPeaks();
            Iterator<Double> iterator = mgfPeaks.keySet().iterator();
            while (iterator.hasNext()) {
                Double mass = iterator.next();
                peakMap.put(mass, new Peak(mass, mgfPeaks.get(mass)));
            }
            MSnSpectrum currentSpectrum = new MSnSpectrum(2, new Precursor(0.0, selectedSpectrum.getPrecursorMZ(), precursorCharges), "no title", peakMap, "no filename");
            // add the annotations
            SpectrumAnnotator spectrumAnnotator = new SpectrumAnnotator();
            // show annotated peaks in foreground, non-annotated in background
            spectrumPanel.showAnnotatedPeaksOnly(!annotationPreferences.showAllPeaks());
            // Strip a modification tag from the sequence.
            // NOTE(review): assumes exactly one '+' followed by two characters
            // (e.g. "+42") -- a longer mass tag or multiple tags would be
            // handled incorrectly; confirm against PepNovo output format.
            if(hit.getSequence().contains("+")) {
                int index = hit.getSequence().indexOf("+");
                hit.setSequence(hit.getSequence().substring(0, index) + hit.getSequence().substring(index + 3, hit.getSequence().length()));
            }
            peptide = new Peptide(hit.getSequence(), new ArrayList<String>(), new ArrayList<ModificationMatch>());
            // Get the de novo search parameters
            int charge = (int) hit.getCharge();
            annotationPreferences.setCurrentSettings(peptide, charge, true);
            ArrayList<IonMatch> annotations = spectrumAnnotator.getSpectrumAnnotation(
                    annotationPreferences.getIonTypes(),
                    annotationPreferences.getNeutralLosses(),
                    annotationPreferences.getValidatedCharges(),
                    charge,
                    currentSpectrum, peptide,
                    currentSpectrum.getIntensityLimit(0.0),
                    0.5,
                    false);
            spectrumPanel.setAnnotations(SpectrumAnnotator.getSpectrumAnnotation(annotations));
            // add de novo sequencing
            spectrumPanel.addAutomaticDeNovoSequencing(peptide, annotations,
                    PeptideFragmentIon.B_ION,
                    PeptideFragmentIon.Y_ION,
                    annotationPreferences.getDeNovoCharge(),
                    annotationPreferences.showForwardIonDeNovoTags(),
                    annotationPreferences.showRewindIonDeNovoTags());
            updateAnnotationMenus(charge);
        }
    }
    /**
     * This method updates the results in the panel.
     *
     * Stores the title-to-hit-set mapping, repopulates the spectra table and
     * enables the annotation menu bar.
     *
     * @param denovoSearchResultSet the de novo search result to display
     */
    public void updateResults(DenovoSearchResult denovoSearchResultSet) {
        titleToSetMap = denovoSearchResultSet.getTitleToHitSet();
        // Update the spectra table.
        refreshSpectraTable();
        enableAnnotationMenuBar();
    }
/**
* This method updates the spectra table.
*/
private void refreshSpectraTable() {
((DefaultTableModel) spectraTbl.getModel()).setRowCount(0);
if (titleToSetMap != null) {
int i = 1;
Set<String> spectrumTitles = titleToSetMap.keySet();
for(String title : spectrumTitles) {
((DefaultTableModel) spectraTbl.getModel()).addRow(new Object[]{
i++,
title
});
}
spectraTbl.getSelectionModel().setSelectionInterval(0, 0);
}
}
    /**
     * Builds a sequence modification panel for the given peptide match.
     *
     * NOTE(review): the created SequenceModificationPanel is never added to
     * any container or returned, and selectedPeptideProfile is unused -- this
     * method currently has no visible effect; confirm whether it is dead code.
     */
    private void updateModificationProfile(PeptideMatch peptideMatch, boolean selectedPeptideProfile) {
        try {
            ArrayList<ModificationProfile> profiles = getModificationProfile(peptideMatch.getTheoreticPeptide());
            SequenceModificationPanel sequenceModificationPanel =
                    new SequenceModificationPanel(peptideMatch.getTheoreticPeptide().getNTerminal() + "-"
                            + peptideMatch.getTheoreticPeptide().getSequence()
                            + "-" + peptideMatch.getTheoreticPeptide().getCTerminal(),
                            profiles, true);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * Returns the content of a Modification Profile cell for a desired peptide.
     *
     * NOTE(review): this is currently a stub -- the profile-building code is
     * commented out, so an empty list is always returned.
     *
     * @param peptide The sequence of the peptide
     * @return The modification profile (always empty at present)
     */
    private ArrayList<ModificationProfile> getModificationProfile(Peptide peptide) {
        ArrayList<ModificationProfile> profiles = new ArrayList<ModificationProfile>();
        //Color ptmColor = peptideShakerGUI.getSearchParameters().getModificationProfile().getColor(ptmName);
        //ModificationProfile tempProfile = new ModificationProfile(ptmName, new double[peptide.getSequence().length()][2], ptmColor);
        return profiles;
    }
/**
* Update the annotation menu bar with the current annotation preferences.
*
* @param precursorCharge The charge of the precursor.
*/
public void updateAnnotationMenus(int precursorCharge) {
ArrayList<String> selectedLosses = new ArrayList<String>();
for (JCheckBoxMenuItem lossMenuItem : lossMenus.values()) {
if (lossMenuItem.isSelected()) {
selectedLosses.add(lossMenuItem.getText());
}
lossMenu.remove(lossMenuItem);
}
lossMenu.setVisible(true);
//lossSplitter.setVisible(true);
lossMenus.clear();
HashMap<String, NeutralLoss> neutralLosses = new HashMap<String, NeutralLoss>();
for (NeutralLoss neutralLoss : IonFactory.getInstance().getDefaultNeutralLosses()) {
neutralLosses.put(neutralLoss.name, neutralLoss);
}
ArrayList<String> names = new ArrayList<String>(neutralLosses.keySet());
Collections.sort(names);
ArrayList<String> finalSelectedLosses = selectedLosses;
boolean selected;
if (names.isEmpty()) {
lossMenu.setEnabled(false);
//lossSplitter.setVisible(false);
} else {
for (int i = 0; i < names.size(); i++) {
if (annotationPreferences.areNeutralLossesSequenceDependant()) {
selected = false;
for (NeutralLoss neutralLoss : annotationPreferences.getNeutralLosses().getAccountedNeutralLosses()) {
if (neutralLoss.isSameAs(neutralLoss)) {
selected = true;
break;
}
}
} else {
if (finalSelectedLosses.contains(names.get(i))) {
selected = true;
} else {
selected = false;
}
}
JCheckBoxMenuItem lossMenuItem = new JCheckBoxMenuItem(names.get(i));
lossMenuItem.setSelected(selected);
lossMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
annotationPreferences.useAutomaticAnnotation(false);
annotationPreferences.setNeutralLossesSequenceDependant(false);
updateAnnotationPreferences();
}
});
lossMenus.put(neutralLosses.get(names.get(i)), lossMenuItem);
lossMenu.add(lossMenuItem, i);
}
}
ArrayList<String> selectedCharges = new ArrayList<String>();
for (JCheckBoxMenuItem chargeMenuItem : chargeMenus.values()) {
if (chargeMenuItem.isSelected()) {
selectedCharges.add(chargeMenuItem.getText());
}
chargeMenu.remove(chargeMenuItem);
}
chargeMenus.clear();
if (precursorCharge == 1) {
precursorCharge = 2;
}
final ArrayList<String> finalSelectedCharges = selectedCharges;
for (int charge = 1; charge < precursorCharge; charge++) {
JCheckBoxMenuItem chargeMenuItem = new JCheckBoxMenuItem(charge + "+");
if (annotationPreferences.useAutomaticAnnotation()) {
chargeMenuItem.setSelected(annotationPreferences.getValidatedCharges().contains(charge));
} else {
if (finalSelectedCharges.contains(charge + "+")) {
chargeMenuItem.setSelected(true);
} else {
chargeMenuItem.setSelected(false);
}
}
chargeMenuItem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
annotationPreferences.useAutomaticAnnotation(false);
updateAnnotationPreferences();
}
});
chargeMenus.put(charge, chargeMenuItem);
chargeMenu.add(chargeMenuItem);
}
}
    /**
     * Save the current annotation preferences selected in the annotation menus.
     *
     * Copies every checkbox state (ion types, neutral losses, charges, peak
     * display, de novo tag options) into annotationPreferences and refreshes
     * the spectrum panel.
     */
    public void updateAnnotationPreferences() {
        // Fragment ion types.
        annotationPreferences.clearIonTypes();
        if (aIonCheckBoxMenuItem.isSelected()) {
            annotationPreferences.addIonType(Ion.IonType.PEPTIDE_FRAGMENT_ION, PeptideFragmentIon.A_ION);
        }
        if (bIonCheckBoxMenuItem.isSelected()) {
            annotationPreferences.addIonType(Ion.IonType.PEPTIDE_FRAGMENT_ION, PeptideFragmentIon.B_ION);
        }
        if (cIonCheckBoxMenuItem.isSelected()) {
            annotationPreferences.addIonType(Ion.IonType.PEPTIDE_FRAGMENT_ION, PeptideFragmentIon.C_ION);
        }
        if (xIonCheckBoxMenuItem.isSelected()) {
            annotationPreferences.addIonType(Ion.IonType.PEPTIDE_FRAGMENT_ION, PeptideFragmentIon.X_ION);
        }
        if (yIonCheckBoxMenuItem.isSelected()) {
            annotationPreferences.addIonType(Ion.IonType.PEPTIDE_FRAGMENT_ION, PeptideFragmentIon.Y_ION);
        }
        if (zIonCheckBoxMenuItem.isSelected()) {
            annotationPreferences.addIonType(Ion.IonType.PEPTIDE_FRAGMENT_ION, PeptideFragmentIon.Z_ION);
        }
        if (precursorCheckMenuItem.isSelected()) {
            annotationPreferences.addIonType(Ion.IonType.PRECURSOR_ION);
        }
        if (immoniumIonsCheckMenuItem.isSelected()) {
            annotationPreferences.addIonType(Ion.IonType.IMMONIUM_ION);
        }
        // Neutral losses currently ticked in the dynamic loss menu.
        annotationPreferences.clearNeutralLosses();
        for (NeutralLoss neutralLoss : lossMenus.keySet()) {
            if (lossMenus.get(neutralLoss).isSelected()) {
                annotationPreferences.addNeutralLoss(neutralLoss);
            }
        }
        // Fragment charges currently ticked in the dynamic charge menu.
        annotationPreferences.clearCharges();
        for (int charge : chargeMenus.keySet()) {
            if (chargeMenus.get(charge).isSelected()) {
                annotationPreferences.addSelectedCharge(charge);
            }
        }
        annotationPreferences.setShowAllPeaks(allCheckBoxMenuItem.isSelected());
        //annotationPreferences.setShowBars(barsCheckBoxMenuItem.isSelected());
        // De novo tag display options.
        annotationPreferences.setShowForwardIonDeNovoTags(forwardIonsDeNovoCheckBoxMenuItem.isSelected());
        annotationPreferences.setShowRewindIonDeNovoTags(rewindIonsDeNovoCheckBoxMenuItem.isSelected());
        if (deNovoChargeOneJRadioButtonMenuItem.isSelected()) {
            annotationPreferences.setDeNovoCharge(1);
        } else {
            annotationPreferences.setDeNovoCharge(2);
        }
        // Refresh the spectrum panel
        refreshSpectrumPanel();
    }
}
| apache-2.0 |
yig/SubdivisionSkinning | lib/Timing.hpp | 295 | #ifndef __Timing_hpp__
#define __Timing_hpp__
void tic( const char* label = 0 ) ;
void toc( const char* label = 0 ) ;
struct Tick
{
Tick( const char* alabel = 0 ) : label( alabel ) { tic( label ); }
~Tick() { toc( label ); }
const char* label;
};
#endif /* __Timing_hpp__ */
| apache-2.0 |
scootdev/scoot | config/scootconfig/saga_log_modules.go | 1288 | package scootconfig
import (
"time"
"github.com/twitter/scoot/ice"
"github.com/twitter/scoot/saga"
"github.com/twitter/scoot/saga/sagalogs"
)
// InMemorySagaLog struct is used by goice to create an InMemory instance
// of the SagaLog interface.
type InMemorySagaLogConfig struct {
	Type          string // discriminator naming this sagalog implementation in config
	ExpirationSec int    // passed to MakeInMemorySagaLog by Create() as a seconds duration (expiration)
	GCIntervalSec int    // passed to MakeInMemorySagaLog by Create() as a seconds duration (GC interval)
}
// Adds the InMemorySagaLog Create function to the goice MagicBag
func (c *InMemorySagaLogConfig) Install(bag *ice.MagicBag) {
	// Registers Create as a provider so the bag can resolve a saga.SagaLog.
	bag.Put(c.Create)
}
// Creates an instance of an InMemorySagaLog
func (c *InMemorySagaLogConfig) Create() saga.SagaLog {
	// Convert the configured second counts into durations before handing
	// them to the sagalog constructor.
	expiration := time.Duration(c.ExpirationSec) * time.Second
	gcInterval := time.Duration(c.GCIntervalSec) * time.Second
	return sagalogs.MakeInMemorySagaLog(expiration, gcInterval)
}
// FileSagaLogConfig struct is used by goice to create a FileSagaLog
// instance of the SagaLog interface
// Directory specifies the name of the directory to store
// Sagalog files in.
type FileSagaLogConfig struct {
	Type      string // discriminator naming this sagalog implementation in config
	Directory string // directory passed to MakeFileSagaLog by Create()
}
// Adds the FileSagaLogConfig Create function to the goice MagicBag
func (c *FileSagaLogConfig) Install(bag *ice.MagicBag) {
	// Registers Create as a provider so the bag can resolve a saga.SagaLog.
	bag.Put(c.Create)
}
// Creates an instance of the FileSagaLog
func (c *FileSagaLogConfig) Create() (saga.SagaLog, error) {
	// Errors from MakeFileSagaLog (e.g. an unusable directory) are propagated.
	return sagalogs.MakeFileSagaLog(c.Directory)
}
| apache-2.0 |
Budlee/Bonfire-Google_Authenticator | users/libraries/auth.php | 36378 | <?php if (!defined('BASEPATH')) exit('No direct script access allowed');
/**
* Bonfire
*
* An open source project to allow developers get a jumpstart their development of CodeIgniter applications
*
* @package Bonfire
* @author Bonfire Dev Team
* @copyright Copyright (c) 2011 - 2013, Bonfire Dev Team
* @license http://guides.cibonfire.com/license.html
* @link http://cibonfire.com
* @since Version 1.0
* @filesource
*/
// ------------------------------------------------------------------------
/**
* Auth Library
*
* Provides authentication functions for logging users in/out, restricting access
* to controllers, and managing login attempts.
*
* Security and ease-of-use are the two primary goals of the Auth system in Bonfire.
* This lib will be constantly updated to reflect the latest security practices that
* we learn about, while maintaining the simple API.
*
* @package Bonfire
* @subpackage Modules_Users
* @category Libraries
* @author Bonfire Dev Team
* @link http://guides.cibonfire.com/helpers/file_helpers.html
*
*/
class Auth
{
/**
* The url to redirect to on successful login.
*
* @access public
*
* @var string
*/
public $login_destination = '';
/**
* Stores the logged in user after the first test to improve performance.
*
* @access private
*
* @var object
*/
private $user;
/**
* Stores the ip_address of the current user for performance reasons.
*
* @access private
*
* @var string
*/
private $ip_address;
/**
* Stores the name of all existing permissions
*
* @access private
*
* @var array
*/
private $permissions = NULL;
/**
* Stores permissions by role so we don't have to scour the database more than once.
*
* @access private
*
* @var array
*/
private $role_permissions = array();
/**
* A pointer to the CodeIgniter instance.
*
* @access private
*
* @var object
*/
private $ci;
//--------------------------------------------------------------------
/**
* Grabs a pointer to the CI instance, gets the user's IP address,
* and attempts to automatically log in the user.
*
* @return void
*/
	public function __construct()
	{
		$this->ci =& get_instance();
		// Cache the client IP once; it is reused for login-attempt tracking
		// and for the last_ip audit column.
		$this->ip_address = $this->ci->input->ip_address();
		// We need the users language file for this to work
		// from other modules.
		$this->ci->lang->load('users/users');
		$this->ci->load->model('users/user_model');
		$this->ci->load->library('session');
		// Try to log the user in from session/cookie data
		$this->autologin();
		log_message('debug', 'Auth class initialized.');
	}//end __construct()
//--------------------------------------------------------------------
/**
* Attempt to log the user in.
*
* @access public
*
* @param string $login The user's login credentials (email/username)
* @param string $password The user's password
* @param bool $remember Whether the user should be remembered in the system.
*
* @return bool
*/
	public function login($login, $password, $remember=FALSE)
	{
		if (empty($login) || empty($password))
		{
			$error = $this->ci->settings_lib->item('auth.login_type') == 'both' ? lang('bf_username') .'/'. lang('bf_email') : ucfirst($this->ci->settings_lib->item('auth.login_type'));
			Template::set_message(sprintf(lang('us_fields_required'), $error), 'error');
			return FALSE;
		}
		$this->ci->load->model('users/User_model', 'user_model');
		// Grab the user from the db
		$selects = 'id, email, username, users.role_id, users.deleted, users.active, banned, ban_message, password_hash, force_password_reset';
		if ($this->ci->settings_lib->item('auth.do_login_redirect'))
		{
			$selects .= ', login_destination';
		}
		// 'both' means the supplied identity may match either username or email.
		if ($this->ci->settings_lib->item('auth.login_type') == 'both')
		{
			$user = $this->ci->user_model->select($selects)->find_by(array('username' => $login, 'email' => $login), null, 'or');
		}
		else
		{
			$user = $this->ci->user_model->select($selects)->find_by($this->ci->settings_lib->item('auth.login_type'), $login);
		}
		// check to see if a value of FALSE came back, meaning that the username or email or password doesn't exist.
		if ($user == FALSE)
		{
			// Same message as a wrong password, so the response does not
			// reveal whether the account exists.
			Template::set_message(lang('us_bad_email_pass'), 'error');
			return FALSE;
		}
		// check if the account has been activated.
		$activation_type = $this->ci->settings_lib->item('auth.user_activation_method');
		if ($user->active == 0 && $activation_type > 0) // in case we go to a unix timestamp later, this will still work.
		{
			if ($activation_type == 1)
			{
				Template::set_message(lang('us_account_not_active'), 'error');
			}
			elseif ($activation_type == 2)
			{
				Template::set_message(lang('us_admin_approval_pending'), 'error');
			}
			return FALSE;
		}
		// check if the account has been soft deleted.
		if ($user->deleted >= 1) // in case we go to a unix timestamp later, this will still work.
		{
			Template::set_message(sprintf(lang('us_account_deleted'), html_escape(settings_item("site.system_email"))), 'error');
			return FALSE;
		}
		// Try password
		if ($this->check_password($password, $user->password_hash))
		{
			// check if the account has been banned.
			if ($user->banned)
			{
				$this->increase_login_attempts($login);
				Template::set_message($user->ban_message ? $user->ban_message : lang('us_banned_msg'), 'error');
				return FALSE;
			}
			// Check if the user needs to reset their password
			if ($user->force_password_reset == 1)
			{
				Template::set_message(lang('us_forced_password_reset_note'), 'warning');
				// Need to generate a reset hash to pass the reset_password checks...
				$this->ci->load->helpers(array('string', 'security'));
				$pass_code = random_string('alnum', 40);
				$hash = do_hash($pass_code . $user->email);
				// Save the hash to the db so we can confirm it later.
				$this->ci->user_model->update_where('id', $user->id, array('reset_hash' => $hash, 'reset_by' => strtotime("+24 hours") ));
				$this->ci->session->set_userdata('pass_check', $hash);
				$this->ci->session->set_userdata('email', $user->email);
				// NOTE(review): assumes CI's redirect() exits the request here,
				// so login() never returns in this branch — confirm.
				redirect('/users/reset_password');
			}
			$this->clear_login_attempts($login);
			// We've successfully validated the login, so setup the session
			$this->setup_session($user->id, $user->username, $user->password_hash, $user->email, $user->role_id, $remember,'', $user->username);
			// Save the login info
			$data = array(
				'last_login' => date('Y-m-d H:i:s', time()),
				'last_ip' => $this->ip_address,
			);
			$this->ci->user_model->update($user->id, $data);
			// Clear the cached result of user() (and hence is_logged_in(), user_id() etc).
			// Doesn't fix `$this->current_user` in controller (for this page load)...
			unset($this->user);
			$trigger_data = array('user_id'=>$user->id, 'role_id'=>$user->role_id);
			Events::trigger('after_login', $trigger_data );
			// Save our redirect location
			$this->login_destination = isset($user->login_destination) && !empty($user->login_destination) ? $user->login_destination : '';
			return TRUE;
		}
		// Bad password
		else
		{
			Template::set_message(lang('us_bad_email_pass'), 'error');
			$this->increase_login_attempts($login);
		}
		return FALSE;
	}//end login()
//--------------------------------------------------------------------
/**
* Destroys the autologin information and the current session.
*
* @access public
*
* @return void
*/
public function logout()
{
$data = array(
'user_id' => $this->user_id(),
'role_id' => $this->role_id()
);
Events::trigger('before_logout', $data);
// Destroy the autologin information
$this->delete_autologin();
// Destroy the session
$this->ci->session->sess_destroy();
}//end logout()
//--------------------------------------------------------------------
/**
* Checks the session for the required info, then verifies against the database.
*
* @access public
*
* @return object (or a false value)
*/
	public function user()
	{
		// If we've already checked this session,
		// return that.
		if (isset($this->user))
		{
			return $this->user;
		}
		$this->user = FALSE;
		// Is there any session data we can use?
		if ($this->ci->session->userdata('identity') && $this->ci->session->userdata('user_id') )
		{
			// Grab the user account
			$user = $this->ci->user_model->find($this->ci->session->userdata('user_id'));
			if ($user !== FALSE)
			{
				//Is this user using Google Authenticator?
				$ga_enabled = $user->ga_enabled;
				// load do_hash()
				$this->ci->load->helper('security');
				// When GA is enabled, the session must also carry a ga_token
				// matching SHA1(id . ga_salt) — i.e. the two-factor step was
				// completed (see google_authenticator_login()).
				if (($ga_enabled == FALSE ) || ($ga_enabled == TRUE && do_hash($user->id . $user->ga_salt) === $this->ci->session->userdata('ga_token')) == TRUE)
				{
					// Ensure user_token is still equivalent to the SHA1 of the user_id and password_hash
					if (do_hash($this->ci->session->userdata('user_id') . $user->password_hash) === $this->ci->session->userdata('user_token'))
					{
						$this->user = $user;
					}
				}
			}
		}//end if
		if ($this->user !== FALSE)
		{
			// Normalise ids to ints for downstream strict comparisons.
			$this->user->id = (int) $this->user->id;
			$this->user->role_id = (int) $this->user->role_id;
		}
		return $this->user;
	}//end user()
//--------------------------------------------------------------------
/**
* Checks the session for the required info, then verifies against the database.
*
* @access public
*
* @return bool
*/
	public function is_logged_in()
	{
		// user() returns FALSE or the user object, so the cast yields a
		// clean boolean.
		return (bool) $this->user();
	}//end is_logged_in()
//--------------------------------------------------------------------
/**
* Checks that a user is logged in (and, optionally of the correct role)
* and, if not, send them to the login screen.
*
* If no permission is checked, will simply verify that the user is logged in.
* If a permission is passed in to the first parameter, will check the user's role
* and verify that role has the appropriate permission.
*
* @access public
*
* @param string $permission (Optional) A string representing the permission to check for.
* @param string $uri (Optional) A string representing an URI to redirect, if FALSE
*
* @return bool TRUE if the user has the appropriate access permissions. Redirect to the previous page if the user doesn't have permissions. Redirect to LOGIN_AREA page if the user is not logged in.
*/
	public function restrict($permission=NULL, $uri=NULL)
	{
		// If user isn't logged in, don't need to check permissions
		if ($this->is_logged_in() === FALSE)
		{
			$this->ci->load->library('Template');
			Template::set_message($this->ci->lang->line('us_must_login'), 'error');
			// NOTE(review): assumes Template::redirect() halts execution,
			// so anonymous visitors never reach the permission check — confirm.
			Template::redirect(LOGIN_URL);
		}
		// Check to see if the user has the proper permissions
		if ( ! empty($permission) && ! $this->has_permission($permission))
		{
			// set message telling them no permission THEN redirect
			Template::set_message( lang('us_no_permission'), 'attention');
			if ( ! $uri)
			{
				$uri = $this->ci->session->userdata('previous_page');
				// If previous page was the same (e.g. user pressed F5),
				// but permission has been removed, then redirecting
				// to it will cause an infinite loop.
				if ($uri == current_url())
				{
					$uri = site_url();
				}
			}
			Template::redirect($uri);
		}
		return TRUE;
	}//end restrict()
//--------------------------------------------------------------------
//--------------------------------------------------------------------
// !UTILITY METHODS
//--------------------------------------------------------------------
/**
* Retrieves the user_id from the current session.
*
* @access public
*
* @return int
*/
public function user_id()
{
if ( ! $this->is_logged_in())
{
return FALSE;
}
return $this->user()->id;
}//end user_id()
//--------------------------------------------------------------------
/**
* Retrieves the logged identity from the current session.
* Built from the user's submitted login.
*
* @access public
*
* @return string The identity used to login.
*/
public function identity()
{
if ( ! $this->is_logged_in())
{
return FALSE;
}
return $this->ci->session->userdata('identity');
}//end identity()
//--------------------------------------------------------------------
/**
* Retrieves the role_id from the current session.
*
* @return int The user's role_id.
*/
public function role_id()
{
if ( ! $this->is_logged_in())
{
return FALSE;
}
return $this->user()->role_id;
}//end role_id()
//--------------------------------------------------------------------
/**
* Verifies that the user is logged in and has the appropriate access permissions.
*
* @access public
*
* @param string $permission A string with the permission to check for, ie 'Site.Signin.Allow'
* @param int $role_id The id of the role to check the permission against. If role_id is not passed into the method, then it assumes it to be the current user's role_id.
* @param bool $override Whether or not access is granted if this permission doesn't exist in the database
*
* @return bool TRUE/FALSE
*/
public function has_permission($permission, $role_id=NULL, $override = FALSE)
{
// move permission to lowercase for easier checking.
$permission = strtolower($permission);
// If no role is being provided, assume it's for the current
// logged in user.
if (empty($role_id))
{
$role_id = $this->role_id();
}
$this->load_permissions();
$this->load_role_permissions($role_id);
// did we pass?
if (isset($this->permissions[$permission]))
{
$permission_id = $this->permissions[$permission];
if (isset($this->role_permissions[$role_id][$permission_id]))
{
return TRUE;
}
}
elseif ($override)
{
return TRUE;
}
return FALSE;
}//end has_permission()
//--------------------------------------------------------------------
/**
* Checks to see whether a permission is in the system or not.
*
* @access public
*
* @param string $permission The name of the permission to check for. NOT case sensitive.
*
* @return bool TRUE/FALSE
*/
public function permission_exists($permission)
{
// move permission to lowercase for easier checking.
$permission = strtolower($permission);
$this->load_permissions();
return isset($this->permissions[$permission]);
}//end permission_exists()
//--------------------------------------------------------------------
/**
* Load the permission names from the database
*
* @access public
*
* @param int $role_id An INT with the role id to grab permissions for.
*
* @return void
*/
	private function load_permissions()
	{
		// Lazily build the name => id map, exactly once per request.
		if ( ! isset($this->permissions))
		{
			$this->ci->load->model('permissions/permission_model');
			$perms = $this->ci->permission_model->find_all();
			$this->permissions = array();
			foreach ($perms as $perm)
			{
				// Keys are lowercased so lookups are case-insensitive.
				$this->permissions[strtolower($perm->name)] = $perm->permission_id;
			}
		}
	}//end load_permissions()
/**
* Load the role permissions from the database
*
* @access public
*
* @param int $role_id An INT with the role id to grab permissions for.
*
* @return void
*/
	private function load_role_permissions($role_id=NULL)
	{
		$role_id = ! is_null($role_id) ? $role_id : $this->role_id();
		// Only hit the database the first time this role is requested.
		if ( ! isset($this->role_permissions[$role_id]))
		{
			$this->ci->load->model('roles/role_permission_model');
			$role_perms = $this->ci->role_permission_model->find_for_role($role_id);
			$this->role_permissions[$role_id] = array();
			if (is_array($role_perms))
			{
				foreach($role_perms as $permission)
				{
					// Stored as a permission_id => TRUE set for O(1) checks.
					$this->role_permissions[$role_id][$permission->permission_id] = TRUE;
				}
			}
		}
	}//end load_role_permissions()
//--------------------------------------------------------------------
/**
* Retrieves the role_name for the requested role.
*
* @access public
*
* @param int $role_id An int representing the role_id.
*
* @return string A string with the name of the matched role.
*/
public function role_name_by_id($role_id)
{
if ( ! is_numeric($role_id))
{
return '';
}
$roles = array();
// If we already stored the role names, use those...
if (isset($this->role_names))
{
$roles = $this->role_names;
}
else
{
if ( ! class_exists('Role_model'))
{
$this->ci->load->model('roles/role_model');
}
$results = $this->ci->role_model->select('role_id, role_name')->find_all();
foreach ($results as $role)
{
$roles[$role->role_id] = $role->role_name;
}
}
// Try to return the role name
if (isset($roles[$role_id]))
{
return $roles[$role_id];
}
return '';
}//end role_name_by_id()
//--------------------------------------------------------------------
/*
* Passwords
*/
/**
* Hash a password
*
* @param String $pass The password to hash
* @param Int $iterations The number of iterations used in hashing the password
*
* @return Array An associative array containing the hashed password and number of iterations
*/
	public function hash_password($pass, $iterations=0)
	{
		// The shortest valid hash phpass can currently return is 20 characters,
		// which would only happen with CRYPT_EXT_DES
		$min_hash_len = 20;
		// Fall back to the site-wide iteration count when the caller passes
		// nothing usable.
		if (empty($iterations) || ! is_numeric($iterations) || $iterations <= 0)
		{
			$iterations = $this->ci->settings_lib->item('password_iterations');
		}
		// Load the password hash library
		// NOTE(review): this '/../libraries/' path resolves to the same
		// directory as the '/PasswordHash.php' path used in check_password();
		// consider unifying the two require paths.
		if ( ! class_exists('PasswordHash'))
		{
			require(dirname(__FILE__) . '/../libraries/PasswordHash.php');
		}
		$hasher = new PasswordHash($iterations, false);
		$password = $hasher->HashPassword($pass);
		unset($hasher);
		// A result shorter than any valid phpass hash means hashing failed.
		if (strlen($password) < $min_hash_len)
		{
			return false;
		}
		return array('hash' => $password, 'iterations' => $iterations);
	}
//--------------------------------------------------------------------
/**
* Check the supplied password against the supplied hash
*
* @param String $password The password to check
* @param String $hash The hash
*
* @return Bool true if the password and hash match, else false
*/
	public function check_password($password, $hash)
	{
		// Load the password hash library
		if ( ! class_exists('PasswordHash'))
		{
			require(dirname(__FILE__) .'/PasswordHash.php');
		}
		// Try password
		// NOTE(review): the -1 cost presumably works because phpass reads the
		// cost from the stored $hash during CheckPassword() — verify against
		// the bundled PasswordHash implementation.
		$hasher = new PasswordHash(-1, false);
		$return = $hasher->CheckPassword($password, $hash);
		unset($hasher);
		return $return;
	}
//--------------------------------------------------------------------
// !LOGIN ATTEMPTS
//--------------------------------------------------------------------
/**
* Records a login attempt into the database.
*
* @access protected
*
* @param string $login The login id used (typically email or username)
*
* @return void
*/
	protected function increase_login_attempts($login)
	{
		// One row per failed attempt; rows are counted by
		// num_login_attempts() and removed by clear_login_attempts().
		$this->ci->db->insert('login_attempts', array('ip_address' => $this->ip_address, 'login' => $login));
	}//end increase_login_attempts()
//--------------------------------------------------------------------
/**
* Clears all login attempts for this user, as well as cleans out old logins.
*
* @access protected
*
* @param string $login The login credentials (typically email)
* @param int $expires The time (in seconds) that attempts older than will be deleted
*
* @return void
*/
	protected function clear_login_attempts($login, $expires = 86400)
	{
		$this->ci->db->where(array('ip_address' => $this->ip_address, 'login' => $login));
		// Purge obsolete login attempts
		// The resulting DELETE matches (this ip AND this login) OR any row
		// older than $expires seconds, so stale attempts from anyone are
		// swept at the same time.
		$this->ci->db->or_where('time <', date('Y-m-d H:i:s', time() - $expires));
		$this->ci->db->delete('login_attempts');
	}//end clear_login_attempts()
//--------------------------------------------------------------------
/**
* Get number of attempts to login occurred from given IP-address and/or login
*
* @param string $login (Optional) The login id to check for (email/username). If no login is passed in, it will only check against the IP Address of the current user.
*
* @return int An int with the number of attempts.
*/
	function num_login_attempts($login=NULL)
	{
		// SELECT 1 (unescaped): only the row count matters, not the columns.
		$this->ci->db->select('1', FALSE);
		$this->ci->db->where('ip_address', $this->ip_address);
		if (strlen($login) > 0)
		{
			// Counts attempts from this IP OR against this login.
			$this->ci->db->or_where('login', $login);
		}
		$query = $this->ci->db->get('login_attempts');
		return $query->num_rows();
	}//end num_login_attempts()
//--------------------------------------------------------------------
// !AUTO-LOGIN
//--------------------------------------------------------------------
/**
* Attempts to log the user in based on an existing 'autologin' cookie.
*
* @access private
*
* @return void
*/
	private function autologin()
	{
		$this->ci->load->library('settings/settings_lib');
		// "Remember me" can be disabled site-wide.
		if ($this->ci->settings_lib->item('auth.allow_remember') == FALSE)
		{
			return;
		}
		$this->ci->load->helper('cookie');
		$cookie = get_cookie('autologin', TRUE);
		if ( ! $cookie)
		{
			return;
		}
		// We have a cookie, so split it into user_id and token
		// Three segments means a Google Authenticator token is appended
		// (see create_google_auth_autologin_token()).
		if(count (explode('~', $cookie)) == 3)
		{
			list($user_id, $test_token, $ga_token) = explode('~', $cookie);
		}
		else
		{
			list($user_id, $test_token) = explode('~', $cookie);
			$ga_token = FALSE;
		}
		// Try to pull a match from the database
		$this->ci->db->where( array('user_id' => $user_id, 'token' => $test_token) );
		$query = $this->ci->db->get('user_cookies');
		if ($query->num_rows() == 1)
		{
			// GA-enabled accounts must also present the ga_token stored for
			// this cookie row; otherwise the cookie login is rejected.
			$user_ga = $this->ci->user_model->select('id, ga_enabled')->find($user_id);
			if($user_ga->ga_enabled == 1 && ($query->row()->ga_token != $ga_token) )
			{
				return;
			}
			// Save logged in status to save on db access later.
			// NOTE(review): $this->logged_in is not read elsewhere in this
			// class — user()/is_logged_in() rely on session data instead.
			$this->logged_in = TRUE;
			// If a session doesn't exist, we need to refresh our autologin token
			// and get the session started.
			if ( ! $this->ci->session->userdata('user_id'))
			{
				// Grab the current user info for the session
				$this->ci->load->model('users/User_model', 'user_model');
				$user = $this->ci->user_model->select('id, username, email, password_hash, users.role_id')->find($user_id);
				if ( ! $user)
				{
					return;
				}
				$this->setup_session($user->id, $user->username, $user->password_hash, $user->email, $user->role_id, TRUE, $test_token, $user->username);
				if($user_ga->ga_enabled == 1)
				{
					//Need to query DB to get new token
					// Rotate the GA salt so the previous session ga_token
					// cannot be replayed.
					$this->ci->load->helper('string');
					$new_salt = random_string('alnum', 128);
					$this->ci->db->where('id', $user_id);
					$this->ci->db->set('ga_salt', $new_salt);
					$this->ci->db->update('users');
					$this->ci->session->set_userdata(array('ga_token'=>do_hash($user->id.$new_salt)));
					// setup_session() refreshed the autologin token above, so
					// re-read the newest cookie row and bind a fresh ga_token.
					$this->ci->db->order_by("created_on", "desc");
					$this->ci->db->where( array('user_id' => $user_id) );
					$query = $this->ci->db->get('user_cookies');
					$this->create_google_auth_autologin_token($user_id, $query->row()->token);
				}
			}
		}
	}//end autologin()
//--------------------------------------------------------------------
/**
* Create the auto-login entry in the database. This method uses
* Charles Miller's thoughts at:
* http://fishbowl.pastiche.org/2004/01/19/persistent_login_cookie_best_practice/
*
* @access private
*
* @param int $user_id An int representing the user_id.
* @param string $old_token The previous token that was used to login with.
*
* @return bool Whether the autologin was created or not.
*/
	private function create_autologin($user_id, $old_token=NULL)
	{
		if ($this->ci->settings_lib->item('auth.allow_remember') == FALSE)
		{
			return FALSE;
		}
		// load random_string()
		$this->ci->load->helper('string');
		// Generate a random string for our token
		$token = random_string('alnum', 128);
		// If an old_token is presented, we're refreshing the autologin information
		// otherwise we're creating a new one.
		if (empty($old_token))
		{
			// Create a new token
			$data = array(
				'user_id' => $user_id,
				'token' => $token,
				'created_on' => date('Y-m-d H:i:s')
			);
			$this->ci->db->insert('user_cookies', $data);
		}
		else
		{
			// Refresh the token
			$this->ci->db->where('user_id', $user_id);
			$this->ci->db->where('token', $old_token);
			$this->ci->db->set('token', $token);
			$this->ci->db->set('created_on', date('Y-m-d H:i:s'));
			$this->ci->db->update('user_cookies');
		}
		if ($this->ci->db->affected_rows())
		{
			// Create the autologin cookie
			// Format "<user_id>~<token>": autologin() splits on '~'.
			$this->ci->input->set_cookie('autologin', $user_id .'~'. $token, $this->ci->settings_lib->item('auth.remember_length'));
			return TRUE;
		}
		else
		{
			return FALSE;
		}
	}//end create_autologin()
//--------------------------------------------------------------------
/**
* Deletes the autologin cookie for the current user.
*
* @access private
*
* @return void
*/
private function delete_autologin()
{
if ($this->ci->settings_lib->item('auth.allow_remember') == FALSE)
{
return;
}
// First things first.. grab the cookie so we know what row
// in the user_cookies table to delete.
$this->ci->load->helper('cookie');
$cookie = get_cookie('autologin');
if ($cookie)
{
list($user_id, $token) = explode('~', $cookie);
// Now we can delete the cookie
delete_cookie('autologin');
// And clean up the database
$this->ci->db->where('user_id', $user_id);
$this->ci->db->where('token', $token);
$this->ci->db->delete('user_cookies');
}
// Also perform a clean up of any autologins older than 2 months
$this->ci->db->where('created_on', '< DATE_SUB(CURDATE(), INTERVAL 2 MONTH)');
$this->ci->db->delete('user_cookies');
}//end delete_autologin()
//--------------------------------------------------------------------
/**
* Creates the session information for the current user. Will also create an autologin cookie if required.
*
* @access private
*
* @param int $user_id An int with the user's id
* @param string $username The user's username
* @param string $password_hash The user's password hash. Used to create a new, unique user_token.
* @param string $email The user's email address
* @param int $role_id The user's role_id
* @param bool $remember A boolean (TRUE/FALSE). Whether to keep the user logged in.
* @param string $old_token User's db token to test against
* @param string $user_name User's made name for displaying options
*
* @return bool TRUE/FALSE on success/failure.
*/
	private function setup_session($user_id, $username, $password_hash, $email, $role_id, $remember=FALSE, $old_token=NULL,$user_name='')
	{
		// What are we using as login identity?
		// Should I use _identity_login() and move below code?
		// If "both", defaults to email, unless we display usernames globally
		if (($this->ci->settings_lib->item('auth.login_type') == 'both'))
		{
			$login = $this->ci->settings_lib->item('auth.use_usernames') ? $username : $email;
		}
		else
		{
			$login = $this->ci->settings_lib->item('auth.login_type') == 'username' ? $username : $email;
		}
		// TODO: consider taking this out of setup_session()
		if ($this->ci->settings_lib->item('auth.use_usernames') == 0 && $this->ci->settings_lib->item('auth.login_type') == 'username')
		{
			// if we've a username at identity, and don't want made user name, let's have an email nearby.
			$us_custom = $email;
		}
		else
		{
			// For backward compatibility, defaults to username
			$us_custom = $this->ci->settings_lib->item('auth.use_usernames') == 2 ? $user_name : $username;
		}
		// Save the user's session info
		// load do_hash()
		$this->ci->load->helper('security');
		// user_token binds the session to the current password hash, so a
		// password change elsewhere invalidates this session (see user()).
		$data = array(
			'user_id' => $user_id,
			'auth_custom' => $us_custom,
			'user_token' => do_hash($user_id . $password_hash),
			'identity' => $login,
			'role_id' => $role_id,
			'logged_in' => TRUE,
		);
		$this->ci->session->set_userdata($data);
		// Should we remember the user?
		if ($remember === TRUE)
		{
			return $this->create_autologin($user_id, $old_token);
		}
		return TRUE;
	}//end setup_session
//--------------------------------------------------------------------
/**
* Returns the identity to be used upon user registration.
*
* @access private
* @todo Decision to be made with this method.
*
* @return void
*/
	private function _identity_login()
	{
		// Intentionally empty placeholder — no behavior yet.
		//Should I move identity conditional code from setup_session() here?
		//Or should conditional code be moved to auth->identity(),
		// and if Optional TRUE is passed, it would then determine which identity to store in userdata?
	}//end _identity_login()
//--------------------------------------------------------------------
//**Google authentication autherisation methods**//
/**
* A method to check if a user has Google Authentication on.
*
* @access public
*
* @param string $login The user's login credentials (email/username)
*
* @return bool If Google Authenticator is being used TRUE on users account. True is returned, ELSE FALSE is returned
*/
	public function is_google_authenticator_enabled($login)
	{
		if (empty($login))
		{
			return FALSE;
		}
		$this->ci->load->model('users/User_model', 'user_model');
		// Only the fields needed for the GA check.
		$selects = 'id, ga_enabled';
		// Look the account up by whichever identity type the site allows.
		if ($this->ci->settings_lib->item('auth.login_type') == 'both')
		{
			$user = $this->ci->user_model->select($selects)->find_by(array('username' => $login, 'email' => $login), null, 'or');
		}
		else
		{
			$user = $this->ci->user_model->select($selects)->find_by($this->ci->settings_lib->item('auth.login_type'), $login);
		}
		// check to see if a value of FALSE came back, meaning that the username or email or password doesn't exist.
		if ($user == FALSE)
		{
			return FALSE;
		}
		if ($user->ga_enabled == 1)
		{
			return TRUE;
		}
		return FALSE;
	}
/**
* A method to be used once the user has entered there verification code
* and that they get logged into the system fully
*
* @access public
*
	 * @param int    $user_id       The id of the user completing verification
	 * @param string $users_ga_code The user's supplied verification code
*
* @return bool If user's supplied authentication is correct then TRUE is returned else FALSE
*/
	public function google_authenticator_login($user_id, $users_ga_code)
	{
		$user = $this->ci->user_model->select('username, ga_seed, ga_salt')->find($user_id);
		$this->ci->load->library('users/g_auth');
		if($this->ci->g_auth->verify_key($user->ga_seed, $users_ga_code))
		{
			$this->clear_login_attempts($user->username);
			// Mark the two-factor step complete for this session; user()
			// compares this value against SHA1(id . ga_salt).
			$this->ci->session->set_userdata(array('ga_token'=>do_hash($user_id.$user->ga_salt)));
			$this->ci->load->helper('cookie');
			$cookie = get_cookie('autologin', TRUE);
			if ( ! $cookie)
			{
				return TRUE;
			}
			// "Remember me" is active: bind a GA token to the cookie row too.
			list($user_id, $token) = explode('~', $cookie);
			$this->create_google_auth_autologin_token($user_id,$token);
			return TRUE;
		}
		// Wrong code counts as a failed login attempt.
		$this->increase_login_attempts($user->username);
		Template::set_message(lang('us_google_auth_verify_error'), 'error');
		return FALSE;
	}
/**
* This method is used so that when a user completes the login then if they
* are using remember me it will then add in there specific autologin details
*
* @access protected
*
	 * @param int    $user_id The id of the user the autologin cookie belongs to
	 * @param string $token   The existing autologin token stored for that user
*
* @return bool If autologin cookie was affected or the user chose not to use an
* autologin cookie. If the update to the autologin cookie failed FALSE is
* returned
*/
	protected function create_google_auth_autologin_token($user_id, $token)
	{
		$this->ci->load->helper('string');
		// Generate a random string for our token
		$ga_token = random_string('alnum', 128);
		// Refresh the token
		$this->ci->db->where('user_id', $user_id);
		$this->ci->db->where('token', $token);
		$this->ci->db->set('ga_token', $ga_token);
		$this->ci->db->set('created_on', date('Y-m-d H:i:s'));
		$this->ci->db->update('user_cookies');
		if ($this->ci->db->affected_rows())
		{
			// Create the autologin cookie
			// Three-part format "<user_id>~<token>~<ga_token>" is what
			// autologin() recognises as a GA-enabled cookie.
			$this->ci->input->set_cookie('autologin', $user_id .'~'. $token.'~'. $ga_token, $this->ci->settings_lib->item('auth.remember_length'));
			return TRUE;
		}
		else
		{
			return FALSE;
		}
	}
}
//end Auth
//--------------------------------------------------------------------
if ( ! function_exists('has_permission'))
{
    /**
     * Convenience wrapper around Auth::has_permission() for checking the
     * current user's permissions from views and controllers.
     *
     * @access public
     *
     * @param string $permission The permission to check for, ie 'Site.Signin.Allow'
     * @param bool   $override   Whether or not access is granted if this permission doesn't exist in the database
     *
     * @return bool TRUE/FALSE
     */
    function has_permission($permission, $override = FALSE)
    {
        return get_instance()->auth->has_permission($permission, NULL, $override);
    }//end has_permission()
}
//--------------------------------------------------------------------
if ( ! function_exists('permission_exists'))
{
    /**
     * Reports whether a named permission is defined in the system.
     *
     * @access public
     *
     * @param string $permission The name of the permission to check for. NOT case sensitive.
     *
     * @return bool TRUE/FALSE
     */
    function permission_exists($permission)
    {
        return get_instance()->auth->permission_exists($permission);
    }//end permission_exists()
}
//--------------------------------------------------------------------
if ( ! function_exists('abbrev_name'))
{
    /**
     * Retrieves the first and last name from a given full-name string.
     *
     * Middle names are dropped: only the first word and the final word are
     * kept. Non-string input is returned unchanged.
     *
     * @access public
     *
     * @param string $name Full name
     *
     * @return string The First and Last name from the given parameter.
     */
    function abbrev_name($name)
    {
        if (is_string($name))
        {
            // Split off the first name; limit 2 keeps the remainder intact.
            // BUGFIX: the old list() destructuring raised an undefined-offset
            // notice/warning on PHP 7.4+ when only one name was entered.
            $parts = explode(' ', $name, 2);
            $fname = $parts[0];
            if ( ! isset($parts[1])) // Meaning only one name was entered...
            {
                $lastname = ' ';
            }
            else
            {
                // Keep only the final word of the remainder as the last name.
                $words    = explode(' ', $parts[1]);
                $lastname = end($words);
            }
            return trim($fname.' '.$lastname);
        }
        // TODO: Consider an optional parameter for picking custom var session.
        // Making it auth private, and using auth custom var
        return $name;
    }//end abbrev_name()
} | apache-2.0 |
adamrduffy/trinidad-1.0.x | trinidad-api/src/main/java/org/apache/myfaces/trinidad/event/SetActionListener.java | 3966 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.myfaces.trinidad.event;
import javax.faces.component.StateHolder;
import javax.faces.context.FacesContext;
import javax.faces.el.ValueBinding;
import javax.faces.event.ActionEvent;
import javax.faces.event.ActionListener;
import org.apache.myfaces.trinidad.bean.FacesBean;
import org.apache.myfaces.trinidad.bean.FacesBeanImpl;
import org.apache.myfaces.trinidad.bean.PropertyKey;
import org.apache.myfaces.trinidad.logging.TrinidadLogger;
/**
* ActionListener that supports getting a value from
* one binding and setting it on another.
*/
/**
 * ActionListener that supports getting a value from
 * one ValueBinding (the "from" property) and setting it on
 * another ValueBinding (the "to" property) when an action fires.
 */
public class SetActionListener implements ActionListener, StateHolder
{
  /**
   * Creates a SetActionListener.
   */
  public SetActionListener()
  {
    _bean = new Bean();
  }

  /**
   * Gets the value from the "from" property and sets it on
   * the ValueBinding for the "to" property. A failure to set the
   * value is logged at warning level and then rethrown.
   */
  public void processAction(ActionEvent event)
  {
    ValueBinding to = _bean.getValueBinding(Bean.TO_KEY);
    if (to != null)
    {
      Object from = getFrom();
      try
      {
        to.setValue(FacesContext.getCurrentInstance(), from);
      }
      catch (RuntimeException e)
      {
        if (_LOG.isWarning())
        {
          ValueBinding fromBinding = _bean.getValueBinding(Bean.FROM_KEY);
          String mes = "Error setting:'"+to.getExpressionString() +
            "' to value:"+from;
          if (fromBinding != null)
            mes += " from:'"+fromBinding.getExpressionString()+"'";

          _LOG.warning(mes, e);
        }
        throw e;
      }
    }
  }

  /**
   * Returns the ValueBinding registered under the given property name,
   * or null when the name is not a known property.
   */
  public ValueBinding getValueBinding(String name)
  {
    PropertyKey key = Bean.TYPE.findKey(name);
    if (key == null)
      return null;

    return _bean.getValueBinding(key);
  }

  /**
   * Registers a ValueBinding for the given property name.
   *
   * @throws IllegalArgumentException if the name is not a known property
   */
  public void setValueBinding(String name, ValueBinding binding)
  {
    PropertyKey key = Bean.TYPE.findKey(name);
    if (key == null)
      throw new IllegalArgumentException("Unknown property: " + name);

    _bean.setValueBinding(key, binding);
  }

  /** Returns the source value (resolving the "from" binding if set). */
  public Object getFrom()
  {
    return _bean.getProperty(Bean.FROM_KEY);
  }

  /** Sets a literal source value for the "from" property. */
  public void setFrom(Object from)
  {
    _bean.setProperty(Bean.FROM_KEY, from);
  }

  public Object saveState(FacesContext context)
  {
    return _bean.saveState(context);
  }

  public void restoreState(FacesContext context, Object state)
  {
    _bean.restoreState(context, state);
  }

  /** This listener always participates in state saving. */
  public boolean isTransient()
  {
    return false;
  }

  /** Transience cannot be changed for this listener. */
  public void setTransient(boolean newTransientValue)
  {
    throw new UnsupportedOperationException();
  }

  // saveState() and restoreState() come from FacesBeanImpl
  static private class Bean extends FacesBeanImpl
  {
    static public final FacesBean.Type TYPE = new FacesBean.Type();
    static public final PropertyKey FROM_KEY =
      TYPE.registerKey("from");
    // Must be a ValueBinding
    static public final PropertyKey TO_KEY =
      TYPE.registerKey("to");

    @Override
    public Type getType()
    {
      return TYPE;
    }

    static
    {
      TYPE.lock();
    }
  }

  // Final: assigned once in the constructor; restoreState() mutates the
  // bean's contents, never the reference.
  private final Bean _bean;

  private static final TrinidadLogger _LOG = TrinidadLogger.createTrinidadLogger(SetActionListener.class);
}
| apache-2.0 |
dbflute-test/dbflute-test-dbms-oracle | src/main/java/org/docksidestage/oracle/dbflute/cbean/nss/SynonymProductNss.java | 1244 | package org.docksidestage.oracle.dbflute.cbean.nss;
import org.docksidestage.oracle.dbflute.cbean.cq.SynonymProductCQ;
/**
* The nest select set-upper of SYNONYM_PRODUCT.
* @author oracleman
*/
public class SynonymProductNss {

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    /** The condition query of SYNONYM_PRODUCT that nested setups are registered on. */
    protected final SynonymProductCQ _query;

    public SynonymProductNss(SynonymProductCQ query) {
        _query = query;
    }

    /** @return Does this nest select set-upper hold a condition query? */
    public boolean hasConditionQuery() {
        return _query != null;
    }

    // ===================================================================================
    //                                                                     Nested Relation
    //                                                                     ===============
    /**
     * With nested relation columns to select clause. <br>
     * SYNONYM_PRODUCT_STATUS by my PRODUCT_STATUS_CODE, named 'synonymProductStatus'.
     */
    public void withSynonymProductStatus() {
        _query.xdoNss(_query::querySynonymProductStatus);
    }
}
| apache-2.0 |
Simplement/mailqueue | src/Bridges/SymfonyConsole/MailSendCommand.php | 2299 | <?php
namespace Simplement\Bridges\SymfonyConsole;
use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Input\InputArgument;
/**
* Description of MailSendCommand
*
* @author Martin Dendis <martin.dendis@improvisio.cz>
*/
class MailSendCommand extends Command {

    /** @var \Nette\Mail\IMailer @inject */
    public $mailer;

    /** @var array Command configuration; expects key 'defaultSender'. */
    private $config;

    /**
     * Stores the command configuration.
     *
     * @param array $config Expects key 'defaultSender' (fallback sender address).
     */
    public function setConfig(array $config) {
        $this->config = $config;
    }

    protected function configure() {
        $this->setName('mailqueue:send')
                ->setDescription('Send mail to given email address. Useful to determine whether mail queue is working properly')
                ->addArgument('recipient', InputArgument::REQUIRED, 'Recipient email')
                ->addOption('from', 'f', InputOption::VALUE_REQUIRED, 'Sender email', $this->config['defaultSender'])
                ->addOption('subject', 's', InputOption::VALUE_REQUIRED, 'Email subject', '')
                ->addOption('message', 'm', InputOption::VALUE_REQUIRED, 'Email message', '')
                ->addOption('no-queue', 'n', InputOption::VALUE_NONE, 'Skip mail queue & send mail directly.');
    }

    /**
     * Builds the message from the CLI input and hands it to the mailer.
     *
     * @return int 0 on success, 1 when a supplied address is invalid.
     */
    protected function execute(InputInterface $input, OutputInterface $output) {
        $message = new \Nette\Mail\Message;

        if (!$this->validateEmail($from = trim($input->getOption('from')))) {
            $output->writeln('<error>Expected valid email address, given "' . $from . '"</error>');
            return 1;
        }
        $message->setFrom($from);

        if (!$this->validateEmail($to = trim($input->getArgument('recipient')))) {
            $output->writeln('<error>Expected valid email address, given "' . $to . '"</error>');
            return 1;
        }
        $message->addTo($to);

        $message->setSubject($input->getOption('subject'));
        $message->setBody($input->getOption('message'));

        // BUGFIX: the configured option is 'no-queue'; the old code read the
        // undefined option 'force', which makes Symfony Console throw an
        // InvalidArgumentException before any mail is sent.
        if ($input->getOption('no-queue') && $this->mailer instanceof \Simplement\MailQueue\Mailer) {
            // Bypass the queue and deliver the message immediately.
            $this->mailer->send($message, 1, FALSE);
        } else {
            $this->mailer->send($message);
        }
        return 0;
    }

    /**
     * Validates an email address, accepting the "Display Name <email>" form.
     *
     * @param string $email
     * @return bool
     */
    private function validateEmail($email) {
        // Extract the address part from "Display Name <email>" input.
        if (preg_match('#^(.+) +<(.*)>\z#', $email, $matches)) {
            $email = $matches[2];
        }
        return \Nette\Utils\Validators::isEmail($email);
    }

}
| apache-2.0 |
revdaalex/learn_java | chapter3/TicTacToe/src/main/java/ru/revdaalex/tictactoe/model/Board.java | 1998 | package ru.revdaalex.tictactoe.model;
import java.util.ArrayList;
import java.util.List;
/**
* Game board.
* Created by revdaalex on 09.12.2016.
*/
public class Board {
    // Cells not yet taken; rebuilt on every getFreeSteps() call.
    List<Point> availablePoints;
    // 3x3 grid: 0 = empty, 1 = X, 2 = O.
    int[][] board = new int[3][3];

    public Board() {
    }

    /**
     * @return true when either player has three in a row, or no free cells
     *         remain (draw)
     */
    public boolean isWin() {
        return (XWin() || OWin() || getFreeSteps().isEmpty());
    }

    /** @return true when player X (marker 1) has three in a row */
    public boolean XWin() {
        return hasWon(1);
    }

    /** @return true when player O (marker 2) has three in a row */
    public boolean OWin() {
        return hasWon(2);
    }

    /**
     * Checks both diagonals, every row and every column for three cells
     * holding the given marker. Shared by XWin()/OWin(), which previously
     * duplicated this logic verbatim.
     *
     * @param player marker to test (1 = X, 2 = O)
     * @return true when the player has a winning line
     */
    private boolean hasWon(int player) {
        if ((board[0][0] == player && board[1][1] == player && board[2][2] == player)
                || (board[0][2] == player && board[1][1] == player && board[2][0] == player)) {
            return true;
        }
        for (int i = 0; i < 3; ++i) {
            if ((board[i][0] == player && board[i][1] == player && board[i][2] == player)
                    || (board[0][i] == player && board[1][i] == player && board[2][i] == player)) {
                return true;
            }
        }
        return false;
    }

    /** Collects the coordinates of all empty cells. */
    private List<Point> getFreeSteps() {
        availablePoints = new ArrayList<>();
        for (int i = 0; i < 3; ++i) {
            for (int j = 0; j < 3; ++j) {
                if (board[i][j] == 0) {
                    availablePoints.add(new Point(i, j));
                }
            }
        }
        return availablePoints;
    }

    public void placeAMove(Point point, int player) {
        board[point.x][point.y] = player; // player 1 plays X, player 2 plays O
    }
}
| apache-2.0 |
blackcathacker/kc.preclean | coeus-code/src/main/java/org/kuali/kra/irb/personnel/ProtocolPersonRoleValuesFinder.java | 3593 | /*
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.irb.personnel;
import org.kuali.kra.protocol.personnel.ProtocolPersonRoleMappingBase;
import org.kuali.rice.core.api.util.ConcreteKeyValue;
import org.kuali.rice.core.api.util.KeyValue;
import org.kuali.rice.krad.uif.control.UifKeyValuesFinderBase;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.kuali.coeus.sys.framework.service.KcServiceLocator.getService;
/**
* This class is to get valid values for protocol person role
* based on an assigned role. A source role can be change only to
* specific target roles in the list. This list is obtained from
* person role mapping. Include source role first and then start adding
* target roles to the list.
*/
public class ProtocolPersonRoleValuesFinder extends UifKeyValuesFinderBase {

    // Role whose valid replacement roles are being listed.
    private String sourceRoleId;

    private String sourceRoleReferenceObject = "sourceRole";
    private String targetRoleReferenceObject = "targetRole";

    /**
     * Builds the selectable role list: the source role itself plus every
     * target role it may be changed to (per the person role mapping),
     * sorted by key/value.
     */
    @Override
    public List<KeyValue> getKeyValues() {
        final List<ProtocolPersonRoleMappingBase> validPersonRoles =
                getProtocolPersonnelService().getPersonRoleMapping(getSourceRoleId());
        List<ConcreteKeyValue> keyValues = new ArrayList<ConcreteKeyValue>();
        // The currently-assigned role is always a valid choice; list it first.
        keyValues.add(new ConcreteKeyValue(getSourceRoleId(), getSourceRoleDescription()));
        for (ProtocolPersonRoleMappingBase protocolPersonRole : validPersonRoles) {
            keyValues.add(new ConcreteKeyValue(protocolPersonRole.getTargetRoleId(),
                    getTargetRoleDescription(protocolPersonRole)));
        }
        Collections.sort(keyValues);
        // Copy into a List<KeyValue> to match the declared return type.
        return new ArrayList<KeyValue>(keyValues);
    }

    /**
     * This method is used to lookup the source role object and return description
     * @return String - source role name
     */
    private String getSourceRoleDescription() {
        return getProtocolPersonnelService().getProtocolPersonRole(getSourceRoleId()).getDescription();
    }

    /**
     * This method is used to refresh target role object and return description
     * @param protocolPersonRole the mapping whose target role is resolved
     * @return String - target role name
     */
    private String getTargetRoleDescription(ProtocolPersonRoleMappingBase protocolPersonRole) {
        // Refresh pulls the lazily-loaded target role reference before reading it.
        protocolPersonRole.refreshReferenceObject(targetRoleReferenceObject);
        return protocolPersonRole.getTargetRole().getDescription();
    }

    /**
     * Locate from Spring a singleton instance of the <code>{@link ProtocolPersonnelService}</code>.
     *
     * @return ProtocolPersonnelService
     */
    private ProtocolPersonnelService getProtocolPersonnelService() {
        return getService(ProtocolPersonnelService.class);
    }

    public String getSourceRoleId() {
        return sourceRoleId;
    }

    public void setSourceRoleId(String sourceRoleId) {
        this.sourceRoleId = sourceRoleId;
    }
}
| apache-2.0 |
Kirtish/spring-data-jpa | src/main/java/com/rsoft/app/services/impl/RoleServiceImpl.java | 1154 | package com.rsoft.app.services.impl;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import com.rsoft.app.domain.Role;
import com.rsoft.app.repositories.RoleRepository;
import com.rsoft.app.services.IRoleService;
@Service
@Transactional(propagation = Propagation.REQUIRED)
@Service
@Transactional(propagation = Propagation.REQUIRED)
public class RoleServiceImpl implements IRoleService {

    @Autowired
    RoleRepository roleRepository;

    /**
     * Loads all roles, copying the repository's Iterable into a List.
     */
    @Override
    public List<Role> getRoles() {
        List<Role> roles = new ArrayList<Role>();
        // Enhanced-for over the Iterable replaces the manual Iterator drain.
        for (Role role : roleRepository.findAll()) {
            roles.add(role);
        }
        return roles;
    }

    /** @return the role with the given id, or null when absent */
    @Override
    public Role getRole(Long id) {
        return roleRepository.findOne(id);
    }

    /** Persists (inserts or updates) the given role. */
    @Override
    public Role save(Role role) {
        return roleRepository.save(role);
    }

    /**
     * Deletes the role with the given id.
     *
     * @return the role as it was before deletion (null when it did not exist)
     */
    @Override
    public Role delete(Long id) {
        // Fetch first so the removed entity can be handed back to the caller.
        Role role = getRole(id);
        roleRepository.delete(id);
        return role;
    }
}
| apache-2.0 |
hpejcinovic-pivotal/chaos-lemur | src/test/java/io/pivotal/strepsirrhini/chaoslemur/infrastructure/OpenStackInfrastructureTest.java | 2079 | /*
* Copyright 2014-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.pivotal.strepsirrhini.chaoslemur.infrastructure;
import io.pivotal.strepsirrhini.chaoslemur.Member;
import org.jclouds.openstack.nova.v2_0.NovaApi;
import org.jclouds.openstack.nova.v2_0.features.ServerApi;
import org.junit.Before;
import org.junit.Test;
import java.util.Arrays;
import java.util.HashSet;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public final class OpenStackInfrastructureTest {

    // Collaborators are mocked; no real OpenStack/BOSH calls are made.
    private final DirectorUtils directorUtils = mock(DirectorUtils.class);

    private final Member member = new Member("test-id", "test-deployment", "test-job", "test-name");

    private final NovaApi novaApi = mock(NovaApi.class);

    private final ServerApi serverApi = mock(ServerApi.class);

    private final OpenStackInfrastructure infrastructure = new OpenStackInfrastructure(this.directorUtils, this.novaApi);

    /**
     * destroy() should issue a stop for the member's id once per configured
     * region — two regions here, hence two expected stop("test-id") calls.
     */
    @Test
    public void destroy() throws DestructionException {
        when(this.novaApi.getConfiguredRegions()).thenReturn(new HashSet<>(Arrays.asList("test-region-1", "test-region-2")));

        this.infrastructure.destroy(this.member);

        verify(this.serverApi, times(2)).stop("test-id");
    }

    /**
     * Every region lookup resolves to the same mocked ServerApi so the
     * per-region stop calls can be verified on a single mock.
     */
    @Before
    public void setUp() throws Exception {
        when(this.novaApi.getServerApi(any(String.class))).thenReturn(this.serverApi);
    }

}
| apache-2.0 |
ONLYOFFICE/CommunityServer | redistributable/docusign-csharp-client/DocuSign.eSign/Model/Folder.cs | 11773 | /*
* DocuSign REST API
*
* The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
*
* OpenAPI spec version: v2
* Contact: devcenter@docusign.com
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
namespace DocuSign.eSign.Model
{
/// <summary>
/// Folder
/// </summary>
[DataContract]
public partial class Folder :  IEquatable<Folder>, IValidatableObject
{
    public Folder()
    {
        // Empty Constructor
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="Folder" /> class.
    /// </summary>
    /// <param name="ErrorDetails">ErrorDetails.</param>
    /// <param name="Filter">Filter.</param>
    /// <param name="FolderId">.</param>
    /// <param name="Folders">A collection of folder objects returned in a response..</param>
    /// <param name="Name">.</param>
    /// <param name="OwnerEmail">.</param>
    /// <param name="OwnerUserId">.</param>
    /// <param name="OwnerUserName">.</param>
    /// <param name="ParentFolderId">.</param>
    /// <param name="ParentFolderUri">.</param>
    /// <param name="Type">.</param>
    /// <param name="Uri">.</param>
    public Folder(ErrorDetails ErrorDetails = default(ErrorDetails), Filter Filter = default(Filter), string FolderId = default(string), List<Folder> Folders = default(List<Folder>), string Name = default(string), string OwnerEmail = default(string), string OwnerUserId = default(string), string OwnerUserName = default(string), string ParentFolderId = default(string), string ParentFolderUri = default(string), string Type = default(string), string Uri = default(string))
    {
        this.ErrorDetails = ErrorDetails;
        this.Filter = Filter;
        this.FolderId = FolderId;
        this.Folders = Folders;
        this.Name = Name;
        this.OwnerEmail = OwnerEmail;
        this.OwnerUserId = OwnerUserId;
        this.OwnerUserName = OwnerUserName;
        this.ParentFolderId = ParentFolderId;
        this.ParentFolderUri = ParentFolderUri;
        this.Type = Type;
        this.Uri = Uri;
    }

    /// <summary>
    /// Gets or Sets ErrorDetails
    /// </summary>
    [DataMember(Name="errorDetails", EmitDefaultValue=false)]
    public ErrorDetails ErrorDetails { get; set; }

    /// <summary>
    /// Gets or Sets Filter
    /// </summary>
    [DataMember(Name="filter", EmitDefaultValue=false)]
    public Filter Filter { get; set; }

    /// <summary>
    ///
    /// </summary>
    /// <value></value>
    [DataMember(Name="folderId", EmitDefaultValue=false)]
    public string FolderId { get; set; }

    /// <summary>
    /// A collection of folder objects returned in a response.
    /// </summary>
    /// <value>A collection of folder objects returned in a response.</value>
    [DataMember(Name="folders", EmitDefaultValue=false)]
    public List<Folder> Folders { get; set; }

    /// <summary>
    ///
    /// </summary>
    /// <value></value>
    [DataMember(Name="name", EmitDefaultValue=false)]
    public string Name { get; set; }

    /// <summary>
    ///
    /// </summary>
    /// <value></value>
    [DataMember(Name="ownerEmail", EmitDefaultValue=false)]
    public string OwnerEmail { get; set; }

    /// <summary>
    ///
    /// </summary>
    /// <value></value>
    [DataMember(Name="ownerUserId", EmitDefaultValue=false)]
    public string OwnerUserId { get; set; }

    /// <summary>
    ///
    /// </summary>
    /// <value></value>
    [DataMember(Name="ownerUserName", EmitDefaultValue=false)]
    public string OwnerUserName { get; set; }

    /// <summary>
    ///
    /// </summary>
    /// <value></value>
    [DataMember(Name="parentFolderId", EmitDefaultValue=false)]
    public string ParentFolderId { get; set; }

    /// <summary>
    ///
    /// </summary>
    /// <value></value>
    [DataMember(Name="parentFolderUri", EmitDefaultValue=false)]
    public string ParentFolderUri { get; set; }

    /// <summary>
    ///
    /// </summary>
    /// <value></value>
    [DataMember(Name="type", EmitDefaultValue=false)]
    public string Type { get; set; }

    /// <summary>
    ///
    /// </summary>
    /// <value></value>
    [DataMember(Name="uri", EmitDefaultValue=false)]
    public string Uri { get; set; }

    /// <summary>
    /// Returns the string presentation of the object
    /// </summary>
    /// <returns>String presentation of the object</returns>
    public override string ToString()
    {
        var sb = new StringBuilder();
        sb.Append("class Folder {\n");
        sb.Append("  ErrorDetails: ").Append(ErrorDetails).Append("\n");
        sb.Append("  Filter: ").Append(Filter).Append("\n");
        sb.Append("  FolderId: ").Append(FolderId).Append("\n");
        sb.Append("  Folders: ").Append(Folders).Append("\n");
        sb.Append("  Name: ").Append(Name).Append("\n");
        sb.Append("  OwnerEmail: ").Append(OwnerEmail).Append("\n");
        sb.Append("  OwnerUserId: ").Append(OwnerUserId).Append("\n");
        sb.Append("  OwnerUserName: ").Append(OwnerUserName).Append("\n");
        sb.Append("  ParentFolderId: ").Append(ParentFolderId).Append("\n");
        sb.Append("  ParentFolderUri: ").Append(ParentFolderUri).Append("\n");
        sb.Append("  Type: ").Append(Type).Append("\n");
        sb.Append("  Uri: ").Append(Uri).Append("\n");
        sb.Append("}\n");
        return sb.ToString();
    }

    /// <summary>
    /// Returns the JSON string presentation of the object
    /// </summary>
    /// <returns>JSON string presentation of the object</returns>
    public string ToJson()
    {
        return JsonConvert.SerializeObject(this, Formatting.Indented);
    }

    /// <summary>
    /// Returns true if objects are equal
    /// </summary>
    /// <param name="obj">Object to be compared</param>
    /// <returns>Boolean</returns>
    public override bool Equals(object obj)
    {
        // credit: http://stackoverflow.com/a/10454552/677735
        return this.Equals(obj as Folder);
    }

    /// <summary>
    /// Returns true if Folder instances are equal
    /// </summary>
    /// <param name="other">Instance of Folder to be compared</param>
    /// <returns>Boolean</returns>
    public bool Equals(Folder other)
    {
        // credit: http://stackoverflow.com/a/10454552/677735
        if (other == null)
            return false;

        return
            (
                this.ErrorDetails == other.ErrorDetails ||
                this.ErrorDetails != null &&
                this.ErrorDetails.Equals(other.ErrorDetails)
            ) &&
            (
                this.Filter == other.Filter ||
                this.Filter != null &&
                this.Filter.Equals(other.Filter)
            ) &&
            (
                this.FolderId == other.FolderId ||
                this.FolderId != null &&
                this.FolderId.Equals(other.FolderId)
            ) &&
            (
                // BUGFIX: both sides must be non-null before SequenceEqual;
                // SequenceEqual(null) throws ArgumentNullException when this
                // instance has Folders but the other does not.
                this.Folders == other.Folders ||
                this.Folders != null && other.Folders != null &&
                this.Folders.SequenceEqual(other.Folders)
            ) &&
            (
                this.Name == other.Name ||
                this.Name != null &&
                this.Name.Equals(other.Name)
            ) &&
            (
                this.OwnerEmail == other.OwnerEmail ||
                this.OwnerEmail != null &&
                this.OwnerEmail.Equals(other.OwnerEmail)
            ) &&
            (
                this.OwnerUserId == other.OwnerUserId ||
                this.OwnerUserId != null &&
                this.OwnerUserId.Equals(other.OwnerUserId)
            ) &&
            (
                this.OwnerUserName == other.OwnerUserName ||
                this.OwnerUserName != null &&
                this.OwnerUserName.Equals(other.OwnerUserName)
            ) &&
            (
                this.ParentFolderId == other.ParentFolderId ||
                this.ParentFolderId != null &&
                this.ParentFolderId.Equals(other.ParentFolderId)
            ) &&
            (
                this.ParentFolderUri == other.ParentFolderUri ||
                this.ParentFolderUri != null &&
                this.ParentFolderUri.Equals(other.ParentFolderUri)
            ) &&
            (
                this.Type == other.Type ||
                this.Type != null &&
                this.Type.Equals(other.Type)
            ) &&
            (
                this.Uri == other.Uri ||
                this.Uri != null &&
                this.Uri.Equals(other.Uri)
            );
    }

    /// <summary>
    /// Gets the hash code
    /// </summary>
    /// <returns>Hash code</returns>
    public override int GetHashCode()
    {
        // credit: http://stackoverflow.com/a/263416/677735
        // NOTE(review): hash is computed from mutable properties (generated-code
        // convention); instances used as dictionary/set keys must not be mutated.
        unchecked // Overflow is fine, just wrap
        {
            int hash = 41;
            // Suitable nullity checks etc, of course :)
            if (this.ErrorDetails != null)
                hash = hash * 59 + this.ErrorDetails.GetHashCode();
            if (this.Filter != null)
                hash = hash * 59 + this.Filter.GetHashCode();
            if (this.FolderId != null)
                hash = hash * 59 + this.FolderId.GetHashCode();
            if (this.Folders != null)
                hash = hash * 59 + this.Folders.GetHashCode();
            if (this.Name != null)
                hash = hash * 59 + this.Name.GetHashCode();
            if (this.OwnerEmail != null)
                hash = hash * 59 + this.OwnerEmail.GetHashCode();
            if (this.OwnerUserId != null)
                hash = hash * 59 + this.OwnerUserId.GetHashCode();
            if (this.OwnerUserName != null)
                hash = hash * 59 + this.OwnerUserName.GetHashCode();
            if (this.ParentFolderId != null)
                hash = hash * 59 + this.ParentFolderId.GetHashCode();
            if (this.ParentFolderUri != null)
                hash = hash * 59 + this.ParentFolderUri.GetHashCode();
            if (this.Type != null)
                hash = hash * 59 + this.Type.GetHashCode();
            if (this.Uri != null)
                hash = hash * 59 + this.Uri.GetHashCode();
            return hash;
        }
    }

    public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
    {
        yield break;
    }
}
}
| apache-2.0 |
TomGrill/gdx-firebase | desktop/src/de/tomgrill/gdxfirebase/desktop/auth/DesktopTask.java | 909 | package de.tomgrill.gdxfirebase.desktop.auth;
import de.tomgrill.gdxfirebase.core.auth.GdxFirebaseException;
import de.tomgrill.gdxfirebase.core.auth.OnCompleteListener;
import de.tomgrill.gdxfirebase.core.auth.Task;
/**
 * Desktop stand-in for a Firebase Task: a snapshot whose completion and
 * success flags are fixed at construction time.
 */
public class DesktopTask<TResult> implements Task<TResult> {

    private final boolean complete;
    private final boolean successful;

    /**
     * @param complete   whether the task reports itself as complete
     * @param successful whether the task reports itself as successful
     */
    public DesktopTask(boolean complete, boolean successful) {
        this.complete = complete;
        this.successful = successful;
    }

    @Override
    public boolean isComplete() {
        return complete;
    }

    @Override
    public boolean isSuccessful() {
        return successful;
    }

    @Override
    public Task<TResult> addOnCompleteListener(OnCompleteListener<TResult> var1) {
        // NOTE(review): the listener is ignored and null is returned — looks
        // like a stub; callers chaining on the return value would NPE. Confirm
        // whether returning 'this' (without invoking the listener) is intended.
        return null;
    }

    @Override
    public Exception getException() {
        // NOTE(review): always returns a fresh generic exception, even when
        // the task is successful — presumably placeholder desktop behavior.
        return new GdxFirebaseException("desktop exception");
    }
}
| apache-2.0 |
evenjn/yarn | src/main/java/org/github/evenjn/yarn/IteratorMap.java | 1550 | /**
*
* Copyright 2017 Marco Trevisan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.github.evenjn.yarn;
import java.util.Iterator;
/**
 * <h1>IteratorMap</h1>
 *
 * <p>
 * An {@code IteratorMap} is a {@linkplain org.github.evenjn.yarn.YarnMap
 * YarnMap} that provides access to output objects via {@link java.util.Iterator
 * Iterator} containers: each input maps to zero or more outputs.
 * </p>
 *
 * <p>
 * This interface is part of package {@link org.github.evenjn.yarn Yarn}.
 * </p>
 *
 * @param <I>
 *          The type of input objects.
 * @param <O>
 *          The type of output objects.
 * @since 1.0
 */
@FunctionalInterface
public interface IteratorMap<I, O> extends
		YarnMap<I, O, Iterator<O>> {

	/**
	 * <p>
	 * {@code get} returns an {@link java.util.Iterator Iterator} with the output
	 * objects associated to the argument {@code input}.
	 * </p>
	 *
	 * @param input
	 *          An input object.
	 * @return An {@link java.util.Iterator Iterator} of output objects.
	 * @since 1.0
	 */
	@Override
	Iterator<O> get( I input );
}
| apache-2.0 |
yahoo/pulsar | pulsar-broker/src/test/java/org/apache/pulsar/compaction/CompactorTest.java | 8866 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.compaction;
import static org.apache.pulsar.client.impl.RawReaderTest.extractKey;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import io.netty.buffer.ByteBuf;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import org.apache.bookkeeper.client.BookKeeper;
import org.apache.bookkeeper.client.LedgerEntry;
import org.apache.bookkeeper.client.LedgerHandle;
import org.apache.pulsar.broker.auth.MockedPulsarServiceBaseTest;
import org.apache.pulsar.client.api.MessageRoutingMode;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.RawMessage;
import org.apache.pulsar.client.impl.RawMessageImpl;
import org.apache.pulsar.common.protocol.Commands;
import org.apache.pulsar.common.policies.data.ClusterData;
import org.apache.pulsar.common.policies.data.TenantInfo;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@Test(groups = "broker-compaction")
public class CompactorTest extends MockedPulsarServiceBaseTest {
private ScheduledExecutorService compactionScheduler;
    @BeforeMethod
    @Override
    public void setup() throws Exception {
        super.internalSetup();

        // Bootstrap a cluster, tenant and namespace so test topics can be
        // created; ordering matters (tenant requires the cluster, namespace
        // requires the tenant).
        admin.clusters().createCluster("use",
                new ClusterData(pulsar.getWebServiceAddress()));
        admin.tenants().createTenant("my-property",
                new TenantInfo(Sets.newHashSet("appid1", "appid2"), Sets.newHashSet("use")));
        admin.namespaces().createNamespace("my-property/use/my-ns");

        // Single-threaded scheduler that drives compaction; daemon so a
        // leaked thread cannot keep the JVM alive.
        compactionScheduler = Executors.newSingleThreadScheduledExecutor(
                new ThreadFactoryBuilder().setNameFormat("compactor").setDaemon(true).build());
    }
    @AfterMethod(alwaysRun = true)
    @Override
    public void cleanup() throws Exception {
        super.internalCleanup();
        // shutdownNow: no compaction task should outlive its test.
        compactionScheduler.shutdownNow();
    }
private List<String> compactAndVerify(String topic, Map<String, byte[]> expected) throws Exception {
BookKeeper bk = pulsar.getBookKeeperClientFactory().create(
this.conf, null, null, Optional.empty(), null);
Compactor compactor = new TwoPhaseCompactor(conf, pulsarClient, bk, compactionScheduler);
long compactedLedgerId = compactor.compact(topic).get();
LedgerHandle ledger = bk.openLedger(compactedLedgerId,
Compactor.COMPACTED_TOPIC_LEDGER_DIGEST_TYPE,
Compactor.COMPACTED_TOPIC_LEDGER_PASSWORD);
Assert.assertEquals(ledger.getLastAddConfirmed() + 1, // 0..lac
expected.size(),
"Should have as many entries as there is keys");
List<String> keys = new ArrayList<>();
Enumeration<LedgerEntry> entries = ledger.readEntries(0, ledger.getLastAddConfirmed());
while (entries.hasMoreElements()) {
ByteBuf buf = entries.nextElement().getEntryBuffer();
RawMessage m = RawMessageImpl.deserializeFrom(buf);
String key = extractKey(m);
keys.add(key);
ByteBuf payload = extractPayload(m);
byte[] bytes = new byte[payload.readableBytes()];
payload.readBytes(bytes);
Assert.assertEquals(bytes, expected.remove(key),
"Compacted version should match expected version");
m.close();
}
Assert.assertTrue(expected.isEmpty(), "All expected keys should have been found");
return keys;
}
@Test
public void testCompaction() throws Exception {
String topic = "persistent://my-property/use/my-ns/my-topic1";
final int numMessages = 1000;
final int maxKeys = 10;
Producer<byte[]> producer = pulsarClient.newProducer().topic(topic)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.SinglePartition)
.create();
Map<String, byte[]> expected = new HashMap<>();
Random r = new Random(0);
for (int j = 0; j < numMessages; j++) {
int keyIndex = r.nextInt(maxKeys);
String key = "key"+keyIndex;
byte[] data = ("my-message-" + key + "-" + j).getBytes();
producer.newMessage()
.key(key)
.value(data)
.send();
expected.put(key, data);
}
compactAndVerify(topic, expected);
}
@Test
public void testCompactAddCompact() throws Exception {
String topic = "persistent://my-property/use/my-ns/my-topic1";
Producer<byte[]> producer = pulsarClient.newProducer().topic(topic)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.SinglePartition)
.create();
Map<String, byte[]> expected = new HashMap<>();
producer.newMessage()
.key("a")
.value("A_1".getBytes())
.send();
producer.newMessage()
.key("b")
.value("B_1".getBytes())
.send();
producer.newMessage()
.key("a")
.value("A_2".getBytes())
.send();
expected.put("a", "A_2".getBytes());
expected.put("b", "B_1".getBytes());
compactAndVerify(topic, new HashMap<>(expected));
producer.newMessage()
.key("b")
.value("B_2".getBytes())
.send();
expected.put("b", "B_2".getBytes());
compactAndVerify(topic, expected);
}
@Test
public void testCompactedInOrder() throws Exception {
String topic = "persistent://my-property/use/my-ns/my-topic1";
Producer<byte[]> producer = pulsarClient.newProducer().topic(topic)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.SinglePartition)
.create();
producer.newMessage()
.key("c")
.value("C_1".getBytes()).send();
producer.newMessage()
.key("a")
.value("A_1".getBytes()).send();
producer.newMessage()
.key("b")
.value("B_1".getBytes()).send();
producer.newMessage()
.key("a")
.value("A_2".getBytes()).send();
Map<String, byte[]> expected = new HashMap<>();
expected.put("a", "A_2".getBytes());
expected.put("b", "B_1".getBytes());
expected.put("c", "C_1".getBytes());
List<String> keyOrder = compactAndVerify(topic, expected);
Assert.assertEquals(keyOrder, Lists.newArrayList("c", "b", "a"));
}
@Test
public void testCompactEmptyTopic() throws Exception {
String topic = "persistent://my-property/use/my-ns/my-topic1";
// trigger creation of topic on server side
pulsarClient.newConsumer().topic(topic).subscriptionName("sub1").subscribe().close();
BookKeeper bk = pulsar.getBookKeeperClientFactory().create(
this.conf, null, null, Optional.empty(), null);
Compactor compactor = new TwoPhaseCompactor(conf, pulsarClient, bk, compactionScheduler);
compactor.compact(topic).get();
}
public ByteBuf extractPayload(RawMessage m) throws Exception {
ByteBuf payloadAndMetadata = m.getHeadersAndPayload();
Commands.skipChecksumIfPresent(payloadAndMetadata);
int metadataSize = payloadAndMetadata.readInt(); // metadata size
byte[] metadata = new byte[metadataSize];
payloadAndMetadata.readBytes(metadata);
return payloadAndMetadata.slice();
}
}
| apache-2.0 |
hmrs-cr/android-wear-gopro-remote | libcommon/src/main/java/com/hmsoft/libcommon/general/Logger.java | 5789 | /*
* Copyright (C) 2014 Mauricio Rodriguez (ranametal@users.sf.net)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hmsoft.libcommon.general;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import android.content.Context;
import android.os.Environment;
import android.util.Log;
import com.hmsoft.libcommon.BuildConfig;
/**
 * Application logger that forwards messages to {@link Log} and mirrors every record into a
 * per-day log file under the app's external-files "logs" directory.
 * <p>
 * File logging is best-effort: I/O failures are silently swallowed so logging can never crash
 * the app. File appends are serialized through the synchronized {@link #log2file}, which also
 * protects the shared (non-thread-safe) {@link SimpleDateFormat} instances.
 */
public final class Logger {

    private static final String APP_TAG = "HMSOFT:";

    public static final boolean DEBUG = BuildConfig.DEBUG;
    public static final boolean WARNING = true;
    public static final boolean INFO = true;
    public static final boolean ERROR = true;

    public static final String DEBUG_TAG = "DEBUG";
    public static final String WARNING_TAG = "WARNING";
    public static final String INFO_TAG = "INFO";
    public static final String ERROR_TAG = "ERROR";

    // %s is replaced with the current date (yyyyMMdd), giving one log file per day.
    private static final String LOG_FILE = "log-%s.log";
    private static final String LOGS_FOLDER = "logs";

    // SimpleDateFormat is NOT thread-safe; both instances are only used inside the
    // synchronized log2file() method.
    private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMdd'T'HHmmss", Locale.US);
    private static final SimpleDateFormat LOG_DATE_FORMAT = new SimpleDateFormat("yyyyMMdd", Locale.US);

    private static File sLogsFolder = null;

    /** Resolves the log folder from the app context; call once (e.g. Application.onCreate). */
    public static void init(Context context) {
        sLogsFolder = context.getExternalFilesDir(LOGS_FOLDER);
    }

    public static File getLogFolder() {
        if(sLogsFolder == null) {
            // Fallback for when init() was never called: build the standard
            // external-files path by hand.
            sLogsFolder = new File(Environment.getExternalStorageDirectory() +
                    "/Android/data/" + BuildConfig.APPLICATION_ID, LOGS_FOLDER);
        }
        return sLogsFolder;
    }

    /**
     * Appends one tab-separated record (timestamp, tag, message, optional throwable) to the
     * current day's log file.
     * <p>
     * Fixed: now synchronized — it is called from every log level, and both the file append
     * and the shared SimpleDateFormat instances need serialization across threads.
     */
    public static synchronized void log2file(String tag, String msg, String fileName, Throwable e) {
        try {
            getLogFolder();
            Date now = new Date();
            File file = new File(sLogsFolder, String.format(fileName, LOG_DATE_FORMAT.format(now)));
            FileOutputStream os = new FileOutputStream(file, true);
            try (OutputStreamWriter writer = new OutputStreamWriter(os)) {
                writer.append(SIMPLE_DATE_FORMAT.format(now));
                writer.append("\t");
                writer.append(tag);
                writer.append("\t");
                writer.append(msg);
                writer.append("\t");
                if (e != null)
                    writer.append(e.toString());
                writer.append("\n");
                writer.flush();
            }
        } catch (IOException ex) {
            // Deliberately ignored: file logging is best-effort and must not crash the app.
        }
    }

    public static void debug(String tag, String msg) {
        if(DEBUG) {
            Log.d(APP_TAG + tag, msg);
            log2file(tag + "\t" + DEBUG_TAG, msg, LOG_FILE, null);
        }
    }

    public static void debug(String tag, String msg, Throwable e) {
        if(DEBUG) {
            Log.d(APP_TAG + tag, msg, e);
            log2file(tag + "\t" + DEBUG_TAG, msg, LOG_FILE, e);
        }
    }

    public static void debug(String tag, String msg, Object... args) {
        if(DEBUG) {
            // Fixed: the message used to be formatted twice (String.format applied again to the
            // already-formatted string), which corrupts or throws on results containing '%'.
            msg = String.format(msg, args);
            Log.d(APP_TAG + tag, msg);
            log2file(tag + "\t" + DEBUG_TAG, msg, LOG_FILE, null);
        }
    }

    public static void error(String tag, String msg) {
        if(ERROR) {
            // Fixed: error() now prefixes APP_TAG like every other log level.
            Log.e(APP_TAG + tag, msg);
            log2file(tag + "\t" + ERROR_TAG, msg, LOG_FILE, null);
        }
    }

    public static void error(String tag, String msg, Throwable e) {
        if(ERROR) {
            // Fixed: error() now prefixes APP_TAG like every other log level.
            Log.e(APP_TAG + tag, msg, e);
            log2file(tag + "\t" + ERROR_TAG, msg, LOG_FILE, e);
        }
    }

    public static void warning(String tag, String msg) {
        if(WARNING) {
            Log.w(APP_TAG + tag, msg, null);
            log2file(tag + "\t" + WARNING_TAG, msg, LOG_FILE, null);
        }
    }

    public static void warning(String tag, String msg, Throwable e) {
        if(WARNING) {
            Log.w(APP_TAG + tag, msg, e);
            log2file(tag + "\t" + WARNING_TAG, msg, LOG_FILE, e);
        }
    }

    public static void warning(String tag, String msg, Object... args) {
        if(WARNING) {
            // Fixed: avoid double-formatting (the formatted string was formatted again).
            msg = String.format(msg, args);
            Log.w(APP_TAG + tag, msg);
            log2file(tag + "\t" + WARNING_TAG, msg, LOG_FILE, null);
        }
    }

    public static void info(String tag, String msg) {
        if(INFO) {
            Log.i(APP_TAG + tag, msg);
            log2file(tag + "\t" + INFO_TAG, msg, LOG_FILE, null);
        }
    }
}
| apache-2.0 |
aljoscha/flink | flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/stream/StreamExecSortLimit.java | 2740 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.exec.stream;
import org.apache.flink.api.dag.Transformation;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.planner.delegation.PlannerBase;
import org.apache.flink.table.planner.plan.nodes.exec.ExecEdge;
import org.apache.flink.table.planner.plan.nodes.exec.utils.PartitionSpec;
import org.apache.flink.table.planner.plan.nodes.exec.utils.SortSpec;
import org.apache.flink.table.planner.plan.utils.RankProcessStrategy;
import org.apache.flink.table.runtime.operators.rank.ConstantRankRange;
import org.apache.flink.table.runtime.operators.rank.RankType;
import org.apache.flink.table.types.logical.RowType;
/** {@link StreamExecNode} for Sort with limit. */
public class StreamExecSortLimit extends StreamExecRank {

    /** Exclusive upper bound of the fetch range; Long.MAX_VALUE means FETCH was absent. */
    private final long limitEnd;

    public StreamExecSortLimit(
            SortSpec sortSpec,
            long limitStart,
            long limitEnd,
            RankProcessStrategy rankStrategy,
            boolean generateUpdateBefore,
            ExecEdge inputEdge,
            RowType outputType,
            String description) {
        // A sort-with-limit is expressed as a ROW_NUMBER rank over a single global
        // partition, keeping only rows in [limitStart + 1, limitEnd].
        super(
                RankType.ROW_NUMBER,
                PartitionSpec.ALL_IN_ONE,
                sortSpec,
                new ConstantRankRange(limitStart + 1, limitEnd),
                rankStrategy,
                false,
                generateUpdateBefore,
                inputEdge,
                outputType,
                description);
        this.limitEnd = limitEnd;
    }

    @Override
    protected Transformation<RowData> translateToPlanInternal(PlannerBase planner) {
        // A concrete FETCH value is required on streaming tables; delegate to the
        // rank translation when it is present.
        if (limitEnd != Long.MAX_VALUE) {
            return super.translateToPlanInternal(planner);
        }
        throw new TableException(
                "FETCH is missed, which on streaming table is not supported currently.");
    }
}
| apache-2.0 |
dhutchis/accumulo | server/base/src/main/java/org/apache/accumulo/server/util/DeleteZooInstance.java | 3085 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.server.util;
import static java.nio.charset.StandardCharsets.UTF_8;
import java.util.HashSet;
import java.util.Set;
import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.cli.Help;
import org.apache.accumulo.fate.zookeeper.IZooReaderWriter;
import org.apache.accumulo.fate.zookeeper.ZooUtil.NodeMissingPolicy;
import org.apache.accumulo.server.zookeeper.ZooReaderWriter;
import org.apache.zookeeper.KeeperException;
import com.beust.jcommander.Parameter;
public class DeleteZooInstance {

  static class Opts extends Help {
    @Parameter(names = {"-i", "--instance"}, description = "the instance name or id to delete")
    String instance;
  }

  /**
   * Recursively deletes the given ZooKeeper path, retrying up to 10 times when the node is
   * concurrently repopulated (NotEmptyException). Gives up silently after the retries are
   * exhausted, matching the original best-effort behavior; all other exceptions propagate.
   * (A redundant "catch (Exception ex) { throw ex; }" no-op has been removed.)
   */
  static void deleteRetry(IZooReaderWriter zk, String path) throws Exception {
    for (int i = 0; i < 10; i++) {
      try {
        zk.recursiveDelete(path, NodeMissingPolicy.SKIP);
        return;
      } catch (KeeperException.NotEmptyException ex) {
        // children were created while we were deleting; try again
      }
    }
  }

  /**
   * @param args
   *          : the name or UUID of the instance to be deleted
   */
  public static void main(String[] args) throws Exception {
    Opts opts = new Opts();
    opts.parseArgs(DeleteZooInstance.class.getName(), args);

    IZooReaderWriter zk = ZooReaderWriter.getInstance();
    // try instance name:
    Set<String> instances = new HashSet<>(zk.getChildren(Constants.ZROOT + Constants.ZINSTANCES));
    Set<String> uuids = new HashSet<>(zk.getChildren(Constants.ZROOT));
    uuids.remove("instances");
    if (instances.contains(opts.instance)) {
      // the argument is an instance name: delete the name -> uuid mapping, then the
      // instance data stored under the uuid
      String path = Constants.ZROOT + Constants.ZINSTANCES + "/" + opts.instance;
      byte[] data = zk.getData(path, null);
      deleteRetry(zk, path);
      deleteRetry(zk, Constants.ZROOT + "/" + new String(data, UTF_8));
    } else if (uuids.contains(opts.instance)) {
      // the argument is a uuid: remove every instance name pointing at it, then the uuid node
      for (String instance : instances) {
        String path = Constants.ZROOT + Constants.ZINSTANCES + "/" + instance;
        byte[] data = zk.getData(path, null);
        if (opts.instance.equals(new String(data, UTF_8)))
          deleteRetry(zk, path);
      }
      deleteRetry(zk, Constants.ZROOT + "/" + opts.instance);
    }
  }
}
| apache-2.0 |
consulo/consulo-java | java-analysis-impl/src/main/java/com/intellij/codeInsight/guess/impl/GuessManagerImpl.java | 22951 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.guess.impl;
import com.intellij.codeInsight.guess.GuessManager;
import com.intellij.codeInspection.dataFlow.*;
import com.intellij.codeInspection.dataFlow.instructions.InstanceofInstruction;
import com.intellij.codeInspection.dataFlow.instructions.TypeCastInstruction;
import com.intellij.codeInspection.dataFlow.value.DfaValue;
import com.intellij.codeInspection.dataFlow.value.DfaVariableValue;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.PsiElementProcessor;
import com.intellij.psi.search.PsiElementProcessorAdapter;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.search.searches.ClassInheritorsSearch;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.*;
import com.intellij.util.ArrayUtil;
import com.intellij.util.BitUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.siyeh.ig.callMatcher.CallMatcher;
import com.siyeh.ig.psiutils.ExpressionUtils;
import jakarta.inject.Inject;
import jakarta.inject.Singleton;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.*;
@Singleton
public final class GuessManagerImpl extends GuessManager
{
	// Table of well-known collection method signatures used by the
	// container-element-type heuristics (populated in initMethodPatterns()).
	private final MethodPatternMap myMethodPatternMap = new MethodPatternMap();
	private final Project myProject;
	@Inject
	public GuessManagerImpl(Project project)
	{
		myProject = project;
		// Pre-populate the collection/Vector method pattern table used by the
		// container-element-type guessing.
		initMethodPatterns();
	}
	/**
	 * Registers well-known java.util Collection/Vector method signatures.
	 * Each pattern is (method name, argument count, parameter index); a negative
	 * parameter index marks the return value rather than an argument (see the
	 * {@code pattern.parameterIndex < 0} checks in the callers).
	 */
	private void initMethodPatterns()
	{
		// Collection
		myMethodPatternMap.addPattern(new MethodPattern("add", 1, 0));
		myMethodPatternMap.addPattern(new MethodPattern("contains", 1, 0));
		myMethodPatternMap.addPattern(new MethodPattern("remove", 1, 0));
		// Vector
		myMethodPatternMap.addPattern(new MethodPattern("add", 2, 1));
		myMethodPatternMap.addPattern(new MethodPattern("addElement", 1, 0));
		myMethodPatternMap.addPattern(new MethodPattern("elementAt", 1, -1));
		myMethodPatternMap.addPattern(new MethodPattern("firstElement", 0, -1));
		myMethodPatternMap.addPattern(new MethodPattern("lastElement", 0, -1));
		myMethodPatternMap.addPattern(new MethodPattern("get", 1, -1));
		myMethodPatternMap.addPattern(new MethodPattern("indexOf", 1, 0));
		myMethodPatternMap.addPattern(new MethodPattern("indexOf", 2, 0));
		myMethodPatternMap.addPattern(new MethodPattern("lastIndexOf", 1, 0));
		myMethodPatternMap.addPattern(new MethodPattern("lastIndexOf", 2, 0));
		myMethodPatternMap.addPattern(new MethodPattern("insertElementAt", 2, 0));
		myMethodPatternMap.addPattern(new MethodPattern("removeElement", 1, 0));
		myMethodPatternMap.addPattern(new MethodPattern("set", 2, 1));
		myMethodPatternMap.addPattern(new MethodPattern("setElementAt", 2, 0));
	}
	/**
	 * Guesses the element type of a container expression (e.g. a raw List).
	 * An explicit single generic argument wins; otherwise, for a variable
	 * reference, usages of that variable (and, via CHECK_DOWN/CHECK_UP,
	 * parameters and call-site arguments it flows through) are matched against
	 * the known collection method patterns.
	 *
	 * @param rangeToIgnore usages inside this text range are skipped (may be null)
	 * @return possibly-empty array of candidate element types
	 */
	@Override
	@Nonnull
	public PsiType[] guessContainerElementType(PsiExpression containerExpr, TextRange rangeToIgnore)
	{
		HashSet<PsiType> typesSet = new HashSet<>();
		PsiType type = containerExpr.getType();
		PsiType elemType;
		if((elemType = getGenericElementType(type)) != null)
		{
			return new PsiType[]{elemType};
		}
		if(containerExpr instanceof PsiReferenceExpression)
		{
			PsiElement refElement = ((PsiReferenceExpression) containerExpr).resolve();
			if(refElement instanceof PsiVariable)
			{
				PsiFile file = refElement.getContainingFile();
				if(file == null)
				{
					file = containerExpr.getContainingFile(); // implicit variable in jsp
				}
				HashSet<PsiVariable> checkedVariables = new HashSet<>();
				// Two passes: direct usages + flow into callees, then flow back from call sites.
				addTypesByVariable(typesSet, (PsiVariable) refElement, file, checkedVariables, CHECK_USAGE | CHECK_DOWN, rangeToIgnore);
				checkedVariables.clear();
				addTypesByVariable(typesSet, (PsiVariable) refElement, file, checkedVariables, CHECK_UP, rangeToIgnore);
			}
		}
		return typesSet.toArray(PsiType.createArray(typesSet.size()));
	}
@Nullable
private static PsiType getGenericElementType(PsiType collectionType)
{
if(collectionType instanceof PsiClassType)
{
PsiClassType classType = (PsiClassType) collectionType;
PsiType[] parameters = classType.getParameters();
if(parameters.length == 1)
{
return parameters[0];
}
}
return null;
}
@Override
@Nonnull
public PsiType[] guessTypeToCast(PsiExpression expr)
{
LinkedHashSet<PsiType> types = new LinkedHashSet<>(getControlFlowExpressionTypeConjuncts(expr));
addExprTypesWhenContainerElement(types, expr);
addExprTypesByDerivedClasses(types, expr);
return types.toArray(PsiType.createArray(types.size()));
}
	/**
	 * Runs dataflow analysis over the topmost code block containing
	 * {@code forPlace} (or over the whole file for a code fragment) and collects
	 * the types the analysis derives per expression.
	 *
	 * @param honorAssignments when false, assignment effects are filtered out of
	 *                         the memory states (see {@link #createRunner})
	 * @return the collected map, or an empty map when no suitable scope exists or
	 *         the analysis does not finish with RunnerResult.OK
	 */
	@Nonnull
	@Override
	public MultiMap<PsiExpression, PsiType> getControlFlowExpressionTypes(@Nonnull PsiExpression forPlace, boolean honorAssignments)
	{
		PsiElement scope = DfaPsiUtil.getTopmostBlockInSameClass(forPlace);
		if(scope == null)
		{
			PsiFile file = forPlace.getContainingFile();
			if(!(file instanceof PsiCodeFragment))
			{
				return MultiMap.empty();
			}
			scope = file;
		}
		DataFlowRunner runner = createRunner(honorAssignments, scope);
		final ExpressionTypeInstructionVisitor visitor = new ExpressionTypeInstructionVisitor(forPlace);
		if(runner.analyzeMethodWithInlining(scope, visitor) == RunnerResult.OK)
		{
			return visitor.getResult();
		}
		return MultiMap.empty();
	}
	/**
	 * Uses dataflow analysis to compute a refined type for {@code forPlace}: the
	 * union (join) of all constraints observed for the expression across the
	 * analysis, intersected (meet) with its declared type. Returns null when no
	 * scope is found or the analysis fails.
	 */
	@Nullable
	private static PsiType getTypeFromDataflow(PsiExpression forPlace, boolean honorAssignments)
	{
		PsiType type = forPlace.getType();
		// Start from the declared type (TOP when unknown); the dataflow result is
		// narrowed by this at the end.
		TypeConstraint initial = type == null ? TypeConstraints.TOP : TypeConstraints.instanceOf(type);
		PsiElement scope = DfaPsiUtil.getTopmostBlockInSameClass(forPlace);
		if(scope == null)
		{
			PsiFile file = forPlace.getContainingFile();
			if(!(file instanceof PsiCodeFragment))
			{
				return null;
			}
			scope = file;
		}
		DataFlowRunner runner = createRunner(honorAssignments, scope);
		class Visitor extends CastTrackingVisitor
		{
			// Running union of constraints seen for forPlace (BOTTOM = nothing seen yet).
			TypeConstraint constraint = TypeConstraints.BOTTOM;
			@Override
			protected void beforeExpressionPush(@Nonnull DfaValue value,
					@Nonnull PsiExpression expression,
					@Nullable TextRange range,
					@Nonnull DfaMemoryState state)
			{
				if(expression == forPlace && range == null)
				{
					if(!(value instanceof DfaVariableValue) || ((DfaVariableValue) value).isFlushableByCalls())
					{
						// Re-wrap values that calls could flush in an expression-keyed
						// variable so the state lookup below stays meaningful.
						value = runner.getFactory().getVarFactory().createVariableValue(new ExpressionVariableDescriptor(expression));
					}
					constraint = constraint.join(TypeConstraint.fromDfType(state.getDfType(value)));
				}
				super.beforeExpressionPush(value, expression, range, state);
			}
			@Override
			boolean isInteresting(@Nonnull DfaValue value, @Nonnull PsiExpression expression)
			{
				return (!(value instanceof DfaVariableValue) || ((DfaVariableValue) value).isFlushableByCalls()) &&
						ExpressionVariableDescriptor.EXPRESSION_HASHING_STRATEGY.equals(expression, forPlace);
			}
		}
		final Visitor visitor = new Visitor();
		if(runner.analyzeMethodWithInlining(scope, visitor) == RunnerResult.OK)
		{
			return visitor.constraint.meet(initial).getPsiType(scope.getProject());
		}
		return null;
	}
@Nonnull
private static DataFlowRunner createRunner(boolean honorAssignments, PsiElement scope)
{
return honorAssignments ? new DataFlowRunner(scope.getProject()) : new DataFlowRunner(scope.getProject())
{
@Nonnull
@Override
protected DfaMemoryState createMemoryState()
{
return new AssignmentFilteringMemoryState(getFactory());
}
};
}
private static PsiElement getTopmostBlock(PsiElement scope)
{
assert scope.isValid();
PsiElement lastScope = scope;
while(true)
{
final PsiCodeBlock lastCodeBlock = PsiTreeUtil.getParentOfType(lastScope, PsiCodeBlock.class, true);
if(lastCodeBlock == null)
{
break;
}
lastScope = lastCodeBlock;
}
if(lastScope == scope)
{
PsiFile file = scope.getContainingFile();
if(file instanceof PsiCodeFragment)
{
return file;
}
}
return lastScope;
}
	/**
	 * Adds known subclasses of the expression's class type as cast candidates.
	 * Bails out entirely when the inheritor search finds more than 5 classes
	 * (too ambiguous to be useful); anonymous classes are skipped.
	 */
	private void addExprTypesByDerivedClasses(LinkedHashSet<? super PsiType> set, PsiExpression expr)
	{
		PsiType type = expr.getType();
		if(!(type instanceof PsiClassType))
		{
			return;
		}
		PsiClass refClass = PsiUtil.resolveClassInType(type);
		if(refClass == null)
		{
			return;
		}
		PsiManager manager = PsiManager.getInstance(myProject);
		// Collect at most 5 inheritors; overflow means the hierarchy is too wide to guess from.
		PsiElementProcessor.CollectElementsWithLimit<PsiClass> processor = new PsiElementProcessor.CollectElementsWithLimit<>(5);
		ClassInheritorsSearch.search(refClass).forEach(new PsiElementProcessorAdapter<>(processor));
		if(processor.isOverflow())
		{
			return;
		}
		for(PsiClass derivedClass : processor.getCollection())
		{
			if(derivedClass instanceof PsiAnonymousClass)
			{
				continue;
			}
			PsiType derivedType = JavaPsiFacade.getElementFactory(manager.getProject()).createType(derivedClass);
			set.add(derivedType);
		}
	}
	/**
	 * If {@code expr} is a call like {@code container.get(...)} matching a known
	 * container accessor whose pattern marks the return value
	 * (parameterIndex &lt; 0), guesses the qualifier container's element types and
	 * adds them to {@code set}; anonymous-class types are skipped.
	 */
	private void addExprTypesWhenContainerElement(LinkedHashSet<? super PsiType> set, PsiExpression expr)
	{
		if(expr instanceof PsiMethodCallExpression)
		{
			PsiMethodCallExpression callExpr = (PsiMethodCallExpression) expr;
			PsiReferenceExpression methodExpr = callExpr.getMethodExpression();
			String methodName = methodExpr.getReferenceName();
			MethodPattern pattern = myMethodPatternMap.findPattern(methodName, callExpr.getArgumentList().getExpressionCount());
			if(pattern != null && pattern.parameterIndex < 0/* return value */)
			{
				PsiExpression qualifier = methodExpr.getQualifierExpression();
				if(qualifier != null)
				{
					PsiType[] types = guessContainerElementType(qualifier, null);
					for(PsiType type : types)
					{
						if(type instanceof PsiClassType)
						{
							if(((PsiClassType) type).resolve() instanceof PsiAnonymousClass)
							{
								continue;
							}
						}
						set.add(type);
					}
				}
			}
		}
	}
	// Bit flags controlling how addTypesByVariable explores a variable (combinable):
	private static final int CHECK_USAGE = 0x01; // match the variable's own usages against the method patterns
	private static final int CHECK_UP = 0x02; // for a parameter: inspect the arguments at the method's call sites
	private static final int CHECK_DOWN = 0x04; // follow the variable into the parameters of methods it is passed to
	/**
	 * Collects candidate element types for {@code var} by scanning its usages in
	 * {@code scopeFile}, recursing through call boundaries per the CHECK_* flags:
	 * CHECK_USAGE matches each usage against the known container method patterns,
	 * CHECK_DOWN follows the variable into the parameters of methods it is passed
	 * to, and CHECK_UP follows a parameter back to the arguments at its method's
	 * call sites. {@code checkedVariables} prevents revisiting a variable (and
	 * thus infinite recursion on cyclic call chains); usages inside
	 * {@code rangeToIgnore} are skipped by the pattern matcher.
	 */
	private void addTypesByVariable(HashSet<? super PsiType> typesSet,
			PsiVariable var,
			PsiFile scopeFile,
			HashSet<? super PsiVariable> checkedVariables,
			int flags,
			TextRange rangeToIgnore)
	{
		if(!checkedVariables.add(var))
		{
			return;
		}
		//System.out.println("analyzing usages of " + var + " in file " + scopeFile);
		SearchScope searchScope = new LocalSearchScope(scopeFile);
		if(BitUtil.isSet(flags, CHECK_USAGE) || BitUtil.isSet(flags, CHECK_DOWN))
		{
			for(PsiReference varRef : ReferencesSearch.search(var, searchScope, false))
			{
				PsiElement ref = varRef.getElement();
				if(BitUtil.isSet(flags, CHECK_USAGE))
				{
					PsiType type = guessElementTypeFromReference(myMethodPatternMap, ref, rangeToIgnore);
					if(type != null && !(type instanceof PsiPrimitiveType))
					{
						typesSet.add(type);
					}
				}
				if(BitUtil.isSet(flags, CHECK_DOWN))
				{
					if(ref.getParent() instanceof PsiExpressionList && ref.getParent().getParent() instanceof PsiMethodCallExpression)
					{ //TODO : new
						// The variable is an argument: recurse into the matching parameter of the callee.
						PsiExpressionList list = (PsiExpressionList) ref.getParent();
						int argIndex = ArrayUtil.indexOf(list.getExpressions(), ref);
						PsiMethodCallExpression methodCall = (PsiMethodCallExpression) list.getParent();
						PsiMethod method = (PsiMethod) methodCall.getMethodExpression().resolve();
						if(method != null)
						{
							PsiParameter[] parameters = method.getParameterList().getParameters();
							if(argIndex < parameters.length)
							{
								addTypesByVariable(typesSet, parameters[argIndex], method.getContainingFile(), checkedVariables, flags | CHECK_USAGE,
										rangeToIgnore);
							}
						}
					}
				}
			}
		}
		if(BitUtil.isSet(flags, CHECK_UP))
		{
			if(var instanceof PsiParameter && var.getParent() instanceof PsiParameterList && var.getParent().getParent() instanceof PsiMethod)
			{
				// The variable is a method parameter: find which argument position it
				// occupies, then inspect that argument at every call site.
				PsiParameterList list = (PsiParameterList) var.getParent();
				PsiParameter[] parameters = list.getParameters();
				int argIndex = -1;
				for(int i = 0; i < parameters.length; i++)
				{
					PsiParameter parameter = parameters[i];
					if(parameter.equals(var))
					{
						argIndex = i;
						break;
					}
				}
				PsiMethod method = (PsiMethod) var.getParent().getParent();
				//System.out.println("analyzing usages of " + method + " in file " + scopeFile);
				for(PsiReference methodRef : ReferencesSearch.search(method, searchScope, false))
				{
					PsiElement ref = methodRef.getElement();
					if(ref.getParent() instanceof PsiMethodCallExpression)
					{
						PsiMethodCallExpression methodCall = (PsiMethodCallExpression) ref.getParent();
						PsiExpression[] args = methodCall.getArgumentList().getExpressions();
						if(args.length <= argIndex)
						{
							continue;
						}
						PsiExpression arg = args[argIndex];
						if(arg instanceof PsiReferenceExpression)
						{
							PsiElement refElement = ((PsiReferenceExpression) arg).resolve();
							if(refElement instanceof PsiVariable)
							{
								addTypesByVariable(typesSet, (PsiVariable) refElement, scopeFile, checkedVariables, flags | CHECK_USAGE, rangeToIgnore);
							}
						}
						//TODO : constructor
					}
				}
			}
		}
	}
	/**
	 * If {@code ref} is the qualifier of a call matching a known container method
	 * pattern, returns the inferred element type: the type of the argument at the
	 * pattern's parameter index, or — for patterns marking the return value — the
	 * target type of an enclosing cast, unless that cast lies inside
	 * {@code rangeToIgnore}. Returns null when nothing can be inferred.
	 */
	@Nullable
	private static PsiType guessElementTypeFromReference(MethodPatternMap methodPatternMap,
			PsiElement ref,
			TextRange rangeToIgnore)
	{
		PsiElement refParent = ref.getParent();
		if(refParent instanceof PsiReferenceExpression)
		{
			PsiReferenceExpression parentExpr = (PsiReferenceExpression) refParent;
			if(ref.equals(parentExpr.getQualifierExpression()) && parentExpr.getParent() instanceof PsiMethodCallExpression)
			{
				String methodName = parentExpr.getReferenceName();
				PsiMethodCallExpression methodCall = (PsiMethodCallExpression) parentExpr.getParent();
				PsiExpression[] args = methodCall.getArgumentList().getExpressions();
				MethodPattern pattern = methodPatternMap.findPattern(methodName, args.length);
				if(pattern != null)
				{
					if(pattern.parameterIndex < 0)
					{ // return value
						if(methodCall.getParent() instanceof PsiTypeCastExpression &&
								(rangeToIgnore == null || !rangeToIgnore.contains(methodCall.getTextRange())))
						{
							return ((PsiTypeCastExpression) methodCall.getParent()).getType();
						}
					}
					else
					{
						return args[pattern.parameterIndex].getType();
					}
				}
			}
		}
		return null;
	}
	/**
	 * Computes the conjuncts of the most precise type known for {@code expr}:
	 * first via the cheap GuessTypeVisitor scan over the enclosing block, falling
	 * back to full dataflow analysis when the scan reports DFA is needed. Types
	 * whose classes are not accessible at the expression are filtered out, and a
	 * result equal to just the erasure of the declared type is dropped as
	 * uninformative.
	 */
	@Nonnull
	@Override
	public List<PsiType> getControlFlowExpressionTypeConjuncts(@Nonnull PsiExpression expr, boolean honorAssignments)
	{
		if(expr.getType() instanceof PsiPrimitiveType)
		{
			return Collections.emptyList();
		}
		PsiExpression place = PsiUtil.skipParenthesizedExprDown(expr);
		if(place == null)
		{
			return Collections.emptyList();
		}
		List<PsiType> result = null;
		if(!ControlFlowAnalyzer.inlinerMayInferPreciseType(place))
		{
			// Cheap path: a syntactic scan that may already produce a single specific type.
			GuessTypeVisitor visitor = tryGuessingTypeWithoutDfa(place, honorAssignments);
			if(!visitor.isDfaNeeded())
			{
				result = visitor.mySpecificType == null ?
						Collections.emptyList() : Collections.singletonList(DfaPsiUtil.tryGenerify(expr, visitor.mySpecificType));
			}
		}
		if(result == null)
		{
			// Expensive path: full dataflow analysis over the enclosing scope.
			PsiType psiType = getTypeFromDataflow(expr, honorAssignments);
			if(psiType instanceof PsiIntersectionType)
			{
				result = ContainerUtil.mapNotNull(((PsiIntersectionType) psiType).getConjuncts(), type -> DfaPsiUtil.tryGenerify(expr, type));
			}
			else if(psiType != null)
			{
				result = Collections.singletonList(DfaPsiUtil.tryGenerify(expr, psiType));
			}
			else
			{
				result = Collections.emptyList();
			}
		}
		result = ContainerUtil.filter(result, t -> {
			PsiClass typeClass = PsiUtil.resolveClassInType(t);
			return typeClass == null || PsiUtil.isAccessible(typeClass, expr, null);
		});
		if(result.equals(Collections.singletonList(TypeConversionUtil.erasure(expr.getType()))))
		{
			return Collections.emptyList();
		}
		return result;
	}
@Nonnull
private static GuessTypeVisitor tryGuessingTypeWithoutDfa(PsiExpression place, boolean honorAssignments)
{
List<PsiElement> exprsAndVars = getPotentiallyAffectingElements(place);
GuessTypeVisitor visitor = new GuessTypeVisitor(place, honorAssignments);
for(PsiElement e : exprsAndVars)
{
e.accept(visitor);
if(e == place || visitor.isDfaNeeded())
{
break;
}
}
return visitor;
}
	/**
	 * Returns every expression and local variable inside the topmost block
	 * enclosing {@code place}, in traversal order; the result is cached on that
	 * block and invalidated when it changes.
	 */
	private static List<PsiElement> getPotentiallyAffectingElements(PsiExpression place)
	{
		PsiElement topmostBlock = getTopmostBlock(place);
		return CachedValuesManager.getCachedValue(topmostBlock, () -> {
			List<PsiElement> list = SyntaxTraverser.psiTraverser(topmostBlock).filter(e -> e instanceof PsiExpression || e instanceof PsiLocalVariable).toList();
			return new CachedValueProvider.Result<>(list, topmostBlock);
		});
	}
	/**
	 * Lightweight syntactic scan that tries to determine one specific type for
	 * myPlace from assignments alone. It sets myNeedDfa when it meets anything
	 * requiring real dataflow analysis: a cast or instanceof on the place, a
	 * getClass() call on it, or conflicting assigned types.
	 */
	private static class GuessTypeVisitor extends JavaElementVisitor
	{
		private static final CallMatcher OBJECT_GET_CLASS =
				CallMatcher.exactInstanceCall(CommonClassNames.JAVA_LANG_OBJECT, "getClass").parameterCount(0);
		private final
		@Nonnull
		PsiExpression myPlace;
		// The single raw (boxed) type assigned to myPlace so far, if consistent.
		PsiType mySpecificType;
		// Set when full dataflow analysis is required for a reliable answer.
		private boolean myNeedDfa;
		// Set when myPlace resolves to a local variable declared inside the scanned scope.
		private boolean myDeclared;
		private final boolean myHonorAssignments;
		GuessTypeVisitor(@Nonnull PsiExpression place, boolean honorAssignments)
		{
			myPlace = place;
			myHonorAssignments = honorAssignments;
		}
		// Records the raw (boxed) type of a value assigned to myPlace; two different
		// assigned types force dataflow analysis.
		protected void handleAssignment(@Nullable PsiExpression expression)
		{
			if(!myHonorAssignments || expression == null)
			{
				return;
			}
			PsiType type = expression.getType();
			if(type instanceof PsiPrimitiveType)
			{
				type = ((PsiPrimitiveType) type).getBoxedType(expression);
			}
			PsiType rawType = type instanceof PsiClassType ? ((PsiClassType) type).rawType() : type;
			if(rawType == null || rawType.equals(PsiType.NULL))
			{
				return;
			}
			if(mySpecificType == null)
			{
				mySpecificType = rawType;
			}
			else if(!mySpecificType.equals(rawType))
			{
				myNeedDfa = true;
			}
		}
		@Override
		public void visitAssignmentExpression(PsiAssignmentExpression expression)
		{
			if(ExpressionVariableDescriptor.EXPRESSION_HASHING_STRATEGY.equals(expression.getLExpression(), myPlace))
			{
				handleAssignment(expression.getRExpression());
			}
			super.visitAssignmentExpression(expression);
		}
		@Override
		public void visitLocalVariable(PsiLocalVariable variable)
		{
			if(ExpressionUtils.isReferenceTo(myPlace, variable))
			{
				myDeclared = true;
				handleAssignment(variable.getInitializer());
			}
			super.visitLocalVariable(variable);
		}
		@Override
		public void visitTypeCastExpression(PsiTypeCastExpression expression)
		{
			// A cast of the place itself cannot be resolved syntactically.
			PsiExpression operand = expression.getOperand();
			if(operand != null && ExpressionVariableDescriptor.EXPRESSION_HASHING_STRATEGY.equals(operand, myPlace))
			{
				myNeedDfa = true;
			}
			super.visitTypeCastExpression(expression);
		}
		@Override
		public void visitMethodCallExpression(PsiMethodCallExpression call)
		{
			// place.getClass() implies the runtime type matters -> needs dataflow.
			if(OBJECT_GET_CLASS.test(call))
			{
				PsiExpression qualifier = ExpressionUtils.getEffectiveQualifier(call.getMethodExpression());
				if(qualifier != null && ExpressionVariableDescriptor.EXPRESSION_HASHING_STRATEGY.equals(qualifier, myPlace))
				{
					myNeedDfa = true;
				}
			}
			super.visitMethodCallExpression(call);
		}
		@Override
		public void visitInstanceOfExpression(PsiInstanceOfExpression expression)
		{
			if(ExpressionVariableDescriptor.EXPRESSION_HASHING_STRATEGY.equals(expression.getOperand(), myPlace))
			{
				myNeedDfa = true;
			}
			super.visitInstanceOfExpression(expression);
		}
		public boolean isDfaNeeded()
		{
			if(myNeedDfa)
			{
				return true;
			}
			if(myDeclared || mySpecificType == null)
			{
				return false;
			}
			// An assigned type differing from the declared raw type needs dataflow
			// to decide which applies at the place.
			PsiType type = myPlace.getType();
			PsiType rawType = type instanceof PsiClassType ? ((PsiClassType) type).rawType() : type;
			return !mySpecificType.equals(rawType);
		}
	}
/**
 * Instruction visitor that, on type casts and instanceof checks, replaces
 * the operand value on the DFA stack with a variable keyed by the operand
 * expression, so later type constraints can be attributed back to it.
 */
abstract static class CastTrackingVisitor extends StandardInstructionVisitor
{
    @Override
    public DfaInstructionState[] visitTypeCast(TypeCastInstruction instruction, DataFlowRunner runner, DfaMemoryState memState)
    {
        // Swap the casted value for a trackable expression variable.
        DfaValue value = memState.pop();
        memState.push(adjustValue(runner, value, instruction.getCasted()));
        return super.visitTypeCast(instruction, runner, memState);
    }

    @Override
    public DfaInstructionState[] visitInstanceof(InstanceofInstruction instruction, DataFlowRunner runner, DfaMemoryState memState)
    {
        // Stack holds ... left, right: pop both, wrap only the left operand,
        // then restore the original stack order.
        DfaValue dfaRight = memState.pop();
        DfaValue dfaLeft = memState.pop();
        memState.push(adjustValue(runner, dfaLeft, instruction.getLeft()));
        memState.push(dfaRight);
        return super.visitInstanceof(instruction, runner, memState);
    }

    /** Wraps {@code value} in an expression-keyed variable when tracked. */
    private DfaValue adjustValue(DataFlowRunner runner, DfaValue value, @Nullable PsiExpression expression)
    {
        if(expression != null && isInteresting(value, expression))
        {
            value = runner.getFactory().getVarFactory().createVariableValue(new ExpressionVariableDescriptor(expression));
        }
        return value;
    }

    /** Hook for subclasses to limit tracked expressions; default: track all. */
    boolean isInteresting(@Nonnull DfaValue value, @Nonnull PsiExpression expression)
    {
        return true;
    }
}
/**
 * Collects, for the expression at {@code myForPlace}, the type constraints
 * the dataflow analysis has recorded for every tracked expression variable
 * at the moment the place is evaluated.
 */
private static final class ExpressionTypeInstructionVisitor extends CastTrackingVisitor
{
    /** Joined type constraint per tracked variable, merged across states. */
    private final Map<DfaVariableValue, TypeConstraint> myResult = new HashMap<>();
    /** The (paren-stripped) expression whose evaluation point we observe. */
    private final PsiElement myForPlace;

    private ExpressionTypeInstructionVisitor(@Nonnull PsiElement forPlace)
    {
        myForPlace = PsiUtil.skipParenthesizedExprUp(forPlace);
    }

    /**
     * Converts the accumulated constraints into a multimap from expression
     * to possible PsiType(s); intersection types are flattened into their
     * conjuncts.
     */
    MultiMap<PsiExpression, PsiType> getResult()
    {
        MultiMap<PsiExpression, PsiType> result = MultiMap.createSet(ExpressionVariableDescriptor.EXPRESSION_HASHING_STRATEGY);
        Project project = myForPlace.getProject();
        myResult.forEach((value, constraint) -> {
            if(value.getDescriptor() instanceof ExpressionVariableDescriptor)
            {
                PsiExpression expression = ((ExpressionVariableDescriptor) value.getDescriptor()).getExpression();
                PsiType type = constraint.getPsiType(project);
                if(type instanceof PsiIntersectionType)
                {
                    result.putValues(expression, Arrays.asList(((PsiIntersectionType) type).getConjuncts()));
                }
                else if(type != null)
                {
                    result.putValue(expression, type);
                }
            }
        });
        return result;
    }

    @Override
    protected void beforeExpressionPush(@Nonnull DfaValue value,
                                        @Nonnull PsiExpression expression,
                                        @Nullable TextRange range,
                                        @Nonnull DfaMemoryState state)
    {
        // When the analysed expression itself is about to be pushed, snapshot
        // the recorded variable types of the current memory state, joining
        // with constraints from other states already seen.
        if(range == null && myForPlace == expression)
        {
            ((DfaMemoryStateImpl) state).forRecordedVariableTypes((var, dfType) -> {
                myResult.merge(var, TypeConstraint.fromDfType(dfType), TypeConstraint::join);
            });
        }
        super.beforeExpressionPush(value, expression, range, state);
    }
}
}
| apache-2.0 |
yinhongbo/URAS | urasd/sync/config.go | 892 | package syncSvc
import (
"encoding/json"
"fmt"
)
// Init builds a SyncSvc with an empty service list, ready to be started.
func Init() *SyncSvc {
	return &SyncSvc{
		SvcList:  SvcList{},
		exitChan: struct{}{},
	}
}
// Start fetches the service configuration, parses it into the in-memory
// service list, and dumps the "test" entry for inspection.
// Fix: errors from getConfigFromApi/parseConfig were silently discarded;
// they are now reported and abort the start-up.
func (this *SyncSvc) Start() {
	if err := this.getConfigFromApi(); err != nil {
		fmt.Printf("getConfigFromApi failed: %v\n", err)
		return
	}
	if err := this.parseConfig(); err != nil {
		fmt.Printf("parseConfig failed: %v\n", err)
		return
	}
	fmt.Printf("%#v", this.SvcList["test"])
}
// getConfigFromApi is supposed to fetch the service configuration from a
// remote API; for now it installs a hard-coded JSON fixture into SvcCnfStr
// and always succeeds.
// NOTE(review): replace with a real API call — confirm the intended endpoint.
func (this *SyncSvc) getConfigFromApi() error {
	this.SvcCnfStr = `{"test":{"lb":1,"retry":2,"timeout":3000,"checksum":"ebd3c06fc2a628f5235196456224b60b","list":[{"ip":"127.0.0.1","weight":50}]}}`
	return nil
}
// parseConfig unmarshals the raw JSON configuration string into a SvcList
// and merges it into this.SvcList: a service entry is (re)loaded when it is
// new or when its checksum changed; unchanged services keep their entry.
// Returns the json.Unmarshal error, if any.
func (this *SyncSvc) parseConfig() error {
	cnf := SvcList{}
	if err := json.Unmarshal([]byte(this.SvcCnfStr), &cnf); err != nil {
		return err
	}
	for k, v := range cnf {
		// Idiom fix: `exist == true` and the duplicated assignment in both
		// branches collapsed into a single condition.
		if node, exist := this.SvcList[k]; !exist || node.CheckSum != v.CheckSum {
			this.SvcList[k] = v
		}
	}
	return nil
}
// Close is a placeholder for shutdown logic; it currently does nothing.
// NOTE(review): presumably meant to signal exitChan — confirm before
// relying on Close to stop anything.
func (this *SyncSvc) Close() {
}
| apache-2.0 |
oba2cat3/GCTest | GCTest/gen/com/gc_test/BuildConfig.java | 153 | /** Automatically generated file. DO NOT MODIFY */
package com.gc_test;
public final class BuildConfig {
    // Build-time flag emitted by the Android tooling (see generated-file
    // header above); true for debug builds.
    public final static boolean DEBUG = true;
}
xiaomozhang/druid | druid-1.0.9/src/test/java/com/alibaba/druid/bvt/sql/mysql/MySqlCreateTableTest1.java | 2316 | /*
* Copyright 1999-2011 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.druid.bvt.sql.mysql;
import java.util.List;
import org.junit.Assert;
import com.alibaba.druid.sql.MysqlTest;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.dialect.mysql.parser.MySqlStatementParser;
import com.alibaba.druid.sql.dialect.mysql.visitor.MySqlSchemaStatVisitor;
import com.alibaba.druid.stat.TableStat;
import com.alibaba.druid.stat.TableStat.Column;
/**
 * Parses a MySQL CREATE TABLE statement containing an in-line BTREE index
 * and verifies the schema statistics collected by MySqlSchemaStatVisitor.
 */
public class MySqlCreateTableTest1 extends MysqlTest {

    public void test_0() throws Exception {
        // CREATE TABLE with an index clause on a MEMORY-engine table.
        String sql = "CREATE TABLE lookup" + //
                     " (id INT, INDEX USING BTREE (id))" + //
                     " ENGINE = MEMORY;";
        MySqlStatementParser parser = new MySqlStatementParser(sql);
        List<SQLStatement> statementList = parser.parseStatementList();
        SQLStatement statemen = statementList.get(0);
        print(statementList);
        Assert.assertEquals(1, statementList.size());
        // Walk the AST collecting table/column/condition statistics.
        MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
        statemen.accept(visitor);
        System.out.println("Tables : " + visitor.getTables());
        System.out.println("fields : " + visitor.getColumns());
        System.out.println("coditions : " + visitor.getConditions());
        System.out.println("orderBy : " + visitor.getOrderByColumns());
        // Expect exactly one table ("lookup"), one column ("lookup.id"),
        // and no WHERE-style conditions for a CREATE TABLE statement.
        Assert.assertEquals(1, visitor.getTables().size());
        Assert.assertEquals(1, visitor.getColumns().size());
        Assert.assertEquals(0, visitor.getConditions().size());
        Assert.assertTrue(visitor.getTables().containsKey(new TableStat.Name("lookup")));
        Assert.assertTrue(visitor.getColumns().contains(new Column("lookup", "id")));
    }
}
| apache-2.0 |
vbartosik/pynet_test | parser.py | 990 | from ciscoconfparse import CiscoConfParse
# Parse the Cisco config once and reuse the parsed tree for all three tasks.
cfg = CiscoConfParse('cisco.cfg')

# Task 1: every "crypto map CRYPTO" entry plus all of its child lines.
crypto = cfg.find_objects(r"^crypto map CRYPTO")
print("Task 1: Find all of the crypto map entries in the file (lines that begin with 'crypto map CRYPTO') and for each crypto map entry print out its children:\n")
for entry in crypto:  # renamed from 'map', which shadowed the builtin
    print(entry.text + "configuration is:")
    for child in entry.children:
        print(child.text)

# Task 2: crypto map entries whose children enable PFS group2.
print("\nTask 2: Find all of the crypto map entries that are using PFS group2:\n")
pfs = cfg.find_objects_w_child(parentspec=r"^crypto map CRYPTO", childspec=r"set pfs group2")
for entry in pfs:
    print(entry.text)

# Task 3: crypto maps whose transform set is not AES-based, together with
# the transform-set line that names the set.
print("\nTask 3: Find the crypto maps that are not using AES (based-on the transform set name). Print these entries and their corresponding transform set name.\n")
trans = cfg.find_objects_wo_child(parentspec=r"^crypto map CRYPTO", childspec=r"AES-SHA")
for entry in trans:
    print(entry.text)
    print(entry.re_search_children(r"set transform-set")[0].text)
mjn19172/Savu | savu/plugins/driver/gpu_plugin.py | 2763 | # Copyright 2014 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module:: gpu_plugin
:platform: Unix
:synopsis: Base class for all plugins which use a GPU on the target machine
.. moduleauthor:: Mark Basham <scientificsoftware@diamond.ac.uk>
"""
import logging
class GpuPlugin(object):
    """
    Driver mix-in for plugins that need a GPU: run_plugin() works out which
    of the launched processes are GPU processes and executes the plugin
    only on the process that owns this plugin's GPU slot.
    """

    def __init__(self):
        super(GpuPlugin, self).__init__()

    def run_plugin(self, data, output, processes, plugin):
        # Map each process index to its GPU slot number, or -1 for non-GPU
        # processes, preserving the order of the `processes` name list.
        count = 0
        gpu_processes = []
        gpu_list = ["GPU" in i for i in processes]
        for i in gpu_list:
            if i:
                gpu_processes.append(count)
                count += 1
            else:
                gpu_processes.append(-1)
        # Only the process assigned to this plugin's GPU slot does the work;
        # everyone else returns immediately.
        if gpu_processes[plugin] >= 0:
            logging.debug("Running the GPU Process %i", plugin)
            new_processes = [i for i in processes if "GPU" in i]
            logging.debug(new_processes)
            logging.debug(gpu_processes)
            logging.debug("Process is %s",
                          new_processes[gpu_processes[plugin]])
            # pre_process/process/post_process are provided by the concrete
            # plugin class that mixes this driver in.
            self.pre_process(data.get_data_shape())
            self.process(data, output, new_processes,
                         gpu_processes[plugin])
            self.post_process()
            return
        logging.debug("Not Running the task as not GPU")
        return

    def process(self, data, output, processes, plugin):
        """
        Perform this plugin's work; must be overridden by the concrete plugin.

        :param data: The input data object.
        :type data: savu.data.structures
        :param output: The output data object.
        :type output: savu.data.structures
        :param processes: The GPU process names sharing the work.
        :type processes: list
        :param plugin: The index of this process within ``processes``.
        :type plugin: int
        :raises NotImplementedError: always, in this base implementation.
        """
        # NOTE(review): '%i' is given the `processes` list here — formatting
        # would fail if this error path runs; confirm the intended message.
        logging.error("process needs to be implemented for proc %i of %i :" +
                      " input is %s and output is %s",
                      plugin, processes, data.__class__, output.__class__)
        raise NotImplementedError("process needs to be implemented")
| apache-2.0 |
shsdev/archiventory | src/main/java/eu/scape_project/archiventory/utils/IOUtils.java | 6145 | /*
* Copyright 2012 The SCAPE Project Consortium.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* under the License.
*/
package eu.scape_project.archiventory.utils;
import java.io.*;
import org.apache.commons.lang.RandomStringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* I/O Utils
*
* @author Sven Schlarb https://github.com/shsdev
* @version 0.1
*/
public class IOUtils {
private static Logger logger = LoggerFactory.getLogger(IOUtils.class.getName());
/**
* Copy input stream to temporary file
*
* @param is Input sream
* @param prefix Prefix of temporary file
* @param ext Extension of temporary file
* @return Temporary file
*/
public static File copyInputStreamToTempFile(InputStream is, String prefix, String ext) {
FileOutputStream fos = null;
File tmpFile = null;
try {
tmpFile = File.createTempFile(prefix, ext);
fos = new FileOutputStream(tmpFile);
org.apache.commons.io.IOUtils.copy(is, fos);
fos.flush();
} catch (FileNotFoundException ex) {
logger.error("Temporary file not available.", ex);
} catch (IOException ex) {
logger.error("I/O Error occured.", ex);
} finally {
if (is != null) {
try {
is.close();
} catch (IOException _) {
// ignore
}
}
if (fos != null) {
try {
fos.close();
} catch (IOException _) {
// ignore
}
}
return tmpFile;
}
}
/**
* Copy byte array to file in temporary directory
*
* @param barray byte array
* @param dir Directory where the temporary file is created
* @param ext Extension of temporary file
* @return Temporary file
*/
public static File copyByteArrayToTempFileInDir(byte[] barray, String dir, String ext) {
String filename = System.currentTimeMillis() + RandomStringUtils.randomAlphabetic(5) + ext;
if (!dir.endsWith("/")) {
dir += "/";
}
FileOutputStream fos = null;
File tmpFile = null;
try {
tmpFile = new File(dir + filename);
fos = new FileOutputStream(tmpFile);
org.apache.commons.io.IOUtils.write(barray, fos);
fos.flush();
fos.close();
} catch (FileNotFoundException ex) {
logger.error("Temporary file not available.", ex);
} catch (IOException ex) {
logger.error("I/O Error", ex);
} finally {
if (fos != null) {
try {
fos.close();
} catch (IOException _) {
// ignore
}
}
}
return tmpFile;
}
/**
* Copy byte array to temporary file
*
* @param barray byte array
* @param prefix Prefix of temporary file
* @param ext Extension of temporary file
* @return Temporary file
*/
public static File copyByteArrayToTempFile(byte[] barray, String prefix, String ext) {
FileOutputStream fos = null;
File tmpFile = null;
try {
tmpFile = File.createTempFile(prefix, ext);
fos = new FileOutputStream(tmpFile);
org.apache.commons.io.IOUtils.write(barray, fos);
fos.flush();
fos.close();
} catch (FileNotFoundException ex) {
logger.error("Temporary file not available.", ex);
} catch (IOException ex) {
logger.error("I/O Error", ex);
} finally {
if (fos != null) {
try {
fos.close();
} catch (IOException _) {
// ignore
}
}
}
return tmpFile;
}
public static String copyInputStreamToString(InputStream is) {
String strContent = null;
try {
strContent = org.apache.commons.io.IOUtils.toString(is);
} catch (IOException ex) {
logger.error("I/O Error", ex);
} finally {
if (is != null) {
try {
is.close();
} catch (IOException _) {
// ignore
}
}
}
return strContent;
}
public static byte[] getBytesFromFile(String filePath) throws IOException {
File file = new File(filePath);
if (!file.exists()) {
throw new FileNotFoundException("File not available");
}
InputStream is = null;
byte[] bytes = null;
try {
is = new FileInputStream(file);
long length = file.length();
if (length > Integer.MAX_VALUE) {
throw new IllegalArgumentException("File object is too large");
}
bytes = new byte[(int) length];
int offset = 0;
int numRead = 0;
while (offset < bytes.length
&& (numRead = is.read(bytes, offset, bytes.length - offset)) >= 0) {
offset += numRead;
}
} catch (IOException ex) {
logger.error("I/O Error", ex);
} finally {
if (is != null) {
try {
is.close();
} catch (IOException _) {
// ignore
}
}
}
return bytes;
}
}
| apache-2.0 |
jdufner/microservice | gateway/src/main/java/de/jdufner/microservice/gateway/SimpleFilter.java | 1544 | /*
* Copyright 2016, Jürgen Dufner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.jdufner.microservice.gateway;
import com.netflix.zuul.ZuulFilter;
import com.netflix.zuul.context.RequestContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.servlet.http.HttpServletRequest;
/**
* @author Jürgen Dufner
* @since 0.0.1
*/
/**
 * Zuul "pre" filter that logs the HTTP method and full URL of every
 * incoming request before it is routed.
 */
@Component
public class SimpleFilter extends ZuulFilter {

    private static final Logger LOG = LoggerFactory.getLogger(SimpleFilter.class);

    /** Runs in the "pre" routing phase. */
    @Override
    public String filterType() {
        return "pre";
    }

    /** Ordering among "pre" filters. */
    @Override
    public int filterOrder() {
        return 1;
    }

    /** Applies to every request unconditionally. */
    @Override
    public boolean shouldFilter() {
        return true;
    }

    /** Logs method + URL of the current request; never modifies it. */
    @Override
    public Object run() {
        final HttpServletRequest request = RequestContext.getCurrentContext().getRequest();
        LOG.info(String.format("%s request to %s", request.getMethod(), request.getRequestURL().toString()));
        return null;
    }
}
| apache-2.0 |
aaabhilash97/google-python-exercises | basic/mimic.py | 2416 | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
"""Mimic pyquick exercise -- optional extra exercise.
Google's Python Class
Read in the file specified on the command line.
Do a simple split() on whitespace to obtain all the words in the file.
Rather than read the file line by line, it's easier to read
it into one giant string and split it once.
Build a "mimic" dict that maps each word that appears in the file
to a list of all the words that immediately follow that word in the file.
The list of words can be be in any order and should include
duplicates. So for example the key "and" might have the list
["then", "best", "then", "after", ...] listing
all the words which came after "and" in the text.
We'll say that the empty string is what comes before
the first word in the file.
With the mimic dict, it's fairly easy to emit random
text that mimics the original. Print a word, then look
up what words might come next and pick one at random as
the next work.
Use the empty string as the first word to prime things.
If we ever get stuck with a word that is not in the dict,
go back to the empty string to keep things moving.
Note: the standard python module 'random' includes a
random.choice(list) method which picks a random element
from a non-empty list.
For fun, feed your program to itself as input.
Could work on getting it to put in linebreaks around 70
columns, so the output looks better.
"""
import random
import sys
def mimic_dict(filename):
  """Return a mimic dict mapping each word to the list of words that follow it.

  The empty string is the key for the first word in the file. Follower
  lists keep duplicates and file order. The file is read in one go and
  split on whitespace.
  """
  result = {}
  # `with` guarantees the file handle is closed even on a read error.
  with open(filename, 'r') as f:
    words = f.read().split()
  prev = ''
  for word in words:
    # setdefault replaces the manual "create list if missing" branch.
    result.setdefault(prev, []).append(word)
    prev = word
  return result
def print_mimic(mimic_dict, word):
  """Print 100 words of random text, starting at `word`, walking the mimic
  dict: each next word is chosen at random among the followers of the
  current word; a word with no followers restarts from the '' entry.
  (Python 2 print-statement syntax, matching the rest of this file.)
  """
  for i in range(100):
    print word,
    nx=mimic_dict.get(word)
    if not nx:
      nx=mimic_dict['']
    word=random.choice(nx)
# Provided main(), calls mimic_dict() and mimic()
def main():
if len(sys.argv) != 2:
print 'usage: ./mimic.py file-to-read'
sys.exit(1)
dict = mimic_dict(sys.argv[1])
print_mimic(dict, '')
if __name__ == '__main__':
main()
| apache-2.0 |
felliott/scrapi | scrapi/migrations.py | 4163 | import copy
import logging
from scrapi import tasks
from scrapi import settings
from scrapi.linter import RawDocument
from scrapi.events import log_to_sentry
from scrapi.processing import get_processor
logger = logging.getLogger()
@tasks.task_autoretry(default_retry_delay=30, max_retries=5)
def rename(docs, target=None, **kwargs):
    """Migrate each document's source name to `target`.

    Re-processes the raw and normalized document under the new source name,
    then deletes the old Elasticsearch entries. Pass dry=True in kwargs to
    skip all writes/deletes.
    """
    assert target, "To run this migration you need a target."
    for doc in docs:
        # Re-create the raw document with the new source name.
        new_doc = copy.deepcopy(doc.raw.attributes)
        new_doc['source'] = target
        raw = RawDocument(new_doc, validate=False)
        assert doc.raw.attributes['source'] != target, "Can't rename {} to {}, names are the same.".format(doc.raw['source'], target)
        if not kwargs.get('dry'):
            tasks.process_raw(raw)
            tasks.process_normalized(tasks.normalize(raw, raw['source']), raw)
            logger.info('Processed document from {} with id {}'.format(doc.raw.attributes['source'], raw['docID']))
            # Remove the document under its old source name from both indices;
            # 404s are ignored so already-deleted docs do not abort the run.
            es_processor = get_processor('elasticsearch')
            es_processor.manager.es.delete(index=settings.ELASTIC_INDEX, doc_type=doc.raw.attributes['source'], id=raw['docID'], ignore=[404])
            es_processor.manager.es.delete(index='share_v1', doc_type=doc.raw.attributes['source'], id=raw['docID'], ignore=[404])
            logger.info('Renamed document from {} to {} with id {}'.format(doc.raw.attributes['source'], target, raw['docID']))
@tasks.task_autoretry(default_retry_delay=30, max_retries=5)
def cross_db(docs, target_db=None, index=None, **kwargs):
    """
    Migration to go between
    cassandra > postgres
    postgres > cassandra
    cassandra > elasticsearch
    postgres > elasticsearch
    source db can be passed in to the migrate task, and will default to the CANONICAL_PROCESSOR specified in settings
    target_db will be specified when the task is called
    """
    assert target_db, 'Please specify a target db for the migration -- either postgres or elasticsearch'
    assert target_db in ['postgres', 'cassandra', 'elasticsearch'], 'Invalid target database - please specify either postgres, cassandra or elasticsearch'
    for doc in docs:
        # Per-document try/except: one bad document must not abort the batch.
        try:
            if not doc.raw['doc']:
                # corrupted database item has no doc element
                message = 'No doc element in raw doc -- could not migrate document from {} with id {}'.format(doc.raw.attributes['source'], doc.raw.attributes['docID'])
                log_to_sentry(message)
                logger.info(message)
                continue
            raw, normalized = doc.raw, doc.normalized
            target_processor = get_processor(target_db)
            # dry=True in kwargs skips all writes (inspection-only run).
            if not kwargs.get('dry'):
                target_processor.process_raw(raw)
                if normalized:
                    target_processor.process_normalized(raw, normalized)
                else:
                    logger.info('Not storing migrated normalized from {} with id {}, document is not in approved set list.'.format(raw.attributes['source'], raw.attributes['docID']))
        except Exception as e:
            # Log and report, then continue with the next document.
            logger.exception(e)
            log_to_sentry(e)
@tasks.task_autoretry(default_retry_delay=1, max_retries=5)
def renormalize(docs, *args, **kwargs):
    """Re-run normalization for each document's raw form and store the result.
    Pass dry=True in kwargs to skip processing entirely."""
    for doc in docs:
        if not kwargs.get('dry'):
            tasks.process_normalized(tasks.normalize(doc.raw, doc.raw['source']), doc.raw)
@tasks.task_autoretry(default_retry_delay=30, max_retries=5)
def delete(docs, sources=None, **kwargs):
    """Delete each document from the canonical store and both ES indices.

    NOTE(review): unlike the other migrations this one does not honor a
    dry-run flag — confirm that is intentional before running it.
    """
    for doc in docs:
        assert sources, "To run this migration you need a source."
        # Remove from the canonical processor first, then Elasticsearch;
        # 404s on ES are ignored (document may already be gone).
        processor = get_processor(settings.CANONICAL_PROCESSOR)
        processor.delete(source=doc.raw.attributes['source'], docID=doc.raw.attributes['docID'])
        es_processor = get_processor('elasticsearch')
        es_processor.manager.es.delete(index=settings.ELASTIC_INDEX, doc_type=sources, id=doc.raw.attributes['docID'], ignore=[404])
        es_processor.manager.es.delete(index='share_v1', doc_type=sources, id=doc.raw.attributes['docID'], ignore=[404])
        logger.info('Deleted document from {} with id {}'.format(sources, doc.raw.attributes['docID']))
| apache-2.0 |
andyredhead/graph-connection | src/test/java/net/sf/moksha/graph/access/connectors/neo4j/TestNeo4jQueryComponentsToCypherConverter.java | 19105 | package net.sf.moksha.graph.access.connectors.neo4j;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.concurrent.ConcurrentSkipListMap;
import net.sf.moksha.graph.access.entity.GraphNode;
import net.sf.moksha.graph.access.entity.GraphRelationship;
import net.sf.moksha.graph.access.entity.impl.BaseGraphComponentFactory;
import net.sf.moksha.graph.access.entity.impl.BaseGraphNode;
import net.sf.moksha.graph.access.entity.impl.BaseGraphRelationship;
import net.sf.moksha.graph.access.entity.impl.BlankNode;
import net.sf.moksha.graph.access.exception.InformationProblem;
import net.sf.moksha.graph.access.query.GraphFragment;
import net.sf.moksha.graph.access.query.QueryComponents;
import net.sf.moksha.graph.access.query.part.CreateClause;
import net.sf.moksha.graph.access.query.part.MatchClause;
import net.sf.moksha.graph.access.query.part.ReturnClause;
import org.junit.Before;
import org.junit.Test;
public class TestNeo4jQueryComponentsToCypherConverter {
public static final String PROP_NAME_1 = "prop1";
public static final String PROP_NAME_2 = "prop2";
public static final String REL_TYPE_1 = "test-rel";
Neo4jQueryComponentsToCypherConverter _cmp2Cy;
// single value, check each type
/**
* Check string.
*/
/** A single String value must encode as `name`:"value". */
@Test
public void testSingleStringProperty() {
    String propValue = "test property value";
    List<Object> valList = createValueList(propValue);
    try {
        String encodedProp = _cmp2Cy.encodeSingleProperty(PROP_NAME_1, valList);
        assertNotNull("testSingleStringProperty - encoded prop is null", encodedProp);
        System.out.println("testSingleStringProperty - encoded property is: " + encodedProp);
        String expectedEncoding = "`" + PROP_NAME_1 + "`:" + "\"" + propValue + "\"";
        assertTrue("testSingleStringProperty - encoded prop did not match expected, was: " + encodedProp, expectedEncoding.equals(encodedProp));
    } catch (InformationProblem e) {
        System.out.println("testSingleStringProperty - exception");
        e.printStackTrace();
        fail();
    }
}
/**
* Check Integer
*/
/** A single Integer value must encode unquoted: `name`:2. */
@Test
public void testSingleIntegerProperty() {
    List<Object> valList = createValueList(new Integer(2));
    try {
        String encodedProp = _cmp2Cy.encodeSingleProperty(PROP_NAME_1, valList);
        assertNotNull("testSingleIntegerProperty - encoded prop is null", encodedProp);
        System.out.println("testSingleIntegerProperty - encoded property is: " + encodedProp);
        String expectedEncoding = "`" + PROP_NAME_1 + "`:2";
        assertTrue("testSingleIntegerProperty - encoded prop did not match expected, was: " + encodedProp, expectedEncoding.equals(encodedProp));
    } catch (InformationProblem e) {
        fail("testSingleIntegerProperty - problem: " + e);
    }
}
/**
* Check Double
*/
/** A single Double value must encode with a decimal point: `name`:2.0. */
@Test
public void testSingleDoubleProperty() {
    List<Object> valList = createValueList(new Double(2));
    try {
        String encodedProp = _cmp2Cy.encodeSingleProperty(PROP_NAME_1, valList);
        assertNotNull("testSingleDoubleProperty - encoded prop is null", encodedProp);
        System.out.println("testSingleDoubleProperty - encoded property is: " + encodedProp);
        String expectedEncoding = "`" + PROP_NAME_1 + "`:2.0";
        assertTrue("testSingleDoubleProperty - encoded prop did not match expected, was: " + encodedProp, expectedEncoding.equals(encodedProp));
    } catch (InformationProblem e) {
        fail("testSingleDoubleProperty - problem: " + e);
    }
}
/**
* Check Boolean
*/
/** A single Boolean value must encode as a bare literal: `name`:true. */
@Test
public void testSingleBooleanProperty() {
    List<Object> valList = createValueList(new Boolean(Boolean.TRUE));
    try {
        String encodedProp = _cmp2Cy.encodeSingleProperty(PROP_NAME_1, valList);
        assertNotNull("testSingleBooleanProperty - encoded prop is null", encodedProp);
        System.out.println("testSingleBooleanProperty - encoded property is: " + encodedProp);
        String expectedEncoding = "`" + PROP_NAME_1 + "`:true";
        assertTrue("testSingleBooleanProperty - encoded prop did not match expected, was: " + encodedProp, expectedEncoding.equals(encodedProp));
    } catch (InformationProblem e) {
        fail("testSingleBooleanProperty - problem: " + e);
    }
}
// multi value
/**
* Check multi-valued property (using String).
*/
/** A multi-valued property must encode as a bracketed list of values. */
@Test
public void testMultipleStringProperty() {
    List<Object> valList = createValueList("propval-1", "propval-2");
    try {
        String encodedProp = _cmp2Cy.encodeSingleProperty(PROP_NAME_1, valList);
        assertNotNull("testMultipleStringProperty - encoded prop is null", encodedProp);
        System.out.println("testMultipleStringProperty - encoded property is: " + encodedProp);
        String expectedEncoding = "`" + PROP_NAME_1 + "`:[\"propval-1\", \"propval-2\"]";
        assertTrue("testMultipleStringProperty - encoded prop did not match expected, was: " + encodedProp, expectedEncoding.equals(encodedProp));
    } catch (InformationProblem e) {
        fail("testMultipleStringProperty - problem: " + e);
    }
}
// multiple property map
/**
* Check a map including multiple properties.
*/
/**
 * A map with a multi-valued and a single-valued property must encode both
 * entries, comma-separated, wrapped in braces (LinkedHashMap preserves the
 * insertion order the expected string relies on).
 */
@Test
public void testBasicPropertyMap() {
    Map<String, List<Object>> propMap = new LinkedHashMap<>();
    List<Object> prop1ValList = createValueList("a value", "another value");
    propMap.put(PROP_NAME_1, prop1ValList);
    List<Object> prop2ValList = createValueList("more value");
    propMap.put(PROP_NAME_2, prop2ValList);
    try {
        String encodedPropertiesStr = _cmp2Cy.graphComponentPropertiesAsCypherString(propMap);
        // Bug fix: the original called the one-arg assertNotNull(message)
        // overload, which checks that the *message string* is non-null and
        // therefore always passed; the actual value is now asserted.
        assertNotNull("testBasicPropertyMap - encoded properties string is null", encodedPropertiesStr);
        String expectedEncoding = "{ `" + PROP_NAME_1 + "`:[\"a value\", \"another value\"], `" + PROP_NAME_2 + "`:\"more value\"" + " }";
        assertTrue("testBasicPropertyMap - encoded properties did not match expected, was: " + encodedPropertiesStr, expectedEncoding.equals(encodedPropertiesStr));
    } catch (InformationProblem e) {
        fail("testBasicPropertyMap - problem: " + e);
    }
}
// node
/** A node with only a local alias must encode as (alias). */
@Test
public void testNodeWithJustAlias() {
    try {
        BaseGraphNode baseNd = BaseGraphComponentFactory.instanceForBaseGraphNode(null, null, "test-node");
        String encNdStr = _cmp2Cy.convertBaseGraphNodeForMatchClause(baseNd);
        assertNotNull("testNodeWithJustAlias - encoded node is null", encNdStr);
        String expectedNdEncStr = "(test-node)";
        assertTrue("testNodeWithJustAlias - encoded node did not match expected, was: " + encNdStr, expectedNdEncStr.equals(encNdStr));
    } catch (InformationProblem e) {
        fail("testNodeWithJustAlias - problem: " + e);
    }
}
/** A node with no alias must get a generated one: (nd-0). */
@Test
public void testNodeWithNullLocalAlias() {
    try {
        BaseGraphNode baseNd = BaseGraphComponentFactory.instanceForBaseGraphNode(null, null, null);
        String encNdStr = _cmp2Cy.convertBaseGraphNodeForMatchClause(baseNd);
        assertNotNull("testNodeWithNullLocalAlias - encoded node is null", encNdStr);
        String expectedNdEncStr = "(nd-0)";
        assertTrue("testNodeWithNullLocalAlias - encoded node did not match expected, was: " + encNdStr, expectedNdEncStr.equals(encNdStr));
    } catch (InformationProblem e) {
        fail("testNodeWithNullLocalAlias - problem: " + e);
    }
}
/** A node with labels must encode as (alias:label-1:label-2). */
@Test
public void testNodeWithLabels() {
    try {
        List<String> labelList = new Vector<>();
        labelList.add("label-1");
        labelList.add("label-2");
        BaseGraphNode baseNd = BaseGraphComponentFactory.instanceForBaseGraphNode(labelList, null, "test-node");
        String encNdStr = _cmp2Cy.convertBaseGraphNodeForMatchClause(baseNd);
        assertNotNull("testNodeWithLabels - encoded node is null", encNdStr);
        String expectedNdEncStr = "(test-node:label-1:label-2)";
        assertTrue("testNodeWithLabels - encoded node did not match expected, was: " + encNdStr, expectedNdEncStr.equals(encNdStr));
    } catch (InformationProblem e) {
        fail("testNodeWithLabels - problem: " + e);
    }
}
/**
 * A node with labels and properties must encode as
 * (alias:label-1:label-2 { props }).
 */
@Test
public void testNodeWithLabelsAndProperties() {
    try {
        List<String> labelList = new Vector<>();
        labelList.add("label-1");
        labelList.add("label-2");
        Map<String, List<Object>> propMap = new ConcurrentSkipListMap<>();
        List<Object> prop1ValList = createValueList("a value", "another value");
        propMap.put(PROP_NAME_1, prop1ValList);
        BaseGraphNode baseNd = BaseGraphComponentFactory.instanceForBaseGraphNode(labelList, propMap, "test-node");
        String encNdStr = _cmp2Cy.convertBaseGraphNodeForMatchClause(baseNd);
        // Bug fix: failure/assert messages previously said "testNodeWithLabels"
        // (copy-pasted from the neighbouring test), which misleads on failure.
        assertNotNull("testNodeWithLabelsAndProperties - encoded node is null", encNdStr);
        String expectedNdEncStr = "(test-node:label-1:label-2 " + "{ `" + PROP_NAME_1 + "`:[\"a value\", \"another value\"]" + " }" + ")";
        assertTrue("testNodeWithLabelsAndProperties - encoded node did not match expected, was: " + encNdStr, expectedNdEncStr.equals(encNdStr));
    } catch (InformationProblem e) {
        fail("testNodeWithLabelsAndProperties - problem: " + e);
    }
}
// relationship
/** A bare directed relationship between blank nodes encodes as "-->". */
@Test
public void testRelationshipJustDirection() {
    try {
        BaseGraphRelationship rel = BaseGraphComponentFactory.instanceForBaseGraphRelationship(BaseGraphComponentFactory.instanceForBlankNode(null), BaseGraphComponentFactory.instanceForBlankNode(null), null, null);
        String encRelStr = _cmp2Cy.convertBaseGraphRelationshipForMatchClause(rel);
        String expectedRelEncStr = "-->";
        assertTrue("testRelationshipJustDirection - encoded relationship did not match expected, was: " + encRelStr, expectedRelEncStr.equals(encRelStr));
    } catch (InformationProblem e) {
        fail("testRelationshipJustDirection - problem: " + e);
    }
}
/** When direction does not matter the relationship encodes as "--". */
@Test
public void testRelationshipNoDirection() {
    try {
        BaseGraphRelationship rel = BaseGraphComponentFactory.instanceForBaseGraphRelationship(BaseGraphComponentFactory.instanceForBlankNode(null), BaseGraphComponentFactory.instanceForBlankNode(null), null, null);
        rel.setDirectionMatters(false);
        String encRelStr = _cmp2Cy.convertBaseGraphRelationshipForMatchClause(rel);
        String expectedRelEncStr = "--";
        assertTrue("testRelationshipNoDirection - encoded relationship did not match expected, was: " + encRelStr, expectedRelEncStr.equals(encRelStr));
    } catch (InformationProblem e) {
        fail("testRelationshipNoDirection - problem: " + e);
    }
}
/** A local alias on the relationship appears inside the brackets, without backticks. */
@Test
public void testRelationshipWithLocalAlias() {
    try {
        BaseGraphRelationship aliased = BaseGraphComponentFactory.instanceForBaseGraphRelationship(
                BaseGraphComponentFactory.instanceForBlankNode(null),
                BaseGraphComponentFactory.instanceForBlankNode(null),
                null,
                null);
        aliased.setLocalAlias("test-rel");
        String expected = "-[test-rel]->";
        String encoded = _cmp2Cy.convertBaseGraphRelationshipForMatchClause(aliased);
        assertTrue("testRelationshipWithLocalAlias - encoded relationship did not match expected, was: " + encoded, expected.equals(encoded));
    } catch (InformationProblem e) {
        fail("testRelationshipWithLocalAlias - problem: " + e);
    }
}
/** A typed relationship encodes the type between backticks inside the brackets. */
@Test
public void testRelationshipWithType() {
    try {
        BaseGraphRelationship typed = BaseGraphComponentFactory.instanceForBaseGraphRelationship(
                BaseGraphComponentFactory.instanceForBlankNode(null),
                BaseGraphComponentFactory.instanceForBlankNode(null),
                "rel-type",
                null);
        String expected = "-[:`rel-type`]->";
        String encoded = _cmp2Cy.convertBaseGraphRelationshipForMatchClause(typed);
        assertTrue("testRelationshipWithType - encoded relationship did not match expected, was: " + encoded, expected.equals(encoded));
    } catch (InformationProblem e) {
        fail("testRelationshipWithType - problem: " + e);
    }
}
/** A typed relationship with a property map encodes the properties after the type. */
@Test
public void testRelationshipWithProperties() {
    try {
        Map<String, List<Object>> propMap = new ConcurrentSkipListMap<>();
        propMap.put(PROP_NAME_1, createValueList("a value"));
        BaseGraphRelationship rel = BaseGraphComponentFactory.instanceForBaseGraphRelationship(
                BaseGraphComponentFactory.instanceForBlankNode(null),
                BaseGraphComponentFactory.instanceForBlankNode(null),
                "rel-type",
                propMap);
        String encoded = _cmp2Cy.convertBaseGraphRelationshipForMatchClause(rel);
        String expected = "-[:`rel-type`" + "{ `" + PROP_NAME_1 + "`:\"a value\"" + " }" + "]->";
        assertTrue("testRelationshipWithProperties - encoded relationship did not match expected, was: " + encoded, expected.equals(encoded));
    } catch (InformationProblem e) {
        fail("testRelationshipWithProperties - problem: " + e);
    }
}
// blank node
/** A blank node with no alias encodes as an empty pair of parentheses. */
@Test
public void testBlankNodeWithoutLocalAlias() {
    BlankNode anonymous = BaseGraphComponentFactory.instanceForBlankNode(null);
    String encoded = _cmp2Cy.convertBaseBlankNodeForMatchClause(anonymous);
    assertNotNull("testBlankNodeWithoutLocalAlias - encoded blank node is null", encoded);
    String expected = "()";
    assertTrue("testBlankNodeWithoutLocalAlias - encoded blank node did not match expected, was: " + encoded, expected.equals(encoded));
}
/** A blank node's alias appears inside the parentheses. */
@Test
public void testBlankNodeWithLocalAlias() {
    BlankNode aliased = BaseGraphComponentFactory.instanceForBlankNode("blank-nd");
    String encoded = _cmp2Cy.convertBaseBlankNodeForMatchClause(aliased);
    assertNotNull("testBlankNodeWithLocalAlias - encoded blank node is null", encoded);
    String expected = "(blank-nd)";
    assertTrue("testBlankNodeWithLocalAlias - encoded blank node did not match expected, was: " + encoded, expected.equals(encoded));
}
// process individual match clause
/** Two property-bearing nodes joined by a typed relationship, processed as one match phrase. */
@Test
public void testProcessMatchClause() {
    try {
        GraphNode nd1 = createTestNode("nd1", "nd-1_prop1", "a value");
        GraphNode nd2 = createTestNode("nd2", "nd-2_prop1", "different value");
        GraphRelationship graphRel = BaseGraphComponentFactory.instanceForBaseGraphRelationship(nd1, nd2, "test", null);
        GraphFragment phrase = new GraphFragment();
        phrase.addNode(nd1);
        phrase.addNode(nd2);
        phrase.addRelationship(graphRel);
        MatchClause matchClause = new MatchClause();
        matchClause.addMatchPhrase(phrase);
        _cmp2Cy.processMatchClause(matchClause);
        // The converter accumulates output in its internal buffer.
        String result = _cmp2Cy._strBuf.toString();
        String expected = "MATCH (nd1:test-label { `nd1`:\"a value\" })-[:`test`]->(nd2:test-label { `nd2`:\"different value\" })";
        assertTrue("testProcessMatchClause - result did not match expected, was: " + result, expected.equals(result));
    } catch (Exception e) {
        System.out.println("testProcessMatchClause - problem");
        e.printStackTrace();
        fail("testProcessMatchClause - problem: " + e);
    }
}
// process multiple match clauses
// process return clause
/** Verifies the RETURN clause lists the node, its labels helper, and the aliased relationship. */
@Test
public void testProcessReturnClause() {
    try {
        GraphNode nd1 = createTestNode("nd1", null, null);
        GraphNode nd2 = createTestNode("nd2", null, null);
        GraphRelationship graphRel = BaseGraphComponentFactory.instanceForBaseGraphRelationship(nd1, nd2, "test", null);
        graphRel.setLocalAlias("r1");
        ReturnClause retClause = new ReturnClause();
        retClause.addReturnGraphComponent(nd1);
        retClause.addReturnGraphComponent(graphRel);
        QueryComponents qryCmps = new QueryComponents();
        qryCmps.setReturnClause(retClause);
        String retStr = _cmp2Cy.generateReturnStatement(qryCmps);
        assertNotNull("testProcessReturnClause - retStr is null", retStr);
        String expectedRetStr = "\nRETURN `nd1`, labels(nd1) AS nd1Labels, `r1`";
        assertTrue("testProcessReturnClause - return clause did not match expected, was: " + retStr, expectedRetStr.equals(retStr));
    } catch (InformationProblem e) {
        System.out.println("testProcessReturnClause - problem: " + e);
        e.printStackTrace();
        // Fixed: previously failed with a truncated message ("testProcessReturnClause - ")
        // that dropped the exception details; now consistent with the sibling tests.
        fail("testProcessReturnClause - problem: " + e);
    }
}
// full cypher clauses
/** End-to-end: a single-node match plus a return clause through the full converter entry point. */
@Test
public void testMatchAndReturn() {
    try {
        GraphNode nd1 = createTestNode("nd1", null, null);
        GraphFragment phrase = new GraphFragment();
        phrase.addNode(nd1);
        MatchClause matchClause = new MatchClause();
        matchClause.addMatchPhrase(phrase);
        ReturnClause retClause = new ReturnClause();
        retClause.addReturnGraphComponent(nd1);
        QueryComponents components = new QueryComponents();
        components.addMatchClause(matchClause);
        components.setReturnClause(retClause);
        String cypher = _cmp2Cy.convertToCypher(components);
        assertNotNull("testMatchAndReturn - cypherQry is null", cypher);
        String expected = "MATCH (nd1:test-label)\nRETURN `nd1`, labels(nd1) AS nd1Labels";
        assertTrue("testMatchAndReturn - cypherQry did not match expected, was: " + cypher, expected.equals(cypher));
    } catch (Exception e) {
        System.out.println("testMatchAndReturn - problem: " + e);
        e.printStackTrace();
        fail("testMatchAndReturn - problem: " + e);
    }
}
// create clause
/** Verifies the non-unique CREATE clause encoding for a single labelled node with one property. */
@Test
public void testCreateClauseEncodingNotUnique() {
    QueryComponents queryComponents = new QueryComponents();
    CreateClause createClause = new CreateClause();
    GraphFragment createPhrase = new GraphFragment();
    try {
        GraphNode nd1 = createTestNode("nd1", null, null);
        nd1.addPropertyValue("test-prop", "a test value");
        createPhrase.addNode(nd1);
        createClause.addCreatePhrase(createPhrase);
        queryComponents.addCreateClause(createClause);
        _cmp2Cy.generateCreateStatements(queryComponents);
        String encoded = _cmp2Cy._strBuf.toString();
        // Expected form: CREATE (nd1:test-label { `test-prop`:"a test value" })
        String expected = "\nCREATE (nd1:test-label { `test-prop`:\"a test value\" })";
        assertNotNull("testCreateClauseEncodingNotUnique - null encoded string", encoded);
        assertTrue("testCreateClauseEncodingNotUnique - encoded didn't match expected, was: " + encoded, expected.equals(encoded));
    } catch (Exception e) {
        System.out.println("testCreateClauseEncodingNotUnique - problem: " + e);
        e.printStackTrace();
        fail("testCreateClauseEncodingNotUnique - problem: " + e);
    }
}
// util
/** Collects the given varargs into a mutable list (Vector, matching the rest of this class). */
protected List<Object> createValueList(Object... propValues) {
    List<Object> values = new Vector<>();
    for (int i = 0; i < propValues.length; i++) {
        values.add(propValues[i]);
    }
    return values;
}
/**
 * Builds a single-label ("test-label") BaseGraphNode for the conversion tests.
 *
 * NOTE(review): the {@code propName} parameter is never used — the property is stored
 * under the node's alias instead ({@code addPropertyValue(alias, propVal)}). The expected
 * strings in the tests above compensate for this (they use the alias as the property key),
 * so confirm whether this is intentional before changing it.
 *
 * @param alias    local alias for the node; also (currently) used as the property key
 * @param propName declared property key — currently ignored, see note above
 * @param propVal  property value stored on the node (may be null)
 * @throws InformationProblem if node construction fails
 */
protected BaseGraphNode createTestNode(String alias, String propName, String propVal) throws InformationProblem {
    List<String> labelList = new Vector<>();
    labelList.add("test-label");
    BaseGraphNode nd = BaseGraphComponentFactory.instanceForBaseGraphNode(labelList, null, alias);
    nd.addPropertyValue(alias, propVal);
    return nd;
}
/** Creates a fresh converter before each test so internal state (e.g. _strBuf) cannot leak between tests. */
@Before
public void setUp() {
    _cmp2Cy = new Neo4jQueryComponentsToCypherConverter();
}
}
| apache-2.0 |
loverdos/thrift3r | src/main/scala/com/ckkloverdos/thrift3r/BinReprType.java | 1858 | /*
* Copyright (c) 2013 Christos KK Loverdos
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ckkloverdos.thrift3r;
/**
 * Binary representation types: these are the types supported at the binary codec level.
 *
 * @author Christos KK Loverdos <loverdos@gmail.com>
 */
public enum BinReprType {
    VOID((byte) 0),
    BOOL((byte) 1),    // Java: boolean
    INT8((byte) 2),    // Java: byte
    INT16((byte) 3),   // Java: short
    INT32((byte) 4),   // Java: int
    INT64((byte) 5),   // Java: long
    FLOAT32((byte) 6), // Java: float
    FLOAT64((byte) 7), // Java: double
    STRING((byte) 10),
    SET((byte) 20),
    LIST((byte) 21),
    MAP((byte) 22),
    OPTION((byte) 23), // Yes, we support options natively
    ENUM((byte) 30),   // Simple, Java-like enums
    STRUCT((byte) 40);

    /** Wire-level tag identifying this representation type. */
    public final byte brType;

    private BinReprType(byte tag) {
        this.brType = tag;
    }

    /**
     * True for types whose value can be rendered directly as a string:
     * the numeric primitives, BOOL, STRING itself and ENUM. Container types
     * (SET, LIST, MAP, OPTION), STRUCT and VOID return false.
     */
    public boolean hasDirectStringRepresentation() {
        return this == BOOL
            || this == INT8
            || this == INT16
            || this == INT32
            || this == INT64
            || this == FLOAT32
            || this == FLOAT64
            || this == STRING
            || this == ENUM;
    }
}
| apache-2.0 |
DataDog/omnibus-software | config/software/futures.rb | 317 | name "futures"
# Omnibus software definition: the Python "futures" backport package
# (concurrent.futures for Python 2), installed via pip.
default_version "2.2.0"

# Built against the bundled Python interpreter and its pip.
dependency "python"
dependency "pip"

build do
  license "Python-2.0"
  license_file "https://raw.githubusercontent.com/agronholm/pythonfutures/master/LICENSE"
  # --install-scripts pins pip-generated entry points inside the omnibus install dir;
  # windows_safe_path keeps the embedded path valid on Windows.
  pip "install --install-option=\"--install-scripts=#{windows_safe_path(install_dir)}/bin\" #{name}==#{version}"
end
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-iotthingsgraph/src/main/java/com/amazonaws/services/iotthingsgraph/model/transform/SystemTemplateSummaryMarshaller.java | 2969 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iotthingsgraph.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.iotthingsgraph.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * SystemTemplateSummaryMarshaller
 *
 * <p>Binds each field of a {@code SystemTemplateSummary} to its JSON payload
 * location ("id", "arn", "revisionNumber", "createdAt") and writes the values
 * through the SDK's {@code ProtocolMarshaller}.
 *
 * <p>NOTE: this class is code-generated (see {@code @Generated} below); manual
 * edits will be lost when the SDK is regenerated.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class SystemTemplateSummaryMarshaller {

    // One binding per marshalled field: payload location plus JSON member name.
    private static final MarshallingInfo<String> ID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("id").build();
    private static final MarshallingInfo<String> ARN_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("arn").build();
    private static final MarshallingInfo<Long> REVISIONNUMBER_BINDING = MarshallingInfo.builder(MarshallingType.LONG)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("revisionNumber").build();
    // createdAt is serialized in the "unixTimestamp" (epoch seconds) format.
    private static final MarshallingInfo<java.util.Date> CREATEDAT_BINDING = MarshallingInfo.builder(MarshallingType.DATE)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("createdAt").timestampFormat("unixTimestamp").build();

    // Single shared instance; the marshaller holds no per-call state.
    private static final SystemTemplateSummaryMarshaller instance = new SystemTemplateSummaryMarshaller();

    public static SystemTemplateSummaryMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * @throws SdkClientException if the argument is null, or wrapping any exception thrown while marshalling
     */
    public void marshall(SystemTemplateSummary systemTemplateSummary, ProtocolMarshaller protocolMarshaller) {
        if (systemTemplateSummary == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(systemTemplateSummary.getId(), ID_BINDING);
            protocolMarshaller.marshall(systemTemplateSummary.getArn(), ARN_BINDING);
            protocolMarshaller.marshall(systemTemplateSummary.getRevisionNumber(), REVISIONNUMBER_BINDING);
            protocolMarshaller.marshall(systemTemplateSummary.getCreatedAt(), CREATEDAT_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
| apache-2.0 |
viraintel/OWASP-Nettacker | lib/language/messages_fr.py | 17004 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
def all_messages():
    """
    Return the full catalogue of OWASP Nettacker user-facing messages
    translated to French ("fr").

    Returns:
        dict: mapping of message keys (str) to French message strings;
        values may contain ``{0}``-style placeholders filled in by callers.
    """
    # NOTE: keys and placeholder numbering must stay in sync with the other
    # language files; only the French text may differ between them.
    return \
        {
            "scan_started": "Moteur Nettacker a commencé ...",
            "options": "python nettacker.py [options]",
            "help_menu": "Afficher le menu d'aide de Nettacker",
            "license": "S'il vous plaît lire la licence et les accords https://github.com/zdresearch/OWASP-Nettacker",
            "engine": "Moteur",
            "engine_input": "Options de saisie du moteur",
            "select_language": "sélectionner une langue {0}",
            "range": "analyser toutes les adresses IP de la plage",
            "subdomains": "Rechercher et analyser des sous-domaines",
            "thread_number_connections": "numéros de thread pour les connexions à un hôte",
            "thread_number_hosts": "numéros de thread pour les hôtes d'analyse",
            "save_logs": "enregistrer tous les journaux dans le fichier (results.txt, results.html, results.json)",
            "target": "Cible",
            "target_input": "Options de saisie cible",
            "target_list": "liste (s) cible (s), séparée par \",\"",
            "read_target": "lire la (les) cible (s) à partir du fichier",
            "scan_method_options": "Options de méthode de numérisation",
            "choose_scan_method": "choisissez la méthode de scan {0}",
            "exclude_scan_method": "choisissez la méthode de scan pour exclure {0}",
            "username_list": "nom d'utilisateur (s), séparé par \",\"",
            "username_from_file": "lire le (s) nom (s) d'utilisateur à partir du fichier",
            "password_seperator": "mot de passe (s), séparé par \",\"",
            "read_passwords": "lire le (s) mot de passe (s) du fichier",
            "port_seperator": "port (s) list, séparé par \",\"",
            "time_to_sleep": "le temps de dormir entre chaque demande",
            "error_target": "Impossible de spécifier la ou les cibles",
            "error_target_file": "Impossible de spécifier la (les) cible (s), impossible d'ouvrir le fichier: {0}",
            "thread_number_warning": "il est préférable d'utiliser le numéro de fil"
                                     " inférieur à 100, BTW nous continuons ...",
            "set_timeout": "mettre timeout à {0} secondes, c'est trop gros, n'est-ce pas?"
                           " par la façon dont nous continuons ...",
            "scan_module_not_found": "ce module de scan [{0}] n'a pas été trouvé!",
            "error_exclude_all": "vous ne pouvez pas exclure toutes les méthodes de scan",
            "exclude_module_error": "le module {0} que vous avez sélectionné pour exclure non trouvé!",
            "method_inputs": "Entrez les entrées des méthodes, par exemple: ftp_brute_users ="
                             " test, admin & ftp_brute_passwds = read_from_file: /tmp/pass.txt&ftp_brute_port=21",
            "error_reading_file": "Impossible de lire le fichier {0}",
            "error_username": "Impossible de spécifier le (s) nom (s) d'utilisateur, "
                              "impossible d'ouvrir le fichier: {0}",
            "found": "{0} trouvé ({1}: {2})",
            "error_password_file": "Impossible de spécifier le (s) mot (s) de passe, impossible "
                                   "d'ouvrir le fichier: {0}",
            "file_write_error": "le fichier \"{0}\" n'est pas accessible en écriture!",
            "scan_method_select": "veuillez choisir votre méthode de scan!",
            "remove_temp": "enlever les fichiers temporaires!",
            "sorting_results": "tri des résultats!",
            "done": "terminé!",
            "start_attack": "commencer à attaquer {0}, {1} sur {2}",
            "module_not_available": "ce module \"{0}\" n'est pas disponible",
            "error_platform": "Malheureusement, cette version du logiciel pourrait simplement être exécutée"
                              " sous linux / osx / windows.",
            "python_version_error": "Votre version de Python n'est pas supportée!",
            "skip_duplicate_target": "ignorer la cible en double (certains sous-domaines / domaines peuvent"
                                     " avoir la même adresse IP et les mêmes plages)",
            "unknown_target": "type de cible inconnu [{0}]",
            "checking_range": "vérifier la plage {0} ...",
            "checking": "vérification {0} ...",
            "HOST": "HÔTE",
            "USERNAME": "NOM D'UTILISATEUR",
            "PASSWORD": "MOT DE PASSE",
            "PORT": "PORT",
            "TYPE": "TYPE",
            "DESCRIPTION": "LA DESCRIPTION",
            "verbose_level": "niveau de mode verbeux (0-5) (par défaut 0)",
            "software_version": "afficher la version du logiciel",
            "check_updates": "vérifier la mise à jour",
            "outgoing_proxy": "connexions sortantes proxy (chaussettes). exemple socks5: 127.0.0.1:9050,"
                              " chaussettes: //127.0.0.1: 9050 socks5: //127.0.0.1: 9050 ou socks4:"
                              " socks4: //127.0.0.1: 9050, authentification: socks: // nom d'utilisateur: "
                              "mot de passe @ 127.0.0.1, socks4: // nom d'utilisateur: password@127.0.0.1, "
                              "socks5: // nom d'utilisateur: password@127.0.0.1",
            "valid_socks_address": "s'il vous plaît entrer l'adresse de chaussettes valide et le port. "
                                   "exemple socks5: 127.0.0.1:9050, socks: //127.0.0.1: 9050,"
                                   " socks5: //127.0.0.1: 9050 ou socks4: socks4: //127.0.0.1: 9050,"
                                   " authentification: socks: // nom d'utilisateur: mot de passe @ 127.0.0.1, "
                                   "socks4: // nom d'utilisateur: password@127.0.0.1, socks5: // nom d'utilisateur:"
                                   " password@127.0.0.1",
            "connection_retries": "Réessaie lorsque le délai d'attente de connexion (par défaut 3)",
            "ftp_connection_timeout": "connexion ftp à {0}: {1} délai d'expiration, ignorant {2}: {3}",
            "login_successful": "CONNECTÉ AVEC SUCCÈS!",
            "login_list_error": "CONNUS EN SUCCÈS, PERMISSION REFUSÉE POUR LA COMMANDE DE LISTE!",
            "ftp_connection_failed": "La connexion ftp à {0}: {1} a échoué, ignorant l'étape entière [processus "
                                     "{2} de {3}]! aller à la prochaine étape",
            "input_target_error": "La cible d'entrée pour le module {0} doit être DOMAIN, HTTP ou SINGLE_IPv4,"
                                  " en ignorant {1}",
            "user_pass_found": "utilisateur: {0} passer: {1} hôte: {2} port: {3} trouvé!",
            "file_listing_error": "(PAS DE PERMISSION POUR LES FICHIERS DE LISTE)",
            "trying_message": "essayer {0} sur {1} dans le processus {2} de {3} {4}: {5} ({6})",
            "smtp_connection_timeout": "Connexion smtp à {0}: {1} timeout, ignorez {2}: {3}",
            "smtp_connection_failed": "La connexion smtp à {0}: {1} a échoué, en sautant l'étape entière [processus "
                                      "{2} de {3}]! aller à la prochaine étape",
            "ssh_connection_timeout": "Connexion ssh à {0}: {1} timeout, ignorez {2}: {3}",
            "ssh_connection_failed": "La connexion ssh à {0}: {1} a échoué, en ignorant l'étape entière"
                                     " [processus {2} de {3}]! aller à la prochaine étape",
            "port/type": "{0} / {1}",
            "port_found": "hôte: {0} port: {1} ({2}) trouvé!",
            "target_submitted": "cible {0} soumise!",
            "current_version": "vous utilisez la version OWASP Nettacker {0} {1} {2} {6} avec le "
                               "nom de code {3} {4} {5}",
            "feature_unavailable": "cette fonctionnalité n'est pas encore disponible! S'il vous plaît exécuter"
                                   " \"git clone https://github.com/zdresearch/OWASP-Nettacker.git ou pip "
                                   "installer -U OWASP-Nettacker pour obtenir la dernière version.",
            "available_graph": "construire un graphique de toutes les activités et informations, vous devez"
                               " utiliser la sortie HTML. graphiques disponibles: {0}",
            "graph_output": "Pour utiliser la fonction graphique, votre nom de fichier de sortie doit se "
                            "terminer par \".html\" ou \".htm\"!",
            "build_graph": "graphique de construction ...",
            "finish_build_graph": "terminer le graphique de construction!",
            "pentest_graphs": "Graphiques de test de pénétration",
            "graph_message": "Ce graphique créé par OWASP Nettacker. Le graphique contient toutes les activités "
                             "des modules, la carte du réseau et les informations sensibles. Veuillez ne pas"
                             " partager ce fichier avec qui que ce soit s'il n'est pas fiable.",
            "nettacker_report": "Rapport OWASP Nettacker",
            "nettacker_version_details": "Détails sur le logiciel: OWASP Nettacker version {0} [{1}] dans {2}",
            "no_open_ports": "aucun port ouvert trouvé!",
            "no_user_passwords": "aucun utilisateur / mot de passe trouvé!",
            "loaded_modules": "{0} modules chargés ...",
            "graph_module_404": "ce module graphique n'est pas trouvé: {0}",
            "graph_module_unavailable": "ce module graphique \"{0}\" n'est pas disponible",
            "ping_before_scan": "ping avant de scanner l'hôte",
            "skipping_target": "ignorer la cible entière {0} et la méthode de scan {1} à cause de --ping-before-scan "
                               "est vrai et n'a pas répondu!",
            "not_last_version": "vous n'utilisez pas la dernière version d'OWASP Nettacker, veuillez mettre à jour.",
            "cannot_update": "ne peut pas vérifier la mise à jour, s'il vous plaît vérifier votre connexion Internet.",
            "last_version": "Vous utilisez la dernière version de OWASP Nettacker ...",
            "directoy_listing": "liste de répertoires trouvée dans {0}",
            "insert_port_message": "s'il vous plaît insérer le port à travers le commutateur -g ou --methods-args "
                                   "au lieu de l'URL",
            "http_connection_timeout": "Connexion http {0} timeout!",
            "wizard_mode": "démarrer le mode assistant",
            "directory_file_404": "aucun répertoire ou fichier trouvé pour {0} dans le port {1}",
            "open_error": "impossible d'ouvrir {0}",
            "dir_scan_get": "La valeur dir_scan_http_method doit être GET ou HEAD, définie par défaut sur GET.",
            "list_methods": "liste toutes les méthodes args",
            "module_args_error": "impossible d'obtenir les arguments du module {0}",
            "trying_process": "essayer {0} sur {1} dans le processus {2} de {3} sur {4} ({5})",
            "domain_found": "domaine trouvé: {0}",
            "TIME": "TEMPS",
            "CATEGORY": "CATÉGORIE",
            "module_pattern_404": "ne trouve aucun module avec le modèle {0}!",
            "enter_default": "veuillez entrer {0} | Par défaut [{1}]>",
            "enter_choices_default": "veuillez entrer {0} | choix [{1}] | Par défaut [{2}]>",
            "all_targets": "les cibles",
            "all_thread_numbers": "le numéro de fil",
            "out_file": "le nom du fichier de sortie",
            "all_scan_methods": "les méthodes de scan",
            "all_scan_methods_exclude": "les méthodes d'analyse pour exclure",
            "all_usernames": "les noms d'utilisateur",
            "all_passwords": "les mots de passe",
            "timeout_seconds": "les secondes d'expiration",
            "all_ports": "les numéros de port",
            "all_verbose_level": "le niveau verbeux",
            "all_socks_proxy": "le proxy des chaussettes",
            "retries_number": "le nombre de tentatives",
            "graph": "un graphique",
            "subdomain_found": "sous-domaine trouvé: {0}",
            "select_profile": "sélectionnez le profil {0}",
            "profile_404": "le profil \"{0}\" n'a pas été trouvé!",
            "waiting": "en attente de {0}",
            "vulnerable": "vulnérable à {0}",
            "target_vulnerable": "target {0}: {1} est vulnérable à {2}!",
            "no_vulnerability_found": "aucune vulnérabilité trouvée! ({0})",
            "Method": "Méthode",
            "API": "API",
            "API_options": "Options d'API",
            "start_API": "démarrer le service API",
            "API_host": "Adresse hôte de l'API",
            "API_port": "Numéro de port API",
            "API_debug": "Mode de débogage de l'API",
            "API_access_key": "Clé d'accès à l'API",
            "white_list_API": "autorisez simplement les hôtes de la liste blanche à se connecter à l'API",
            "define_whie_list": "définir des hôtes de liste blanche, séparés par, (exemples: 127.0.0.1, 192.168.0.1/24,"
                                " 10.0.0.1-10.0.0.255)",
            "gen_API_access_log": "générer un journal d'accès à l'API",
            "API_access_log_file": "Nom du fichier journal de l'accès API",
            "API_port_int": "Le port de l'API doit être un entier!",
            "unknown_ip_input": "Type d'entrée inconnu, les types acceptés sont SINGLE_IPv4, RANGE_IPv4, CIDR_IPv4",
            "API_key": "* Clé de l'API: {0}",
            "ports_int": "les ports doivent être des entiers! (par exemple 80 || 80,1080 || 80,1080"
                         "-1300,9000,12000-15000)",
            "through_API": "Grâce à l'API OWASP Nettacker",
            "API_invalid": "clé API non valide",
            "unauthorized_IP": "votre adresse IP n'est pas autorisée",
            "not_found": "Pas trouvé!",
            "no_subdomain_found": "subdomain_scan: aucun sous-domaine créé!",
            "viewdns_domain_404": "viewdns_reverse_ip_lookup_scan: aucun domaine trouvé!",
            "browser_session_valid": "votre session de navigateur est valide",
            "browser_session_killed": "votre session de navigateur a été tuée",
            "updating_database": "mettre à jour la base de données ...",
            "database_connect_fail": "Impossible de se connecter à la base de données!",
            "inserting_report_db": "insertion de rapport dans la base de données",
            "inserting_logs_db": "insertion de journaux dans la base de données",
            "removing_logs_db": "enlever les vieux logs de db",
            "len_subdomain_found": "{0} sous-domaine (s) trouvé (s)!",
            "len_domain_found": "{0} domaine (s) trouvé (s)!",
            "phpmyadmin_dir_404": "pas de répertoire phpmyadmin trouvé!",
            "DOS_send": "envoi de paquets DoS à {0}",
            "host_up": "{0} est en hausse! Le temps nécessaire pour effectuer un ping est de {1}",
            "host_down": "Impossible de pinguer {0}!",
            "root_required": "cela doit être exécuté en tant que root",
            "admin_scan_get": "La valeur admin_scan_http_method doit être GET ou HEAD, définie par défaut sur GET.",
            "telnet_connection_timeout": "Connexion telnet à {0}: {1} délai d'expiration, ignorant {2}: {3}",
            "telnet_connection_failed": "La connexion telnet à {0}: {1} a échoué, en sautant l'étape entière "
                                        "[processus {2} de {3}]! aller à la prochaine étape",
            "http_auth_success": "Succès de l'authentification de base http - hôte: {2}: {3}, utilisateur: {0}, "
                                 "réussite: {1} trouvé!",
            "http_auth_failed": "Échec de l'authentification de base HTTP à {0}: {3} utilisation de {1}: {2}",
            "http_form_auth_success": "Succès d'authentification du formulaire http - hôte: {2}: {3}, utilisateur: "
                                      "{0}, réussite: {1} trouvée!",
            "http_form_auth_failed": "Échec de l'authentification du formulaire http à {0}: {3} à l'aide de {1}: {2}",
            "http_ntlm_success": "Succès de l'authentification http ntlm - hôte: {2}: {3}, utilisateur: {0}, "
                                 "réussite: {1} trouvée!",
            "http_ntlm_failed": "Échec de l'authentification http ntlm à {0}: {3} à l'aide de {1}: {2}",
            "no_response": "ne peut pas obtenir de réponse de la cible",
            "category_framework": "catégorie: {0}, frameworks: {1} trouvé!",
            "nothing_found": "rien trouvé sur {0} dans {1}!",
            "no_auth": "Aucune autorisation trouvée sur {0}: {1}"
        }
| apache-2.0 |
geomajas/geomajas-project-graphics | graphics/src/main/java/org/geomajas/graphics/client/action/BringToFrontAction.java | 1196 | /*
* This is part of Geomajas, a GIS framework, http://www.geomajas.org/.
*
* Copyright 2008-2015 Geosparc nv, http://www.geosparc.com/, Belgium.
*
* The program is available in open source according to the Apache
* License, Version 2.0. All contributions in this program are covered
* by the Geomajas Contributors License Agreement. For full licensing
* details, see LICENSE.txt in the project root.
*/
package org.geomajas.graphics.client.action;
import org.geomajas.graphics.client.object.GraphicsObject;
import org.geomajas.graphics.client.operation.BringToFrontOperation;
import org.geomajas.graphics.client.resource.GraphicsResource;
/**
 * Action that brings a {@link GraphicsObject} to the front of the rendering order
 * by executing a {@link BringToFrontOperation} on the graphics service.
 * (The previous javadoc incorrectly described this as a delete action.)
 *
 * @author Jan De Moerloose
 * @author Jan Venstermans
 *
 */
public class BringToFrontAction extends AbstractAction {

    @Override
    protected String getDefaultLabel() {
        // Human-readable label comes from the i18n message resource bundle.
        return GraphicsResource.MESSAGES.actionLabelBringToFront();
    }

    @Override
    public boolean supports(GraphicsObject object) {
        // Bring-to-front applies to every graphics object.
        return true;
    }

    @Override
    public void execute(GraphicsObject object) {
        getService().execute(new BringToFrontOperation(object, getService()));
    }
}
| apache-2.0 |