repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
gwittel/platform | reporting/src/main/java/com/proofpoint/reporting/ReportUtils.java | 3030 | /*
* Copyright 2018 Proofpoint, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.proofpoint.reporting;
import java.io.BufferedWriter;
import java.io.IOException;
import java.util.Map.Entry;
import java.util.regex.Pattern;
public final class ReportUtils
{
    // A metric label may only contain [A-Za-z0-9_]; anything else is replaced with '_'.
    private static final Pattern LABEL_NOT_ACCEPTED_CHARACTER_PATTERN = Pattern.compile("[^A-Za-z0-9_]");
    // Used with lookingAt() to detect a label that begins with a digit.
    private static final Pattern INITIAL_DIGIT_PATTERN = Pattern.compile("[0-9]");

    private ReportUtils()
    {}

    /**
     * Decides whether a metric value is worth reporting.
     * Non-finite floating-point values are rejected, as are the MIN_VALUE/MAX_VALUE
     * sentinels of Long, Integer and Short. Any other object is accepted.
     *
     * @param value the candidate metric value
     * @return true if the value should be reported
     */
    public static boolean isReportable(Object value)
    {
        if (value instanceof Double) {
            Double d = (Double) value;
            return !d.isNaN() && !d.isInfinite();
        }
        if (value instanceof Float) {
            Float f = (Float) value;
            return !f.isNaN() && !f.isInfinite();
        }
        if (value instanceof Long) {
            return !value.equals(Long.MAX_VALUE) && !value.equals(Long.MIN_VALUE);
        }
        if (value instanceof Integer) {
            return !value.equals(Integer.MAX_VALUE) && !value.equals(Integer.MIN_VALUE);
        }
        if (value instanceof Short) {
            return !value.equals(Short.MAX_VALUE) && !value.equals(Short.MIN_VALUE);
        }
        return true;
    }

    /**
     * Writes a tag set of the form {key="value",key2="value2"} to the writer.
     * Keys are sanitized to [A-Za-z0-9_] and prefixed with '_' when they begin
     * with a digit; values have backslash, double quote and newline escaped.
     * Writes nothing at all when {@code tags} is empty.
     *
     * @param writer destination writer
     * @param tags   key/value pairs to serialize
     * @throws IOException if the underlying writer fails
     */
    static void writeTags(BufferedWriter writer, Iterable<Entry<String, String>> tags)
            throws IOException
    {
        char separator = '{';
        for (Entry<String, String> tag : tags) {
            writer.append(separator);
            separator = ',';
            String sanitizedLabel = LABEL_NOT_ACCEPTED_CHARACTER_PATTERN.matcher(tag.getKey()).replaceAll("_");
            if (INITIAL_DIGIT_PATTERN.matcher(sanitizedLabel).lookingAt()) {
                writer.append('_');
            }
            writer.write(sanitizedLabel);
            writer.append("=\"");
            appendEscapedValue(writer, tag.getValue());
            writer.append('"');
        }
        // The separator only flips to ',' once at least one tag was written,
        // so the closing brace is emitted exactly when an opening one was.
        if (separator == ',') {
            writer.append('}');
        }
    }

    // Escapes backslash, double quote and newline in a tag value.
    private static void appendEscapedValue(BufferedWriter writer, String value)
            throws IOException
    {
        for (int i = 0; i < value.length(); i++) {
            char c = value.charAt(i);
            if (c == '\\') {
                writer.append("\\\\");
            }
            else if (c == '\"') {
                writer.append("\\\"");
            }
            else if (c == '\n') {
                writer.append("\\n");
            }
            else {
                writer.append(c);
            }
        }
    }
}
| apache-2.0 |
lshain-android-source/tools-idea | xml/impl/src/com/intellij/codeInsight/template/emmet/nodes/MulOperationNode.java | 1941 | /*
* Copyright 2000-2010 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.template.emmet.nodes;
import com.intellij.codeInsight.template.CustomTemplateCallback;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.List;
/**
* @author Eugene.Kudelevsky
*/
/**
 * Emmet multiplication node ("expr*N"): expands its left operand N times,
 * where N is the right operand.
 *
 * @author Eugene.Kudelevsky
 */
public class MulOperationNode extends ZenCodingNode {
  private final ZenCodingNode myOperand;
  private final int myMultiplier;

  public MulOperationNode(ZenCodingNode leftOperand, int rightOperand) {
    myOperand = leftOperand;
    myMultiplier = rightOperand;
  }

  public ZenCodingNode getLeftOperand() {
    return myOperand;
  }

  public int getRightOperand() {
    return myMultiplier;
  }

  @NotNull
  @Override
  public List<GenerationNode> expand(int numberInIteration,
                                     int totalIterations, String surroundedText,
                                     CustomTemplateCallback callback,
                                     boolean insertSurroundedTextAtTheEnd, GenerationNode parent) {
    // Expand the operand once per repetition; each expansion receives its own
    // iteration index and the multiplier as the total iteration count.
    List<GenerationNode> nodes = new ArrayList<GenerationNode>();
    for (int iteration = 0; iteration < myMultiplier; iteration++) {
      nodes.addAll(myOperand.expand(iteration, myMultiplier, surroundedText, callback, insertSurroundedTextAtTheEnd, parent));
    }
    return nodes;
  }

  @Override
  public String toString() {
    return "*";
  }
}
| apache-2.0 |
tlong2/amphtml | validator/java/src/test/java/dev/amp/validator/utils/AttributeSpecUtilsTest.java | 16381 | /*
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
/*
* Changes to the original project are Copyright 2019, Verizon Media Inc..
*/
package dev.amp.validator.utils;
import dev.amp.validator.ValidatorProtos;
import dev.amp.validator.Context;
import dev.amp.validator.ParsedAttrSpec;
import dev.amp.validator.ParsedAttrSpecs;
import dev.amp.validator.ParsedHtmlTag;
import dev.amp.validator.ParsedTagSpec;
import dev.amp.validator.ParsedValidatorRules;
import dev.amp.validator.TagStack;
import dev.amp.validator.css.CssValidationException;
import dev.amp.validator.exception.TagValidationException;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.xml.sax.Attributes;
import org.xml.sax.Locator;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
/**
* Test for {@link AttributeSpecUtils}
*
* @author GeorgeLuo
*/
public class AttributeSpecUtilsTest {

    @Test
    public void testIsUsedForTypeIdentifiers() {
        final List<String> typeIdentifiers = new ArrayList<>();
        final List<String> enabledBys = new ArrayList<>();
        final List<String> disabledBys = new ArrayList<>();
        typeIdentifiers.add("amp4email");
        enabledBys.add("enablingId");
        typeIdentifiers.add("transformed");
        // Enabled-by id not among the type identifiers -> not used.
        Assert.assertFalse(AttributeSpecUtils.isUsedForTypeIdentifiers(typeIdentifiers, enabledBys, disabledBys));
        typeIdentifiers.add("enablingId");
        Assert.assertTrue(AttributeSpecUtils.isUsedForTypeIdentifiers(typeIdentifiers, enabledBys, disabledBys));
        enabledBys.clear();
        typeIdentifiers.add("disablingId");
        disabledBys.add("disablingId");
        // A matching disabled-by id disables usage.
        Assert.assertFalse(AttributeSpecUtils.isUsedForTypeIdentifiers(typeIdentifiers, enabledBys, disabledBys));
        typeIdentifiers.clear();
        Assert.assertTrue(AttributeSpecUtils.isUsedForTypeIdentifiers(typeIdentifiers, enabledBys, disabledBys));
        enabledBys.clear();
        disabledBys.clear();
        // With no enabled-by/disabled-by constraints at all, usage defaults to true.
        Assert.assertTrue(AttributeSpecUtils.isUsedForTypeIdentifiers(typeIdentifiers, enabledBys, disabledBys));
    }

    // TODO : write assertions for mocks that touch notable objects

    /**
     * Drives validateAttributes through four scenarios and verifies the
     * error/warning codes recorded on the mocked Context:
     * IMPLIED_LAYOUT_INVALID (+ DEPRECATED_ATTR warning), DISALLOWED_ATTR,
     * INVALID_ATTR_VALUE and BASE_TAG_MUST_PRECEED_ALL_URLS.
     */
    @Test
    public void testValidateAttributes() {
        // IMPLIED_LAYOUT_INVALID
        final ValidatorProtos.AmpLayout.Builder ampLayoutBuilder = ValidatorProtos.AmpLayout.newBuilder();
        ValidatorProtos.TagSpec.Builder tagSpecBuilder = ValidatorProtos.TagSpec.newBuilder();
        tagSpecBuilder.setAmpLayout(ampLayoutBuilder.build());
        ValidatorProtos.AttrSpec.Builder attrSpecBuilder = ValidatorProtos.AttrSpec.newBuilder();
        attrSpecBuilder.setDeprecation("deprecationString");
        List<ValidatorProtos.AttrSpec> attrSpecs = new ArrayList<>();
        attrSpecs.add(attrSpecBuilder.build());
        Map<String, ValidatorProtos.AttrSpec> attrsByName = new HashMap<>();
        attrsByName.put("HTML", attrSpecBuilder.build());
        ParsedTagSpec parsedTagSpec = Mockito.mock(ParsedTagSpec.class);
        Mockito.when(parsedTagSpec.getSpec()).thenReturn(tagSpecBuilder.build());
        Mockito.when(parsedTagSpec.getImplicitAttrspecs()).thenReturn(attrSpecs);
        Mockito.when(parsedTagSpec.getAttrsByName()).thenReturn(attrsByName);
        ParsedTagSpec bestMatchReferencePoint = Mockito.mock(ParsedTagSpec.class);
        TagStack tagStack = Mockito.mock(TagStack.class);
        ParsedAttrSpec parsedAttrSpec = Mockito.mock(ParsedAttrSpec.class);
        Mockito.when(parsedAttrSpec.isUsedForTypeIdentifiers(Mockito.anyList())).thenReturn(true);
        Mockito.when(parsedAttrSpec.getSpec()).thenReturn(attrSpecBuilder.build());
        ParsedAttrSpecs parsedAttrSpecs = Mockito.mock(ParsedAttrSpecs.class);
        Mockito.when(parsedAttrSpecs.getParsedAttrSpec("HTML", "htmlValue",
                attrSpecBuilder.build())).thenReturn(parsedAttrSpec);
        ParsedValidatorRules parsedValidatorRules = Mockito.mock(ParsedValidatorRules.class);
        Mockito.when(parsedValidatorRules.getParsedAttrSpecs()).thenReturn(parsedAttrSpecs);
        Context context = Mockito.mock(Context.class);
        Mockito.when(context.getTagStack()).thenReturn(tagStack);
        Mockito.when(context.getRules()).thenReturn(parsedValidatorRules);
        Attributes attributes = Mockito.mock(Attributes.class);
        Mockito.when(attributes.getLength()).thenReturn(1);
        Mockito.when(attributes.getLocalName(0)).thenReturn("HTML");
        Mockito.when(attributes.getValue(0)).thenReturn("htmlValue");
        ParsedHtmlTag encounteredTag = Mockito.mock(ParsedHtmlTag.class);
        Mockito.when(encounteredTag.upperName()).thenReturn("HTML");
        Mockito.when(encounteredTag.attrs()).thenReturn(attributes);
        ValidatorProtos.ValidationResult.Builder result = ValidatorProtos.ValidationResult.newBuilder();
        try {
            AttributeSpecUtils.validateAttributes(parsedTagSpec, bestMatchReferencePoint, context, encounteredTag, result);
        } catch (TagValidationException | IOException | CssValidationException e) {
            // Fail the test instead of silently printing; continuing after an
            // unexpected exception would produce misleading verification errors.
            Assert.fail("validateAttributes threw an unexpected exception", e);
        }
        ArgumentCaptor<List> listCaptor = ArgumentCaptor.forClass(List.class);
        ArgumentCaptor<ValidatorProtos.ValidationError.Code> errorCodeCapture = ArgumentCaptor.forClass(ValidatorProtos.ValidationError.Code.class);
        ArgumentCaptor<ValidatorProtos.ValidationError.Code> warningCodeCapture = ArgumentCaptor.forClass(ValidatorProtos.ValidationError.Code.class);
        Mockito.verify(context, Mockito.times(1)).addError(errorCodeCapture.capture(),
                Mockito.any(Locator.class),
                listCaptor.capture(),
                Mockito.anyString(),
                Mockito.any(ValidatorProtos.ValidationResult.Builder.class));
        Mockito.verify(context, Mockito.times(1)).addWarning(warningCodeCapture.capture(),
                Mockito.any(Locator.class),
                listCaptor.capture(),
                Mockito.anyString(),
                Mockito.any(ValidatorProtos.ValidationResult.Builder.class));
        Assert.assertEquals(errorCodeCapture.getValue(), ValidatorProtos.ValidationError.Code.IMPLIED_LAYOUT_INVALID);
        Assert.assertEquals(warningCodeCapture.getValue(), ValidatorProtos.ValidationError.Code.DEPRECATED_ATTR);

        // DISALLOWED_ATTR
        tagSpecBuilder = ValidatorProtos.TagSpec.newBuilder();
        attrSpecBuilder = ValidatorProtos.AttrSpec.newBuilder();
        // attrSpecBuilder.setDeprecation("deprecationString");
        attrSpecs = new ArrayList<>();
        attrSpecs.add(attrSpecBuilder.build());
        attrsByName = new HashMap<>();
        attrsByName.put("HTML", attrSpecBuilder.build());
        parsedTagSpec = Mockito.mock(ParsedTagSpec.class);
        Mockito.when(parsedTagSpec.getSpec()).thenReturn(tagSpecBuilder.build());
        Mockito.when(parsedTagSpec.getImplicitAttrspecs()).thenReturn(attrSpecs);
        Mockito.when(parsedTagSpec.getAttrsByName()).thenReturn(attrsByName);
        bestMatchReferencePoint = Mockito.mock(ParsedTagSpec.class);
        tagStack = Mockito.mock(TagStack.class);
        parsedAttrSpec = Mockito.mock(ParsedAttrSpec.class);
        // Attr spec not used for these type identifiers -> attribute is disallowed.
        Mockito.when(parsedAttrSpec.isUsedForTypeIdentifiers(Mockito.anyList())).thenReturn(false);
        Mockito.when(parsedAttrSpec.getSpec()).thenReturn(attrSpecBuilder.build());
        parsedAttrSpecs = Mockito.mock(ParsedAttrSpecs.class);
        Mockito.when(parsedAttrSpecs.getParsedAttrSpec("HTML", "htmlValue", attrSpecBuilder.build())).thenReturn(parsedAttrSpec);
        parsedValidatorRules = Mockito.mock(ParsedValidatorRules.class);
        Mockito.when(parsedValidatorRules.getParsedAttrSpecs()).thenReturn(parsedAttrSpecs);
        context = Mockito.mock(Context.class);
        Mockito.when(context.getTagStack()).thenReturn(tagStack);
        Mockito.when(context.getRules()).thenReturn(parsedValidatorRules);
        attributes = Mockito.mock(Attributes.class);
        Mockito.when(attributes.getLength()).thenReturn(1);
        Mockito.when(attributes.getLocalName(0)).thenReturn("HTML");
        Mockito.when(attributes.getValue(0)).thenReturn("htmlValue");
        encounteredTag = Mockito.mock(ParsedHtmlTag.class);
        Mockito.when(encounteredTag.upperName()).thenReturn("HTML");
        Mockito.when(encounteredTag.attrs()).thenReturn(attributes);
        result = ValidatorProtos.ValidationResult.newBuilder();
        try {
            AttributeSpecUtils.validateAttributes(parsedTagSpec, bestMatchReferencePoint, context, encounteredTag, result);
        } catch (TagValidationException | IOException | CssValidationException e) {
            Assert.fail("validateAttributes threw an unexpected exception", e);
        }
        listCaptor = ArgumentCaptor.forClass(List.class);
        errorCodeCapture = ArgumentCaptor.forClass(ValidatorProtos.ValidationError.Code.class);
        Mockito.verify(context, Mockito.times(1)).addError(errorCodeCapture.capture(),
                Mockito.any(Locator.class),
                listCaptor.capture(),
                Mockito.anyString(),
                Mockito.any(ValidatorProtos.ValidationResult.Builder.class));
        Assert.assertEquals(errorCodeCapture.getValue(), ValidatorProtos.ValidationError.Code.DISALLOWED_ATTR);

        // INVALID_ATTR_VALUE
        tagSpecBuilder = ValidatorProtos.TagSpec.newBuilder();
        attrSpecBuilder = ValidatorProtos.AttrSpec.newBuilder();
        attrSpecBuilder.setBlacklistedValueRegex("");
        attrSpecs = new ArrayList<>();
        attrSpecs.add(attrSpecBuilder.build());
        attrsByName = new HashMap<>();
        attrsByName.put("HTML", attrSpecBuilder.build());
        parsedTagSpec = Mockito.mock(ParsedTagSpec.class);
        Mockito.when(parsedTagSpec.getSpec()).thenReturn(tagSpecBuilder.build());
        Mockito.when(parsedTagSpec.getImplicitAttrspecs()).thenReturn(attrSpecs);
        Mockito.when(parsedTagSpec.getAttrsByName()).thenReturn(attrsByName);
        bestMatchReferencePoint = Mockito.mock(ParsedTagSpec.class);
        tagStack = Mockito.mock(TagStack.class);
        parsedAttrSpec = Mockito.mock(ParsedAttrSpec.class);
        Mockito.when(parsedAttrSpec.isUsedForTypeIdentifiers(Mockito.anyList())).thenReturn(true);
        Mockito.when(parsedAttrSpec.getSpec()).thenReturn(attrSpecBuilder.build());
        parsedAttrSpecs = Mockito.mock(ParsedAttrSpecs.class);
        Mockito.when(parsedAttrSpecs.getParsedAttrSpec("HTML", "htmlValue", attrSpecBuilder.build())).thenReturn(parsedAttrSpec);
        parsedValidatorRules = Mockito.mock(ParsedValidatorRules.class);
        Mockito.when(parsedValidatorRules.getParsedAttrSpecs()).thenReturn(parsedAttrSpecs);
        // Empty pattern matches any value, so the blacklisted-value check trips.
        Mockito.when(parsedValidatorRules.getPartialMatchCaseiRegex(Mockito.anyString())).thenReturn(Pattern.compile(""));
        context = Mockito.mock(Context.class);
        Mockito.when(context.getTagStack()).thenReturn(tagStack);
        Mockito.when(context.getRules()).thenReturn(parsedValidatorRules);
        attributes = Mockito.mock(Attributes.class);
        Mockito.when(attributes.getLength()).thenReturn(1);
        Mockito.when(attributes.getLocalName(0)).thenReturn("HTML");
        Mockito.when(attributes.getValue(0)).thenReturn("htmlValue");
        encounteredTag = Mockito.mock(ParsedHtmlTag.class);
        Mockito.when(encounteredTag.upperName()).thenReturn("HTML");
        Mockito.when(encounteredTag.attrs()).thenReturn(attributes);
        result = ValidatorProtos.ValidationResult.newBuilder();
        try {
            AttributeSpecUtils.validateAttributes(parsedTagSpec, bestMatchReferencePoint, context, encounteredTag, result);
        } catch (TagValidationException | IOException | CssValidationException e) {
            Assert.fail("validateAttributes threw an unexpected exception", e);
        }
        listCaptor = ArgumentCaptor.forClass(List.class);
        errorCodeCapture = ArgumentCaptor.forClass(ValidatorProtos.ValidationError.Code.class);
        Mockito.verify(context, Mockito.times(1)).addError(errorCodeCapture.capture(),
                Mockito.any(Locator.class),
                listCaptor.capture(),
                Mockito.anyString(),
                Mockito.any(ValidatorProtos.ValidationResult.Builder.class));
        Assert.assertEquals(errorCodeCapture.getValue(), ValidatorProtos.ValidationError.Code.INVALID_ATTR_VALUE);

        // BASE_TAG_MUST_PRECEED_ALL_URLS
        tagSpecBuilder = ValidatorProtos.TagSpec.newBuilder();
        tagSpecBuilder.setTagName("BASE");
        attrSpecBuilder = ValidatorProtos.AttrSpec.newBuilder();
        attrSpecs = new ArrayList<>();
        attrSpecs.add(attrSpecBuilder.build());
        attrsByName = new HashMap<>();
        attrsByName.put("attr", attrSpecBuilder.build());
        attrsByName.put("href", attrSpecBuilder.build());
        parsedTagSpec = Mockito.mock(ParsedTagSpec.class);
        Mockito.when(parsedTagSpec.getSpec()).thenReturn(tagSpecBuilder.build());
        Mockito.when(parsedTagSpec.getImplicitAttrspecs()).thenReturn(attrSpecs);
        Mockito.when(parsedTagSpec.getAttrsByName()).thenReturn(attrsByName);
        bestMatchReferencePoint = Mockito.mock(ParsedTagSpec.class);
        tagStack = Mockito.mock(TagStack.class);
        parsedAttrSpec = Mockito.mock(ParsedAttrSpec.class);
        Mockito.when(parsedAttrSpec.isUsedForTypeIdentifiers(Mockito.anyList())).thenReturn(true);
        Mockito.when(parsedAttrSpec.getSpec()).thenReturn(attrSpecBuilder.build());
        parsedAttrSpecs = Mockito.mock(ParsedAttrSpecs.class);
        Mockito.when(parsedAttrSpecs.getParsedAttrSpec("attr", "attrValue", attrSpecBuilder.build())).thenReturn(parsedAttrSpec);
        Mockito.when(parsedAttrSpecs.getParsedAttrSpec("href", "hrefValue", attrSpecBuilder.build())).thenReturn(parsedAttrSpec);
        parsedValidatorRules = Mockito.mock(ParsedValidatorRules.class);
        Mockito.when(parsedValidatorRules.getParsedAttrSpecs()).thenReturn(parsedAttrSpecs);
        Mockito.when(parsedValidatorRules.getPartialMatchCaseiRegex(Mockito.anyString())).thenReturn(Pattern.compile(""));
        context = Mockito.mock(Context.class);
        Mockito.when(context.getTagStack()).thenReturn(tagStack);
        Mockito.when(context.getRules()).thenReturn(parsedValidatorRules);
        // A URL was already seen, so a BASE tag with href must be rejected.
        Mockito.when(context.hasSeenUrl()).thenReturn(true);
        attributes = Mockito.mock(Attributes.class);
        Mockito.when(attributes.getLength()).thenReturn(1);
        Mockito.when(attributes.getLocalName(0)).thenReturn("href");
        Mockito.when(attributes.getValue(0)).thenReturn("hrefValue");
        encounteredTag = Mockito.mock(ParsedHtmlTag.class);
        Mockito.when(encounteredTag.upperName()).thenReturn("BASE");
        Mockito.when(encounteredTag.attrs()).thenReturn(attributes);
        result = ValidatorProtos.ValidationResult.newBuilder();
        try {
            AttributeSpecUtils.validateAttributes(parsedTagSpec, bestMatchReferencePoint, context, encounteredTag, result);
        } catch (TagValidationException | IOException | CssValidationException e) {
            Assert.fail("validateAttributes threw an unexpected exception", e);
        }
        listCaptor = ArgumentCaptor.forClass(List.class);
        errorCodeCapture = ArgumentCaptor.forClass(ValidatorProtos.ValidationError.Code.class);
        Mockito.verify(context, Mockito.times(1)).addError(errorCodeCapture.capture(),
                Mockito.any(Locator.class),
                listCaptor.capture(),
                Mockito.anyString(),
                Mockito.any(ValidatorProtos.ValidationResult.Builder.class));
        Assert.assertEquals(errorCodeCapture.getValue(), ValidatorProtos.ValidationError.Code.BASE_TAG_MUST_PRECEED_ALL_URLS);
    }

    // The tests below are placeholders awaiting implementation.

    @Test
    public void testValidateAttrRequiredExtensions() {
    }

    @Test
    public void testValidateAttrDeclaration() {
    }

    @Test
    public void testAttrValueHasTemplateSyntax() {
    }

    @Test
    public void testValidateNonTemplateAttrValueAgainstSpec() {
    }

    @Test
    public void testValidateAttrValueProperties() {
    }

    @Test
    public void testValidateAttrValueUrl() {
    }

    @Test
    public void testValidateUrlAndProtocol() {
    }

    @Test
    public void testValidateLayout() {
    }

    @Test
    public void testValidateAttributeInExtension() {
    }

    @Test
    public void testGetExtensionNameAttribute() {
    }

    @Test
    public void testValidateAttrNotFoundInSpec() {
    }

    @Test
    public void testValidateAttrValueBelowTemplateTag() {
    }

    @Test
    public void testAttrValueHasPartialsTemplateSyntax() {
    }

    @Test
    public void testAttrValueHasUnescapedTemplateSyntax() {
    }
}
| apache-2.0 |
openstack/powervc-driver | nova-powervc/test/hostmaintenanceclient/v1_1/__init__.py | 27 | # Copyright 2014 IBM Corp.
| apache-2.0 |
keith-turner/accumulo | core/src/test/java/org/apache/accumulo/core/client/lexicoder/SequenceLexicoderTest.java | 3192 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.client.lexicoder;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;
import org.apache.accumulo.core.clientImpl.lexicoder.AbstractLexicoderTest;
import org.apache.accumulo.core.util.TextUtil;
import org.apache.hadoop.io.Text;
import org.junit.Test;
/**
* Unit tests for {@link SequenceLexicoder}.
*/
/**
 * Unit tests for {@link SequenceLexicoder}.
 */
public class SequenceLexicoderTest extends AbstractLexicoderTest {

  private final List<String> nodata = emptyList();
  private final List<String> data0 = singletonList("");
  private final List<String> data1 = asList("a", "b");
  private final List<String> data2 = singletonList("a");
  private final List<String> data3 = asList("a", "c");
  private final List<String> data4 = asList("a", "b", "c");
  private final List<String> data5 = asList("b", "a");

  @Test
  public void testSortOrder() {
    // The fixtures listed in their expected encoded sort order.
    final List<List<String>> expected = asList(nodata, data0, data2, data1, data4, data3, data5);
    final SequenceLexicoder<String> lexicoder = new SequenceLexicoder<>(new StringLexicoder());
    // Encode every fixture and let the TreeSet sort the encodings lexicographically.
    final TreeSet<Text> encoded = new TreeSet<>();
    for (final List<String> value : expected) {
      encoded.add(new Text(lexicoder.encode(value)));
    }
    // Decode in sorted order; the result must match the expected ordering.
    final List<List<String>> decoded = new ArrayList<>();
    for (final Text text : encoded) {
      decoded.add(lexicoder.decode(TextUtil.getBytes(text)));
    }
    assertEquals(expected, decoded);
  }

  @Test
  public void testDecodes() {
    // Every fixture must survive an encode/decode round trip.
    for (final List<String> data : asList(nodata, data0, data1, data2, data3, data4, data5)) {
      assertDecodes(new SequenceLexicoder<>(new StringLexicoder()), data);
    }
  }

  @Test(expected = IllegalArgumentException.class)
  public void tesRejectsTrailingBytes() {
    // A stray trailing byte after the encoded sequence must be rejected.
    new SequenceLexicoder<>(new StringLexicoder()).decode(new byte[] {10});
  }
}
| apache-2.0 |
gladyscarrizales/manifoldcf | connectors/filenet/build-stub/src/main/java/com/filenet/api/collection/PropertyDescriptionList.java | 1073 | /* $Id: AccessPermissionList.java 1342799 2012-05-25 20:25:51Z kwright $ */
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.filenet.api.collection;
/**
 * Stub interface to allow the connector to build fully.
 * Stands in for the FileNet API collection type of the same name; it declares
 * no members of its own and only inherits from the stub parent interfaces.
 */
public interface PropertyDescriptionList extends DependentObjectList, EngineCollection
{
}
| apache-2.0 |
wwjiang007/flink | flink-end-to-end-tests/flink-end-to-end-tests-pulsar/src/test/java/org/apache/flink/tests/util/pulsar/common/FlinkContainerWithPulsarEnvironment.java | 2311 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.tests.util.pulsar.common;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.tests.util.TestUtils;
import org.apache.flink.tests.util.flink.FlinkContainerTestEnvironment;
import static org.apache.flink.configuration.TaskManagerOptions.TASK_OFF_HEAP_MEMORY;
/** A Flink Container which would bundles pulsar connector in its classpath. */
/** A Flink container test environment that bundles the Pulsar connector jars on its classpath. */
public class FlinkContainerWithPulsarEnvironment extends FlinkContainerTestEnvironment {

    public FlinkContainerWithPulsarEnvironment(int numTaskManagers, int numSlotsPerTaskManager) {
        // The jar list is shipped into the container so jobs can use the Pulsar connector.
        super(
                flinkConfiguration(),
                numTaskManagers,
                numSlotsPerTaskManager,
                resourcePath("pulsar-connector.jar"),
                resourcePath("pulsar-client-all.jar"),
                resourcePath("pulsar-client-api.jar"),
                resourcePath("pulsar-admin-api.jar"),
                resourcePath("jul-to-slf4j.jar"));
    }

    /** Resolves the named jar from the test resources to an absolute path string. */
    private static String resourcePath(String jarName) {
        return TestUtils.getResource(jarName).toAbsolutePath().toString();
    }

    /** Builds the Flink configuration used by this environment. */
    protected static Configuration flinkConfiguration() {
        final Configuration config = new Configuration();
        // Increase the off heap memory for avoiding direct buffer memory error on Pulsar e2e tests.
        config.set(TASK_OFF_HEAP_MEMORY, MemorySize.ofMebiBytes(100));
        return config;
    }
}
| apache-2.0 |
jrwest/cassandra | test/simulator/main/org/apache/cassandra/simulator/systems/Failures.java | 2043 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.simulator.systems;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;
import org.apache.cassandra.utils.concurrent.Ref;
import org.apache.cassandra.utils.memory.BufferPool;
/**
* A simple encapsulation for capturing and reporting failures during the simulation
*/
/**
 * A simple encapsulation for capturing and reporting failures during the simulation
 */
public class Failures implements Consumer<Throwable>, BufferPool.DebugLeaks, Ref.OnLeak
{
    private final List<Throwable> failures = Collections.synchronizedList(new ArrayList<>());
    // Volatile so hasFailure() can be polled cheaply from other threads.
    private volatile boolean hasFailure;

    /**
     * Records a failure and raises the hasFailure flag.
     *
     * @param t the failure to record
     */
    public void onFailure(Throwable t)
    {
        failures.add(t);
        hasFailure = true;
    }

    /**
     * @return true once any failure (including a detected leak) has been recorded
     */
    public boolean hasFailure()
    {
        return hasFailure;
    }

    /**
     * @return an unmodifiable view of the failures recorded so far
     */
    public List<Throwable> get()
    {
        return Collections.unmodifiableList(failures);
    }

    @Override
    public void accept(Throwable throwable)
    {
        onFailure(throwable);
    }

    @Override
    public void leak()
    {
        // Route through onFailure() so hasFailure is raised; previously the leak was
        // appended to the list without marking the run as failed, leaving hasFailure()
        // inconsistent with get().
        onFailure(new AssertionError("ChunkCache leak detected"));
    }

    @Override
    public void onLeak(Object state)
    {
        // Same as leak(): record via onFailure() so the flag stays consistent.
        onFailure(new AssertionError("Ref leak detected " + state.toString()));
    }
}
| apache-2.0 |
palecur/elasticsearch | test/framework/src/main/java/org/elasticsearch/test/rest/section/DoSection.java | 6660 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.rest.section;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.rest.RestTestExecutionContext;
import org.elasticsearch.test.rest.client.RestTestResponse;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.common.collect.Tuple.tuple;
import static org.elasticsearch.test.hamcrest.RegexMatcher.matches;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
/**
* Represents a do section:
*
* - do:
* catch: missing
* headers:
* Authorization: Basic user:pass
* Content-Type: application/json
* update:
* index: test_1
* type: test
* id: 1
* body: { doc: { foo: bar } }
*
*/
public class DoSection implements ExecutableSection {

    private static final ESLogger logger = Loggers.getLogger(DoSection.class);

    private String catchParam;
    private ApiCallSection apiCallSection;

    /** @return the expected failure kind for this do section, or null when success is expected */
    public String getCatch() {
        return catchParam;
    }

    public void setCatch(String catchParam) {
        this.catchParam = catchParam;
    }

    public ApiCallSection getApiCallSection() {
        return apiCallSection;
    }

    public void setApiCallSection(ApiCallSection apiCallSection) {
        this.apiCallSection = apiCallSection;
    }

    /**
     * Executes the API call and checks the outcome against the optional catch clause:
     * a named catch maps to an expected status code, a /regex/ catch matches the error
     * message of a 4xx/5xx response, and no catch means the call must succeed.
     */
    @Override
    public void execute(RestTestExecutionContext executionContext) throws IOException {
        if ("param".equals(catchParam)) {
            //client should throw validation error before sending request
            //lets just return without doing anything as we don't have any client to test here
            logger.info("found [catch: param], no request sent");
            return;
        }
        try {
            RestTestResponse restTestResponse = executionContext.callApi(apiCallSection.getApi(), apiCallSection.getParams(),
                    apiCallSection.getBodies(), apiCallSection.getHeaders());
            if (Strings.hasLength(catchParam)) {
                String catchStatusCode;
                if (catches.containsKey(catchParam)) {
                    catchStatusCode = catches.get(catchParam).v1();
                // length check added for consistency with the regex branch in the
                // catch handler below, so a bare "/" is rejected in both places
                } else if (catchParam.length() > 2 && catchParam.startsWith("/") && catchParam.endsWith("/")) {
                    catchStatusCode = "4xx|5xx";
                } else {
                    throw new UnsupportedOperationException("catch value [" + catchParam + "] not supported");
                }
                // The call succeeded but a failure was expected.
                fail(formatStatusCodeMessage(restTestResponse, catchStatusCode));
            }
        } catch(ResponseException e) {
            RestTestResponse restTestResponse = new RestTestResponse(e);
            if (!Strings.hasLength(catchParam)) {
                // No catch clause: the call was expected to succeed.
                fail(formatStatusCodeMessage(restTestResponse, "2xx"));
            } else if (catches.containsKey(catchParam)) {
                assertStatusCode(restTestResponse);
            } else if (catchParam.length() > 2 && catchParam.startsWith("/") && catchParam.endsWith("/")) {
                //the text of the error message matches regular expression
                assertThat(formatStatusCodeMessage(restTestResponse, "4xx|5xx"),
                        e.getResponse().getStatusLine().getStatusCode(), greaterThanOrEqualTo(400));
                Object error = executionContext.response("error");
                assertThat("error was expected in the response", error, notNullValue());
                //remove delimiters from regex
                String regex = catchParam.substring(1, catchParam.length() - 1);
                assertThat("the error message was expected to match the provided regex but didn't",
                        error.toString(), matches(regex));
            } else {
                throw new UnsupportedOperationException("catch value [" + catchParam + "] not supported");
            }
        }
    }

    // Asserts that the response status code matches the one mapped from catchParam.
    private void assertStatusCode(RestTestResponse restTestResponse) {
        Tuple<String, org.hamcrest.Matcher<Integer>> stringMatcherTuple = catches.get(catchParam);
        assertThat(formatStatusCodeMessage(restTestResponse, stringMatcherTuple.v1()),
                restTestResponse.getStatusCode(), stringMatcherTuple.v2());
    }

    // Builds an assertion message describing expected vs actual status code.
    private String formatStatusCodeMessage(RestTestResponse restTestResponse, String expected) {
        String api = apiCallSection.getApi();
        if ("raw".equals(api)) {
            api += "[method=" + apiCallSection.getParams().get("method") + " path=" + apiCallSection.getParams().get("path") + "]";
        }
        return "expected [" + expected + "] status code but api [" + api + "] returned [" + restTestResponse.getStatusCode() +
                " " + restTestResponse.getReasonPhrase() + "] [" + restTestResponse.getBodyAsString() + "]";
    }

    // Maps each named catch value to its status code string and matcher.
    // Declared final: the map is populated once in the static initializer and never mutated afterwards.
    private static final Map<String, Tuple<String, org.hamcrest.Matcher<Integer>>> catches = new HashMap<>();

    static {
        catches.put("missing", tuple("404", equalTo(404)));
        catches.put("conflict", tuple("409", equalTo(409)));
        catches.put("forbidden", tuple("403", equalTo(403)));
        catches.put("request_timeout", tuple("408", equalTo(408)));
        catches.put("unavailable", tuple("503", equalTo(503)));
        catches.put("request", tuple("4xx|5xx",
                allOf(greaterThanOrEqualTo(400), not(equalTo(404)), not(equalTo(408)), not(equalTo(409)), not(equalTo(403)))));
    }
}
| apache-2.0 |
aslakknutsen/fabric8 | sandbox/fabric/fabric-commands/src/main/java/io/fabric8/commands/support/PidCompleter.java | 4775 | /**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.commands.support;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import io.fabric8.api.scr.AbstractComponent;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.apache.karaf.shell.console.Completer;
import org.apache.karaf.shell.console.completer.StringsCompleter;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.service.cm.Configuration;
import org.osgi.service.cm.ConfigurationAdmin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Component(immediate = true)
@Service({PidCompleter.class, Completer.class})
public class PidCompleter extends AbstractComponent implements Completer {
private static final Logger LOGGER = LoggerFactory.getLogger(PidCompleter.class);
private final StringsCompleter delegate = new StringsCompleter();
@Reference
private ConfigurationAdmin configurationAdmin;
@Activate
void activate() {
Configuration[] configs;
try {
configs = configurationAdmin.listConfigurations(null);
if (configs == null) {
return;
}
} catch (Exception e) {
return;
}
Collection<String> pids = new ArrayList<String>();
for (Configuration config : configs) {
delegate.getStrings().addAll(getPidWithKeys(config.getPid()));
}
activateComponent();
}
@Deactivate
void deactivate() {
deactivateComponent();
}
public ConfigurationAdmin getConfigurationAdmin() {
return configurationAdmin;
}
public void setConfigurationAdmin(ConfigurationAdmin configurationAdmin) {
this.configurationAdmin = configurationAdmin;
}
public int complete(final String buffer, final int cursor, final List candidates) {
int firstPass = delegate.complete(buffer, cursor, candidates);
if (firstPass < 0) {
updateAllPids();
return delegate.complete(buffer, cursor, candidates);
} else {
return firstPass;
}
}
/**
* Updates all Pids.
*/
private void updateAllPids() {
Configuration[] configurations = null;
try {
configurations = configurationAdmin.listConfigurations(null);
if (configurations != null) {
for (Configuration configuration:configurations) {
delegate.getStrings().addAll(getPidWithKeys(configuration.getPid()));
}
}
} catch (Exception e) {
LOGGER.warn("Could not lookup pids from configuration admin.");
}
}
/**
* Returns a Set of Stings that contains all keys of the pid prefixed with the pid itself.
*/
private Set<String> getPidWithKeys(String pid) {
Set<String> pidWithKeys = new LinkedHashSet<String>();
try {
Configuration[] configuration = configurationAdmin.listConfigurations("(service.pid=" + pid + ")");
if (configuration != null && configuration.length > 0) {
Dictionary dictionary = configuration[0].getProperties();
if (dictionary != null) {
Enumeration keyEnumeration = dictionary.keys();
while (keyEnumeration.hasMoreElements()) {
String key = (String) keyEnumeration.nextElement();
pidWithKeys.add(pid+"/"+key);
}
}
}
} catch (IOException e) {
LOGGER.warn("Could not lookup pid {} from configuration admin.",pid);
} catch (InvalidSyntaxException e) {
LOGGER.warn("Could not lookup pid {} from configuration admin.",pid);
}
return pidWithKeys;
}
}
| apache-2.0 |
Uli1/node-gdal | deps/libgdal/gdal/frmts/pdf/pdfcreatecopy.cpp | 180224 | /******************************************************************************
* $Id: pdfcreatecopy.cpp 28780 2015-03-26 12:29:35Z rouault $
*
* Project: PDF driver
* Purpose: GDALDataset driver for PDF dataset.
* Author: Even Rouault, <even dot rouault at mines dash paris dot org>
*
******************************************************************************
* Copyright (c) 2012-2014, Even Rouault <even dot rouault at mines-paris dot org>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
****************************************************************************/
#include "gdal_pdf.h"
#include "pdfcreatecopy.h"
#include "cpl_vsi_virtual.h"
#include "cpl_conv.h"
#include "cpl_error.h"
#include "ogr_spatialref.h"
#include "ogr_geometry.h"
#include "vrtdataset.h"
#include "pdfobject.h"
#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif
/* Cf PDF reference v1.7, Appendix C, page 993 */
#define MAXIMUM_SIZE_IN_UNITS 14400
CPL_CVSID("$Id: pdfcreatecopy.cpp 28780 2015-03-26 12:29:35Z rouault $");
#define PIXEL_TO_GEO_X(x,y) adfGeoTransform[0] + x * adfGeoTransform[1] + y * adfGeoTransform[2]
#define PIXEL_TO_GEO_Y(x,y) adfGeoTransform[3] + x * adfGeoTransform[4] + y * adfGeoTransform[5]
/* Minimal concrete GDALDataset subclass with an empty implementation.      */
/* NOTE(review): presumably exists only so that a bare dataset object can   */
/* be instantiated by the writer code -- confirm against callers.           */
class GDALFakePDFDataset : public GDALDataset
{
    public:
        GDALFakePDFDataset() {}
};
/************************************************************************/
/* Init() */
/************************************************************************/
void GDALPDFWriter::Init()
{
    /* Object ids/generations of the well-known objects: 0 means "not yet allocated". */
    nInfoId = 0;
    nInfoGen = 0;
    nXMPId = 0;
    nXMPGen = 0;
    nCatalogId = 0;
    nCatalogGen = 0;
    nPageResourceId = 0;
    nStructTreeRootId = 0;
    nNamesId = 0;
    /* Cross-reference state of a pre-existing file (update mode). */
    nLastStartXRef = 0;
    nLastXRefSize = 0;
    bCanUpdate = FALSE;
    /* Not currently inside a StartObj()/EndObj() pair. */
    bInWriteObj = FALSE;
}
/************************************************************************/
/* GDALPDFWriter() */
/************************************************************************/
/* Constructor: in creation mode (bAppend == FALSE), emits the PDF header   */
/* and reserves object ids 1 and 2 for the page resources and the catalog.  */
GDALPDFWriter::GDALPDFWriter(VSILFILE* fpIn, int bAppend) : fp(fpIn)
{
    Init();
    if (!bAppend)
    {
        VSIFPrintfL(fp, "%%PDF-1.6\n");
        /* See PDF 1.7 reference, page 92. Write 4 non-ASCII bytes to indicate that the content will be binary */
        VSIFPrintfL(fp, "%%%c%c%c%c\n", 0xFF, 0xFF, 0xFF, 0xFF);
        /* First two AllocNewObject() calls return ids 1 and 2. */
        nPageResourceId = AllocNewObject();
        nCatalogId = AllocNewObject();
    }
}
/************************************************************************/
/* ~GDALPDFWriter() */
/************************************************************************/
GDALPDFWriter::~GDALPDFWriter()
{
    /* Finalize the file (pages, xref table, trailer) if not already done. */
    Close();
}
/************************************************************************/
/* ParseIndirectRef() */
/************************************************************************/
/* Parse an indirect object reference of the form "<num> <gen> R".          */
/* On success returns non-zero and sets nNum/nGen; out parameters may be    */
/* partially assigned on failure (matching the historical behavior).        */
static int ParseIndirectRef(const char* pszStr, int& nNum, int &nGen)
{
    int* apnOut[2] = { &nNum, &nGen };

    /* Read the two whitespace-separated integers: object number, then */
    /* generation number. Each must be followed by at least one space. */
    for( int iVal = 0; iVal < 2; iVal++ )
    {
        while( *pszStr == ' ' )
            pszStr ++;
        *(apnOut[iVal]) = atoi(pszStr);
        while( *pszStr >= '0' && *pszStr <= '9' )
            pszStr ++;
        if( *pszStr != ' ' )
            return 0;
    }

    /* Finally the 'R' keyword of the reference. */
    while( *pszStr == ' ' )
        pszStr ++;
    return *pszStr == 'R';
}
/************************************************************************/
/* ParseTrailerAndXRef() */
/************************************************************************/
/* Locate and parse the trailer of an existing PDF to prepare update mode:  */
/* fills nLastStartXRef, nLastXRefSize, nCatalogId/nCatalogGen and, when    */
/* present, nInfoId/nInfoGen. Returns TRUE on success and leaves the file   */
/* pointer at the end of the file.                                          */
int GDALPDFWriter::ParseTrailerAndXRef()
{
    VSIFSeekL(fp, 0, SEEK_END);
    char szBuf[1024+1];
    vsi_l_offset nOffset = VSIFTellL(fp);
    /* The "startxref" keyword is expected within the last 128 bytes. */
    if (nOffset > 128)
        nOffset -= 128;
    else
        nOffset = 0;
    /* Find startxref section */
    VSIFSeekL(fp, nOffset, SEEK_SET);
    int nRead = (int) VSIFReadL(szBuf, 1, 128, fp);
    szBuf[nRead] = 0;
    if (nRead < 9)
        return FALSE;
    const char* pszStartXRef = NULL;
    int i;
    /* Scan backwards so that the last occurrence of the keyword wins. */
    for(i = nRead - 9; i>= 0; i --)
    {
        if (strncmp(szBuf + i, "startxref", 9) == 0)
        {
            pszStartXRef = szBuf + i;
            break;
        }
    }
    if (pszStartXRef == NULL)
    {
        CPLError(CE_Failure, CPLE_AppDefined, "Cannot find startxref");
        return FALSE;
    }
    /* The byte offset of the xref section follows on the next line. */
    pszStartXRef += 9;
    while(*pszStartXRef == '\r' || *pszStartXRef == '\n')
        pszStartXRef ++;
    if (*pszStartXRef == '\0')
    {
        CPLError(CE_Failure, CPLE_AppDefined, "Cannot find startxref");
        return FALSE;
    }
    nLastStartXRef = CPLScanUIntBig(pszStartXRef,16);
    /* Skip to beginning of xref section */
    VSIFSeekL(fp, nLastStartXRef, SEEK_SET);
    /* And skip to trailer */
    const char* pszLine;
    while( (pszLine = CPLReadLineL(fp)) != NULL)
    {
        if (strncmp(pszLine, "trailer", 7) == 0)
            break;
    }
    if( pszLine == NULL )
    {
        CPLError(CE_Failure, CPLE_AppDefined, "Cannot find trailer");
        return FALSE;
    }
    /* Read trailer content */
    nRead = (int) VSIFReadL(szBuf, 1, 1024, fp);
    szBuf[nRead] = 0;
    /* Find XRef size */
    const char* pszSize = strstr(szBuf, "/Size");
    if (pszSize == NULL)
    {
        CPLError(CE_Failure, CPLE_AppDefined, "Cannot find trailer /Size");
        return FALSE;
    }
    pszSize += 5;
    while(*pszSize == ' ')
        pszSize ++;
    nLastXRefSize = atoi(pszSize);
    /* Find Root object (mandatory) */
    const char* pszRoot = strstr(szBuf, "/Root");
    if (pszRoot == NULL)
    {
        CPLError(CE_Failure, CPLE_AppDefined, "Cannot find trailer /Root");
        return FALSE;
    }
    pszRoot += 5;
    while(*pszRoot == ' ')
        pszRoot ++;
    if (!ParseIndirectRef(pszRoot, nCatalogId, nCatalogGen))
    {
        CPLError(CE_Failure, CPLE_AppDefined, "Cannot parse trailer /Root");
        return FALSE;
    }
    /* Find Info object (optional: a parse failure is not fatal) */
    const char* pszInfo = strstr(szBuf, "/Info");
    if (pszInfo != NULL)
    {
        pszInfo += 5;
        while(*pszInfo == ' ')
            pszInfo ++;
        if (!ParseIndirectRef(pszInfo, nInfoId, nInfoGen))
        {
            CPLError(CE_Failure, CPLE_AppDefined, "Cannot parse trailer /Info");
            nInfoId = nInfoGen = 0;
        }
    }
    VSIFSeekL(fp, 0, SEEK_END);
    return TRUE;
}
/************************************************************************/
/* Close() */
/************************************************************************/
/* Finalize and close the output file. Safe to call more than once. */
void GDALPDFWriter::Close()
{
    if (fp != NULL)
    {
        CPLAssert(!bInWriteObj);
        /* In creation mode the page tree must be flushed first; in both */
        /* creation and update modes the xref table and trailer follow.  */
        const int bHasPages = (nPageResourceId != 0);
        if (bHasPages)
            WritePages();
        if (bHasPages || bCanUpdate)
            WriteXRefTableAndTrailer();
        VSIFCloseL(fp);
    }
    fp = NULL;
}
/************************************************************************/
/* UpdateProj() */
/************************************************************************/
/* Rewrite the georeferencing of an existing page (update mode): writes new */
/* ISO32000 and/or OGC_BP encoded SRS objects (per GDAL_PDF_GEO_ENCODING)   */
/* and re-emits the page dictionary so that it points to them.              */
void GDALPDFWriter::UpdateProj(GDALDataset* poSrcDS,
                               double dfDPI,
                               GDALPDFDictionaryRW* poPageDict,
                               int nPageNum, int nPageGen)
{
    bCanUpdate = TRUE;
    /* Grow the xref table to the size of the file being updated. */
    if ((int)asXRefEntries.size() < nLastXRefSize - 1)
        asXRefEntries.resize(nLastXRefSize - 1);
    int nViewportId = 0;
    int nLGIDictId = 0;
    CPLAssert(nPageNum != 0);
    CPLAssert(poPageDict != NULL);
    PDFMargins sMargins = {0, 0, 0, 0};
    const char* pszGEO_ENCODING = CPLGetConfigOption("GDAL_PDF_GEO_ENCODING", "ISO32000");
    if (EQUAL(pszGEO_ENCODING, "ISO32000") || EQUAL(pszGEO_ENCODING, "BOTH"))
        nViewportId = WriteSRS_ISO32000(poSrcDS, dfDPI * USER_UNIT_IN_INCH, NULL, &sMargins, TRUE);
    if (EQUAL(pszGEO_ENCODING, "OGC_BP") || EQUAL(pszGEO_ENCODING, "BOTH"))
        nLGIDictId = WriteSRS_OGC_BP(poSrcDS, dfDPI * USER_UNIT_IN_INCH, NULL, &sMargins);
#ifdef invalidate_xref_entry
    /* Optionally mark the previous viewport object as free in the xref. */
    GDALPDFObject* poVP = poPageDict->Get("VP");
    if (poVP)
    {
        if (poVP->GetType() == PDFObjectType_Array &&
            poVP->GetArray()->GetLength() == 1)
            poVP = poVP->GetArray()->Get(0);
        int nVPId = poVP->GetRefNum();
        if (nVPId)
        {
            asXRefEntries[nVPId - 1].bFree = TRUE;
            asXRefEntries[nVPId - 1].nGen ++;
        }
    }
#endif
    /* Drop the previous georeferencing entries and add the new ones. */
    poPageDict->Remove("VP");
    poPageDict->Remove("LGIDict");
    if (nViewportId)
    {
        poPageDict->Add("VP", &((new GDALPDFArrayRW())->
                Add(nViewportId, 0)));
    }
    if (nLGIDictId)
    {
        poPageDict->Add("LGIDict", nLGIDictId, 0);
    }
    /* Re-emit the page object itself with the updated dictionary. */
    StartObj(nPageNum, nPageGen);
    VSIFPrintfL(fp, "%s\n", poPageDict->Serialize().c_str());
    EndObj();
}
/************************************************************************/
/* UpdateInfo() */
/************************************************************************/
/* Rewrite the Info dictionary from the source dataset metadata (update mode). */
void GDALPDFWriter::UpdateInfo(GDALDataset* poSrcDS)
{
    bCanUpdate = TRUE;
    /* Grow the xref table to the size of the file being updated. */
    if ((int)asXRefEntries.size() < nLastXRefSize - 1)
        asXRefEntries.resize(nLastXRefSize - 1);
    int nNewInfoId = SetInfo(poSrcDS, NULL);
    /* Write empty info, because podofo driver will find the dangling info instead */
    if (nNewInfoId == 0 && nInfoId != 0)
    {
#ifdef invalidate_xref_entry
        asXRefEntries[nInfoId - 1].bFree = TRUE;
        asXRefEntries[nInfoId - 1].nGen ++;
#else
        StartObj(nInfoId, nInfoGen);
        VSIFPrintfL(fp, "<< >>\n");
        EndObj();
#endif
    }
}
/************************************************************************/
/* UpdateXMP() */
/************************************************************************/
/* Rewrite the XMP metadata packet referenced by the catalog (update mode), */
/* then re-emit the catalog object with the updated Metadata reference.     */
void GDALPDFWriter::UpdateXMP(GDALDataset* poSrcDS,
                              GDALPDFDictionaryRW* poCatalogDict)
{
    bCanUpdate = TRUE;
    /* Grow the xref table to the size of the file being updated. */
    if ((int)asXRefEntries.size() < nLastXRefSize - 1)
        asXRefEntries.resize(nLastXRefSize - 1);
    CPLAssert(nCatalogId != 0);
    CPLAssert(poCatalogDict != NULL);
    /* Reuse the object id/generation of an existing Metadata entry, if any. */
    GDALPDFObject* poMetadata = poCatalogDict->Get("Metadata");
    if (poMetadata)
    {
        nXMPId = poMetadata->GetRefNum();
        nXMPGen = poMetadata->GetRefGen();
    }
    poCatalogDict->Remove("Metadata");
    int nNewXMPId = SetXMP(poSrcDS, NULL);
    /* Write empty metadata, because podofo driver will find the dangling info instead */
    if (nNewXMPId == 0 && nXMPId != 0)
    {
        StartObj(nXMPId, nXMPGen);
        VSIFPrintfL(fp, "<< >>\n");
        EndObj();
    }
    if (nXMPId)
        poCatalogDict->Add("Metadata", nXMPId, 0);
    StartObj(nCatalogId, nCatalogGen);
    VSIFPrintfL(fp, "%s\n", poCatalogDict->Serialize().c_str());
    EndObj();
}
/************************************************************************/
/* AllocNewObject() */
/************************************************************************/
int GDALPDFWriter::AllocNewObject()
{
    /* Reserve a fresh xref slot. PDF object ids are 1-based, so the id of */
    /* the newly allocated object is simply the new size of the table.     */
    asXRefEntries.push_back(GDALXRefEntry());
    return static_cast<int>(asXRefEntries.size());
}
/************************************************************************/
/* WriteXRefTableAndTrailer() */
/************************************************************************/
/* Emit the cross-reference table and the trailer dictionary, the final  */
/* sections of a PDF file.                                               */
void GDALPDFWriter::WriteXRefTableAndTrailer()
{
    vsi_l_offset nOffsetXREF = VSIFTellL(fp);
    VSIFPrintfL(fp, "xref\n");
    char buffer[16];
    if (bCanUpdate)
    {
        /* Incremental update: only emit subsections for objects that were */
        /* actually (re)written or freed, grouped in consecutive runs.     */
        VSIFPrintfL(fp, "0 1\n");
        VSIFPrintfL(fp, "0000000000 65535 f \n");
        for(size_t i=0;i<asXRefEntries.size();)
        {
            if (asXRefEntries[i].nOffset != 0 || asXRefEntries[i].bFree)
            {
                /* Find number of consecutive objects */
                size_t nCount = 1;
                while(i + nCount <asXRefEntries.size() &&
                    (asXRefEntries[i + nCount].nOffset != 0 || asXRefEntries[i + nCount].bFree))
                    nCount ++;
                VSIFPrintfL(fp, "%d %d\n", (int)i + 1, (int)nCount);
                size_t iEnd = i + nCount;
                for(; i < iEnd; i++)
                {
                    /* Each entry is a fixed-width 10-digit offset + 5-digit gen. */
                    snprintf (buffer, sizeof(buffer),
                              "%010" CPL_FRMT_GB_WITHOUT_PREFIX "u",
                              asXRefEntries[i].nOffset);
                    VSIFPrintfL(fp, "%s %05d %c \n",
                                buffer, asXRefEntries[i].nGen,
                                asXRefEntries[i].bFree ? 'f' : 'n');
                }
            }
            else
            {
                i++;
            }
        }
    }
    else
    {
        /* Full write: a single subsection covering every object. */
        VSIFPrintfL(fp, "%d %d\n",
                    0, (int)asXRefEntries.size() + 1);
        VSIFPrintfL(fp, "0000000000 65535 f \n");
        for(size_t i=0;i<asXRefEntries.size();i++)
        {
            snprintf (buffer, sizeof(buffer),
                      "%010" CPL_FRMT_GB_WITHOUT_PREFIX "u",
                      asXRefEntries[i].nOffset);
            VSIFPrintfL(fp, "%s %05d n \n", buffer, asXRefEntries[i].nGen);
        }
    }
    VSIFPrintfL(fp, "trailer\n");
    GDALPDFDictionaryRW oDict;
    oDict.Add("Size", (int)asXRefEntries.size() + 1)
         .Add("Root", nCatalogId, nCatalogGen);
    if (nInfoId)
        oDict.Add("Info", nInfoId, nInfoGen);
    /* In update mode, chain to the previous xref section via /Prev. */
    if (nLastStartXRef)
        oDict.Add("Prev", (double)nLastStartXRef);
    VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
    VSIFPrintfL(fp,
                "startxref\n"
                CPL_FRMT_GUIB "\n"
                "%%%%EOF\n",
                nOffsetXREF);
}
/************************************************************************/
/* StartObj() */
/************************************************************************/
void GDALPDFWriter::StartObj(int nObjectId, int nGen)
{
CPLAssert(!bInWriteObj);
CPLAssert(nObjectId - 1 < (int)asXRefEntries.size());
CPLAssert(asXRefEntries[nObjectId - 1].nOffset == 0);
asXRefEntries[nObjectId - 1].nOffset = VSIFTellL(fp);
asXRefEntries[nObjectId - 1].nGen = nGen;
VSIFPrintfL(fp, "%d %d obj\n", nObjectId, nGen);
bInWriteObj = TRUE;
}
/************************************************************************/
/* EndObj() */
/************************************************************************/
/* Terminate the object body opened by StartObj(). */
void GDALPDFWriter::EndObj()
{
    CPLAssert(bInWriteObj);
    VSIFPrintfL(fp, "endobj\n");
    bInWriteObj = FALSE;
}
/************************************************************************/
/* GDALPDFFind4Corners() */
/************************************************************************/
/* Classify the 4 GCPs as upper-left / upper-right / lower-right /         */
/* lower-left corners by comparing each point against the centroid of the  */
/* 4 pixel/line coordinates. A point lying exactly on a mean axis is left  */
/* unclassified (the corresponding output index keeps its default of 0).   */
static
void GDALPDFFind4Corners(const GDAL_GCP* pasGCPList,
                         int& iUL, int& iUR, int& iLR, int& iLL)
{
    iUL = 0;
    iUR = 0;
    iLR = 0;
    iLL = 0;

    /* Centroid of the 4 points in pixel/line space. */
    double dfMeanX = 0.0;
    double dfMeanY = 0.0;
    for( int i = 0; i < 4; i++ )
    {
        dfMeanX += pasGCPList[i].dfGCPPixel;
        dfMeanY += pasGCPList[i].dfGCPLine;
    }
    dfMeanX /= 4;
    dfMeanY /= 4;

    for( int i = 0; i < 4; i++ )
    {
        const bool bLeft  = pasGCPList[i].dfGCPPixel < dfMeanX;
        const bool bRight = pasGCPList[i].dfGCPPixel > dfMeanX;
        const bool bAbove = pasGCPList[i].dfGCPLine < dfMeanY;
        const bool bBelow = pasGCPList[i].dfGCPLine > dfMeanY;
        if( bLeft && bAbove )
            iUL = i;
        else if( bRight && bAbove )
            iUR = i;
        else if( bRight && bBelow )
            iLR = i;
        else if( bLeft && bBelow )
            iLL = i;
    }
}
/************************************************************************/
/* WriteSRS_ISO32000() */
/************************************************************************/
/* Write the ISO32000 georeferencing encoding (Viewport / Measure / GCS     */
/* dictionaries) for the source dataset. Requires a geotransform or exactly */
/* 4 GCPs forming a rectangle in pixel space, plus a non-empty SRS.         */
/* Returns the object id of the viewport (or of the measure dictionary when */
/* bWriteViewport is false), or 0 on error / missing georeferencing.        */
int GDALPDFWriter::WriteSRS_ISO32000(GDALDataset* poSrcDS,
                                     double dfUserUnit,
                                     const char* pszNEATLINE,
                                     PDFMargins* psMargins,
                                     int bWriteViewport)
{
    int nWidth = poSrcDS->GetRasterXSize();
    int nHeight = poSrcDS->GetRasterYSize();
    const char* pszWKT = poSrcDS->GetProjectionRef();
    double adfGeoTransform[6];
    int bHasGT = (poSrcDS->GetGeoTransform(adfGeoTransform) == CE_None);
    /* Only an exact set of 4 GCPs is usable; they must map to the corners. */
    const GDAL_GCP* pasGCPList = (poSrcDS->GetGCPCount() == 4) ? poSrcDS->GetGCPs() : NULL;
    if (pasGCPList != NULL)
        pszWKT = poSrcDS->GetGCPProjection();
    if( !bHasGT && pasGCPList == NULL )
        return 0;
    if( pszWKT == NULL || EQUAL(pszWKT, "") )
        return 0;
    double adfGPTS[8];
    double dfULPixel = 0;
    double dfULLine = 0;
    double dfLRPixel = nWidth;
    double dfLRLine = nHeight;
    GDAL_GCP asNeatLineGCPs[4];
    if (pszNEATLINE == NULL)
        pszNEATLINE = poSrcDS->GetMetadataItem("NEATLINE");
    /* If a neatline polygon is provided, turn its 4 corners into GCPs     */
    /* (georeferenced coords mapped back to pixel space via the inverse    */
    /* geotransform), provided they form a rectangle in pixel space.       */
    if( bHasGT && pszNEATLINE != NULL && pszNEATLINE[0] != '\0' )
    {
        OGRGeometry* poGeom = NULL;
        OGRGeometryFactory::createFromWkt( (char**)&pszNEATLINE, NULL, &poGeom );
        if ( poGeom != NULL && wkbFlatten(poGeom->getGeometryType()) == wkbPolygon )
        {
            OGRLineString* poLS = ((OGRPolygon*)poGeom)->getExteriorRing();
            double adfGeoTransformInv[6];
            if( poLS != NULL && poLS->getNumPoints() == 5 &&
                GDALInvGeoTransform(adfGeoTransform, adfGeoTransformInv) )
            {
                for(int i=0;i<4;i++)
                {
                    double X = asNeatLineGCPs[i].dfGCPX = poLS->getX(i);
                    double Y = asNeatLineGCPs[i].dfGCPY = poLS->getY(i);
                    double x = adfGeoTransformInv[0] + X * adfGeoTransformInv[1] + Y * adfGeoTransformInv[2];
                    double y = adfGeoTransformInv[3] + X * adfGeoTransformInv[4] + Y * adfGeoTransformInv[5];
                    asNeatLineGCPs[i].dfGCPPixel = x;
                    asNeatLineGCPs[i].dfGCPLine = y;
                }
                int iUL = 0, iUR = 0, iLR = 0, iLL = 0;
                GDALPDFFind4Corners(asNeatLineGCPs,
                                    iUL,iUR, iLR, iLL);
                /* Allow up to half a pixel of tolerance on rectangularity. */
                if (fabs(asNeatLineGCPs[iUL].dfGCPPixel - asNeatLineGCPs[iLL].dfGCPPixel) > .5 ||
                    fabs(asNeatLineGCPs[iUR].dfGCPPixel - asNeatLineGCPs[iLR].dfGCPPixel) > .5 ||
                    fabs(asNeatLineGCPs[iUL].dfGCPLine - asNeatLineGCPs[iUR].dfGCPLine) > .5 ||
                    fabs(asNeatLineGCPs[iLL].dfGCPLine - asNeatLineGCPs[iLR].dfGCPLine) > .5)
                {
                    CPLError(CE_Warning, CPLE_NotSupported,
                             "Neatline coordinates should form a rectangle in pixel space. Ignoring it");
                    for(int i=0;i<4;i++)
                    {
                        CPLDebug("PDF", "pixel[%d] = %.1f, line[%d] = %.1f",
                                 i, asNeatLineGCPs[i].dfGCPPixel,
                                 i, asNeatLineGCPs[i].dfGCPLine);
                    }
                }
                else
                {
                    pasGCPList = asNeatLineGCPs;
                }
            }
        }
        delete poGeom;
    }
    if( pasGCPList )
    {
        int iUL = 0, iUR = 0, iLR = 0, iLL = 0;
        GDALPDFFind4Corners(pasGCPList,
                            iUL,iUR, iLR, iLL);
        if (fabs(pasGCPList[iUL].dfGCPPixel - pasGCPList[iLL].dfGCPPixel) > .5 ||
            fabs(pasGCPList[iUR].dfGCPPixel - pasGCPList[iLR].dfGCPPixel) > .5 ||
            fabs(pasGCPList[iUL].dfGCPLine - pasGCPList[iUR].dfGCPLine) > .5 ||
            fabs(pasGCPList[iLL].dfGCPLine - pasGCPList[iLR].dfGCPLine) > .5)
        {
            CPLError(CE_Failure, CPLE_NotSupported,
                     "GCPs should form a rectangle in pixel space");
            return 0;
        }
        dfULPixel = pasGCPList[iUL].dfGCPPixel;
        dfULLine = pasGCPList[iUL].dfGCPLine;
        dfLRPixel = pasGCPList[iLR].dfGCPPixel;
        dfLRLine = pasGCPList[iLR].dfGCPLine;
        /* Upper-left */
        adfGPTS[0] = pasGCPList[iUL].dfGCPX;
        adfGPTS[1] = pasGCPList[iUL].dfGCPY;
        /* Lower-left */
        adfGPTS[2] = pasGCPList[iLL].dfGCPX;
        adfGPTS[3] = pasGCPList[iLL].dfGCPY;
        /* Lower-right */
        adfGPTS[4] = pasGCPList[iLR].dfGCPX;
        adfGPTS[5] = pasGCPList[iLR].dfGCPY;
        /* Upper-right */
        adfGPTS[6] = pasGCPList[iUR].dfGCPX;
        adfGPTS[7] = pasGCPList[iUR].dfGCPY;
    }
    else
    {
        /* Derive the 4 georeferenced corners from the geotransform. */
        /* Upper-left */
        adfGPTS[0] = PIXEL_TO_GEO_X(0, 0);
        adfGPTS[1] = PIXEL_TO_GEO_Y(0, 0);
        /* Lower-left */
        adfGPTS[2] = PIXEL_TO_GEO_X(0, nHeight);
        adfGPTS[3] = PIXEL_TO_GEO_Y(0, nHeight);
        /* Lower-right */
        adfGPTS[4] = PIXEL_TO_GEO_X(nWidth, nHeight);
        adfGPTS[5] = PIXEL_TO_GEO_Y(nWidth, nHeight);
        /* Upper-right */
        adfGPTS[6] = PIXEL_TO_GEO_X(nWidth, 0);
        adfGPTS[7] = PIXEL_TO_GEO_Y(nWidth, 0);
    }
    OGRSpatialReferenceH hSRS = OSRNewSpatialReference(pszWKT);
    if( hSRS == NULL )
        return 0;
    OGRSpatialReferenceH hSRSGeog = OSRCloneGeogCS(hSRS);
    if( hSRSGeog == NULL )
    {
        OSRDestroySpatialReference(hSRS);
        return 0;
    }
    /* Reproject the corner coordinates to the geographic CS for GPTS. */
    OGRCoordinateTransformationH hCT = OCTNewCoordinateTransformation( hSRS, hSRSGeog);
    if( hCT == NULL )
    {
        OSRDestroySpatialReference(hSRS);
        OSRDestroySpatialReference(hSRSGeog);
        return 0;
    }
    int bSuccess = TRUE;
    bSuccess &= (OCTTransform( hCT, 1, adfGPTS + 0, adfGPTS + 1, NULL ) == 1);
    bSuccess &= (OCTTransform( hCT, 1, adfGPTS + 2, adfGPTS + 3, NULL ) == 1);
    bSuccess &= (OCTTransform( hCT, 1, adfGPTS + 4, adfGPTS + 5, NULL ) == 1);
    bSuccess &= (OCTTransform( hCT, 1, adfGPTS + 6, adfGPTS + 7, NULL ) == 1);
    if (!bSuccess)
    {
        OSRDestroySpatialReference(hSRS);
        OSRDestroySpatialReference(hSRSGeog);
        OCTDestroyCoordinateTransformation(hCT);
        return 0;
    }
    const char * pszAuthorityCode = OSRGetAuthorityCode( hSRS, NULL );
    const char * pszAuthorityName = OSRGetAuthorityName( hSRS, NULL );
    int nEPSGCode = 0;
    if( pszAuthorityName != NULL && EQUAL(pszAuthorityName, "EPSG") &&
        pszAuthorityCode != NULL )
        nEPSGCode = atoi(pszAuthorityCode);
    int bIsGeographic = OSRIsGeographic(hSRS);
    /* The GCS dictionary carries an ESRI-flavoured WKT string. */
    OSRMorphToESRI(hSRS);
    char* pszESRIWKT = NULL;
    OSRExportToWkt(hSRS, &pszESRIWKT);
    OSRDestroySpatialReference(hSRS);
    OSRDestroySpatialReference(hSRSGeog);
    OCTDestroyCoordinateTransformation(hCT);
    hSRS = NULL;
    hSRSGeog = NULL;
    hCT = NULL;
    if (pszESRIWKT == NULL)
        return 0;
    int nViewportId = (bWriteViewport) ? AllocNewObject() : 0;
    int nMeasureId = AllocNewObject();
    int nGCSId = AllocNewObject();
    if (nViewportId)
    {
        /* Viewport: the page-space rectangle (in user units) it applies to. */
        StartObj(nViewportId);
        GDALPDFDictionaryRW oViewPortDict;
        oViewPortDict.Add("Type", GDALPDFObjectRW::CreateName("Viewport"))
                     .Add("Name", "Layer")
                     .Add("BBox", &((new GDALPDFArrayRW())
                                    ->Add(dfULPixel / dfUserUnit + psMargins->nLeft)
                                      .Add((nHeight - dfLRLine) / dfUserUnit + psMargins->nBottom)
                                      .Add(dfLRPixel / dfUserUnit + psMargins->nLeft)
                                      .Add((nHeight - dfULLine) / dfUserUnit + psMargins->nBottom)))
                     .Add("Measure", nMeasureId, 0);
        VSIFPrintfL(fp, "%s\n", oViewPortDict.Serialize().c_str());
        EndObj();
    }
    /* Measure: maps the normalized LPTS corners to the GPTS geographic */
    /* coordinates. Note the swapped (Y,X) ordering of the GPTS pairs.  */
    StartObj(nMeasureId);
    GDALPDFDictionaryRW oMeasureDict;
    oMeasureDict .Add("Type", GDALPDFObjectRW::CreateName("Measure"))
                 .Add("Subtype", GDALPDFObjectRW::CreateName("GEO"))
                 .Add("Bounds", &((new GDALPDFArrayRW())
                                ->Add(0).Add(1).
                                  Add(0).Add(0).
                                  Add(1).Add(0).
                                  Add(1).Add(1)))
                 .Add("GPTS", &((new GDALPDFArrayRW())
                                ->Add(adfGPTS[1]).Add(adfGPTS[0]).
                                  Add(adfGPTS[3]).Add(adfGPTS[2]).
                                  Add(adfGPTS[5]).Add(adfGPTS[4]).
                                  Add(adfGPTS[7]).Add(adfGPTS[6])))
                 .Add("LPTS", &((new GDALPDFArrayRW())
                                ->Add(0).Add(1).
                                  Add(0).Add(0).
                                  Add(1).Add(0).
                                  Add(1).Add(1)))
                 .Add("GCS", nGCSId, 0);
    VSIFPrintfL(fp, "%s\n", oMeasureDict.Serialize().c_str());
    EndObj();
    /* GCS: the coordinate system description (WKT + optional EPSG code). */
    StartObj(nGCSId);
    GDALPDFDictionaryRW oGCSDict;
    oGCSDict.Add("Type", GDALPDFObjectRW::CreateName(bIsGeographic ? "GEOGCS" : "PROJCS"))
            .Add("WKT", pszESRIWKT);
    if (nEPSGCode)
        oGCSDict.Add("EPSG", nEPSGCode);
    VSIFPrintfL(fp, "%s\n", oGCSDict.Serialize().c_str());
    EndObj();
    CPLFree(pszESRIWKT);
    return nViewportId ? nViewportId : nMeasureId;
}
/************************************************************************/
/* GDALPDFBuildOGC_BP_Datum() */
/************************************************************************/
/* Build the "Datum" value of an OGC Best Practice projection dictionary.   */
/* Well-known datums are encoded by their code ("WGE", "NAS", "NAR",        */
/* "OHA-M"); any other datum gets an inline dictionary with description,    */
/* ellipsoid and optional ToWGS84 shift. Falls back to "WGE" (WGS84) when   */
/* no datum name is available.                                              */
static GDALPDFObject* GDALPDFBuildOGC_BP_Datum(const OGRSpatialReference* poSRS)
{
    const OGR_SRSNode* poDatumNode = poSRS->GetAttrNode("DATUM");
    const char* pszDatumDescription = NULL;
    if (poDatumNode && poDatumNode->GetChildCount() > 0)
        pszDatumDescription = poDatumNode->GetChild(0)->GetValue();
    GDALPDFObjectRW* poPDFDatum = NULL;
    if (pszDatumDescription)
    {
        double dfSemiMajor = poSRS->GetSemiMajor();
        double dfInvFlattening = poSRS->GetInvFlattening();
        int nEPSGDatum = -1;
        const char *pszAuthority = poSRS->GetAuthorityName( "DATUM" );
        if( pszAuthority != NULL && EQUAL(pszAuthority,"EPSG") )
            nEPSGDatum = atoi(poSRS->GetAuthorityCode( "DATUM" ));
        /* Datums recognized either by name or by EPSG datum code. */
        if( EQUAL(pszDatumDescription,SRS_DN_WGS84) || nEPSGDatum == 6326 )
            poPDFDatum = GDALPDFObjectRW::CreateString("WGE");
        else if( EQUAL(pszDatumDescription, SRS_DN_NAD27) || nEPSGDatum == 6267 )
            poPDFDatum = GDALPDFObjectRW::CreateString("NAS");
        else if( EQUAL(pszDatumDescription, SRS_DN_NAD83) || nEPSGDatum == 6269 )
            poPDFDatum = GDALPDFObjectRW::CreateString("NAR");
        else if( nEPSGDatum == 6135 )
            poPDFDatum = GDALPDFObjectRW::CreateString("OHA-M");
        else
        {
            CPLDebug("PDF",
                     "Unhandled datum name (%s). Write datum parameters then.",
                     pszDatumDescription);
            GDALPDFDictionaryRW* poPDFDatumDict = new GDALPDFDictionaryRW();
            poPDFDatum = GDALPDFObjectRW::CreateDictionary(poPDFDatumDict);
            const OGR_SRSNode* poSpheroidNode = poSRS->GetAttrNode("SPHEROID");
            if (poSpheroidNode && poSpheroidNode->GetChildCount() >= 3)
            {
                poPDFDatumDict->Add("Description", pszDatumDescription);
                /* The ellipsoid-code table below is compiled out unless      */
                /* 'disabled_because_terrago_toolbar_does_not_like_it' is     */
                /* defined, so in practice pszEllipsoidCode stays NULL and    */
                /* explicit ellipsoid parameters are always written.          */
                const char* pszEllipsoidCode = NULL;
#ifdef disabled_because_terrago_toolbar_does_not_like_it
                if( ABS(dfSemiMajor-6378249.145) < 0.01
                    && ABS(dfInvFlattening-293.465) < 0.0001 )
                {
                    pszEllipsoidCode = "CD";     /* Clark 1880 */
                }
                else if( ABS(dfSemiMajor-6378245.0) < 0.01
                         && ABS(dfInvFlattening-298.3) < 0.0001 )
                {
                    pszEllipsoidCode = "KA";      /* Krassovsky */
                }
                else if( ABS(dfSemiMajor-6378388.0) < 0.01
                         && ABS(dfInvFlattening-297.0) < 0.0001 )
                {
                    pszEllipsoidCode = "IN";       /* International 1924 */
                }
                else if( ABS(dfSemiMajor-6378160.0) < 0.01
                         && ABS(dfInvFlattening-298.25) < 0.0001 )
                {
                    pszEllipsoidCode = "AN";    /* Australian */
                }
                else if( ABS(dfSemiMajor-6377397.155) < 0.01
                         && ABS(dfInvFlattening-299.1528128) < 0.0001 )
                {
                    pszEllipsoidCode = "BR";     /* Bessel 1841 */
                }
                else if( ABS(dfSemiMajor-6377483.865) < 0.01
                         && ABS(dfInvFlattening-299.1528128) < 0.0001 )
                {
                    pszEllipsoidCode = "BN";   /* Bessel 1841 (Namibia / Schwarzeck)*/
                }
#if 0
                else if( ABS(dfSemiMajor-6378160.0) < 0.01
                         && ABS(dfInvFlattening-298.247167427) < 0.0001 )
                {
                    pszEllipsoidCode = "GRS67";      /* GRS 1967 */
                }
#endif
                else if( ABS(dfSemiMajor-6378137) < 0.01
                         && ABS(dfInvFlattening-298.257222101) < 0.000001 )
                {
                    pszEllipsoidCode = "RF";      /* GRS 1980 */
                }
                else if( ABS(dfSemiMajor-6378206.4) < 0.01
                         && ABS(dfInvFlattening-294.9786982) < 0.0001 )
                {
                    pszEllipsoidCode = "CC";     /* Clarke 1866 */
                }
                else if( ABS(dfSemiMajor-6377340.189) < 0.01
                         && ABS(dfInvFlattening-299.3249646) < 0.0001 )
                {
                    pszEllipsoidCode = "AM";   /* Modified Airy */
                }
                else if( ABS(dfSemiMajor-6377563.396) < 0.01
                         && ABS(dfInvFlattening-299.3249646) < 0.0001 )
                {
                    pszEllipsoidCode = "AA";       /* Airy */
                }
                else if( ABS(dfSemiMajor-6378200) < 0.01
                         && ABS(dfInvFlattening-298.3) < 0.0001 )
                {
                    pszEllipsoidCode = "HE";    /* Helmert 1906 */
                }
                else if( ABS(dfSemiMajor-6378155) < 0.01
                         && ABS(dfInvFlattening-298.3) < 0.0001 )
                {
                    pszEllipsoidCode = "FA";   /* Modified Fischer 1960 */
                }
#if 0
                else if( ABS(dfSemiMajor-6377298.556) < 0.01
                         && ABS(dfInvFlattening-300.8017) < 0.0001 )
                {
                    pszEllipsoidCode = "evrstSS";    /* Everest (Sabah & Sarawak) */
                }
                else if( ABS(dfSemiMajor-6378165.0) < 0.01
                         && ABS(dfInvFlattening-298.3) < 0.0001 )
                {
                    pszEllipsoidCode = "WGS60";
                }
                else if( ABS(dfSemiMajor-6378145.0) < 0.01
                         && ABS(dfInvFlattening-298.25) < 0.0001 )
                {
                    pszEllipsoidCode = "WGS66";
                }
#endif
                else if( ABS(dfSemiMajor-6378135.0) < 0.01
                         && ABS(dfInvFlattening-298.26) < 0.0001 )
                {
                    pszEllipsoidCode = "WD";
                }
                else if( ABS(dfSemiMajor-6378137.0) < 0.01
                         && ABS(dfInvFlattening-298.257223563) < 0.000001 )
                {
                    pszEllipsoidCode = "WE";
                }
#endif
                if( pszEllipsoidCode != NULL )
                {
                    poPDFDatumDict->Add("Ellipsoid", pszEllipsoidCode);
                }
                else
                {
                    const char* pszEllipsoidDescription =
                        poSpheroidNode->GetChild(0)->GetValue();
                    CPLDebug("PDF",
                             "Unhandled ellipsoid name (%s). Write ellipsoid parameters then.",
                             pszEllipsoidDescription);
                    poPDFDatumDict->Add("Ellipsoid",
                        &((new GDALPDFDictionaryRW())
                        ->Add("Description", pszEllipsoidDescription)
                         .Add("SemiMajorAxis", dfSemiMajor, TRUE)
                         .Add("InvFlattening", dfInvFlattening, TRUE)));
                }
                /* 3-parameter shift: accepted when the rotation/scale terms */
                /* are absent or all empty.                                  */
                const OGR_SRSNode *poTOWGS84 = poSRS->GetAttrNode( "TOWGS84" );
                if( poTOWGS84 != NULL
                    && poTOWGS84->GetChildCount() >= 3
                    && (poTOWGS84->GetChildCount() < 7
                        || (EQUAL(poTOWGS84->GetChild(3)->GetValue(),"")
                            && EQUAL(poTOWGS84->GetChild(4)->GetValue(),"")
                            && EQUAL(poTOWGS84->GetChild(5)->GetValue(),"")
                            && EQUAL(poTOWGS84->GetChild(6)->GetValue(),""))) )
                {
                    poPDFDatumDict->Add("ToWGS84",
                        &((new GDALPDFDictionaryRW())
                        ->Add("dx", poTOWGS84->GetChild(0)->GetValue())
                         .Add("dy", poTOWGS84->GetChild(1)->GetValue())
                         .Add("dz", poTOWGS84->GetChild(2)->GetValue())) );
                }
                /* Full 7-parameter (Helmert) transformation. */
                else if( poTOWGS84 != NULL && poTOWGS84->GetChildCount() >= 7)
                {
                    poPDFDatumDict->Add("ToWGS84",
                        &((new GDALPDFDictionaryRW())
                        ->Add("dx", poTOWGS84->GetChild(0)->GetValue())
                         .Add("dy", poTOWGS84->GetChild(1)->GetValue())
                         .Add("dz", poTOWGS84->GetChild(2)->GetValue())
                         .Add("rx", poTOWGS84->GetChild(3)->GetValue())
                         .Add("ry", poTOWGS84->GetChild(4)->GetValue())
                         .Add("rz", poTOWGS84->GetChild(5)->GetValue())
                         .Add("sf", poTOWGS84->GetChild(6)->GetValue())) );
                }
            }
        }
    }
    else
    {
        CPLError(CE_Warning, CPLE_NotSupported,
                 "No datum name. Defaulting to WGS84.");
    }
    if (poPDFDatum == NULL)
        poPDFDatum = GDALPDFObjectRW::CreateString("WGE");
    return poPDFDatum;
}
/************************************************************************/
/* GDALPDFBuildOGC_BP_Projection() */
/************************************************************************/
/* Build the /Projection dictionary of an OGC Best Practice LGIDict from    */
/* the given SRS.  Supports geographic, local cartesian, UTM, Transverse    */
/* Mercator, Polar Stereographic, LCC-2SP and Mercator-1SP.  Returns NULL   */
/* (after emitting a warning) for SRS types that cannot be mapped.          */
static GDALPDFDictionaryRW* GDALPDFBuildOGC_BP_Projection(const OGRSpatialReference* poSRS)
{
    const char* pszProjectionOGCBP = "GEOGRAPHIC";
    const char *pszProjection = poSRS->GetAttrValue("PROJECTION");
    GDALPDFDictionaryRW* poProjectionDict = new GDALPDFDictionaryRW();
    poProjectionDict->Add("Type", GDALPDFObjectRW::CreateName("Projection"));
    poProjectionDict->Add("Datum", GDALPDFBuildOGC_BP_Datum(poSRS));
    if( pszProjection == NULL )
    {
        if( poSRS->IsGeographic() )
            pszProjectionOGCBP = "GEOGRAPHIC";
        else if( poSRS->IsLocal() )
            pszProjectionOGCBP = "LOCAL CARTESIAN";
        else
        {
            CPLError(CE_Warning, CPLE_NotSupported, "Unsupported SRS type");
            delete poProjectionDict;
            return NULL;
        }
    }
    else if( EQUAL(pszProjection, SRS_PT_TRANSVERSE_MERCATOR) )
    {
        int bNorth;
        int nZone = poSRS->GetUTMZone( &bNorth );
        if( nZone != 0 )
        {
            /* UTM: only the hemisphere and zone number are needed. */
            pszProjectionOGCBP = "UT";
            poProjectionDict->Add("Hemisphere", (bNorth) ? "N" : "S");
            poProjectionDict->Add("Zone", nZone);
        }
        else
        {
            /* BUGFIX: the latitude-of-origin default was the long-double   */
            /* literal "90.L" (i.e. 90).  The conventional default for      */
            /* SRS_PP_LATITUDE_OF_ORIGIN is 0.0, consistent with every      */
            /* other projection branch in this function.                     */
            double dfCenterLat = poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN,0.0);
            double dfCenterLong = poSRS->GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN,0.0);
            double dfScale = poSRS->GetNormProjParm(SRS_PP_SCALE_FACTOR,1.0);
            double dfFalseEasting = poSRS->GetNormProjParm(SRS_PP_FALSE_EASTING,0.0);
            double dfFalseNorthing = poSRS->GetNormProjParm(SRS_PP_FALSE_NORTHING,0.0);
            /* OGC_BP supports representing numbers as strings for better precision */
            /* so use it (the TRUE argument selects that representation).           */
            pszProjectionOGCBP = "TC";
            poProjectionDict->Add("OriginLatitude", dfCenterLat, TRUE);
            poProjectionDict->Add("CentralMeridian", dfCenterLong, TRUE);
            poProjectionDict->Add("ScaleFactor", dfScale, TRUE);
            poProjectionDict->Add("FalseEasting", dfFalseEasting, TRUE);
            poProjectionDict->Add("FalseNorthing", dfFalseNorthing, TRUE);
        }
    }
    else if( EQUAL(pszProjection,SRS_PT_POLAR_STEREOGRAPHIC) )
    {
        double dfCenterLat = poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN,0.0);
        double dfCenterLong = poSRS->GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN,0.0);
        double dfScale = poSRS->GetNormProjParm(SRS_PP_SCALE_FACTOR,1.0);
        double dfFalseEasting = poSRS->GetNormProjParm(SRS_PP_FALSE_EASTING,0.0);
        double dfFalseNorthing = poSRS->GetNormProjParm(SRS_PP_FALSE_NORTHING,0.0);
        /* Universal Polar Stereographic is recognized by its fixed         */
        /* parameter set (lat0=+/-90, lon0=0, k=0.994, FE=FN=200000).       */
        if( fabs(dfCenterLat) == 90.0 && dfCenterLong == 0.0 &&
            dfScale == 0.994 && dfFalseEasting == 200000.0 && dfFalseNorthing == 200000.0)
        {
            pszProjectionOGCBP = "UP";
            poProjectionDict->Add("Hemisphere", (dfCenterLat > 0) ? "N" : "S");
        }
        else
        {
            pszProjectionOGCBP = "PG";
            poProjectionDict->Add("LatitudeTrueScale", dfCenterLat, TRUE);
            poProjectionDict->Add("LongitudeDownFromPole", dfCenterLong, TRUE);
            poProjectionDict->Add("ScaleFactor", dfScale, TRUE);
            poProjectionDict->Add("FalseEasting", dfFalseEasting, TRUE);
            poProjectionDict->Add("FalseNorthing", dfFalseNorthing, TRUE);
        }
    }
    else if( EQUAL(pszProjection,SRS_PT_LAMBERT_CONFORMAL_CONIC_2SP))
    {
        double dfStdP1 = poSRS->GetNormProjParm(SRS_PP_STANDARD_PARALLEL_1,0.0);
        double dfStdP2 = poSRS->GetNormProjParm(SRS_PP_STANDARD_PARALLEL_2,0.0);
        double dfCenterLat = poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN,0.0);
        double dfCenterLong = poSRS->GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN,0.0);
        double dfFalseEasting = poSRS->GetNormProjParm(SRS_PP_FALSE_EASTING,0.0);
        double dfFalseNorthing = poSRS->GetNormProjParm(SRS_PP_FALSE_NORTHING,0.0);
        pszProjectionOGCBP = "LE";
        poProjectionDict->Add("StandardParallelOne", dfStdP1, TRUE);
        poProjectionDict->Add("StandardParallelTwo", dfStdP2, TRUE);
        poProjectionDict->Add("OriginLatitude", dfCenterLat, TRUE);
        poProjectionDict->Add("CentralMeridian", dfCenterLong, TRUE);
        poProjectionDict->Add("FalseEasting", dfFalseEasting, TRUE);
        poProjectionDict->Add("FalseNorthing", dfFalseNorthing, TRUE);
    }
    else if( EQUAL(pszProjection,SRS_PT_MERCATOR_1SP) )
    {
        double dfCenterLong = poSRS->GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN,0.0);
        double dfCenterLat = poSRS->GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN,0.0);
        double dfScale = poSRS->GetNormProjParm(SRS_PP_SCALE_FACTOR,1.0);
        double dfFalseEasting = poSRS->GetNormProjParm(SRS_PP_FALSE_EASTING,0.0);
        double dfFalseNorthing = poSRS->GetNormProjParm(SRS_PP_FALSE_NORTHING,0.0);
        pszProjectionOGCBP = "MC";
        poProjectionDict->Add("CentralMeridian", dfCenterLong, TRUE);
        poProjectionDict->Add("OriginLatitude", dfCenterLat, TRUE);
        poProjectionDict->Add("ScaleFactor", dfScale, TRUE);
        poProjectionDict->Add("FalseEasting", dfFalseEasting, TRUE);
        poProjectionDict->Add("FalseNorthing", dfFalseNorthing, TRUE);
    }
#ifdef not_supported
    else if( EQUAL(pszProjection,SRS_PT_MERCATOR_2SP) )
    {
        double dfStdP1 = poSRS->GetNormProjParm(SRS_PP_STANDARD_PARALLEL_1,0.0);
        double dfCenterLong = poSRS->GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN,0.0);
        double dfFalseEasting = poSRS->GetNormProjParm(SRS_PP_FALSE_EASTING,0.0);
        double dfFalseNorthing = poSRS->GetNormProjParm(SRS_PP_FALSE_NORTHING,0.0);
        pszProjectionOGCBP = "MC";
        poProjectionDict->Add("StandardParallelOne", dfStdP1, TRUE);
        poProjectionDict->Add("CentralMeridian", dfCenterLong, TRUE);
        poProjectionDict->Add("FalseEasting", dfFalseEasting, TRUE);
        poProjectionDict->Add("FalseNorthing", dfFalseNorthing, TRUE);
    }
#endif
    else
    {
        /* Unknown projection: fall through with the default "GEOGRAPHIC"  */
        /* type but warn, so the output is at least syntactically valid.   */
        CPLError(CE_Warning, CPLE_NotSupported,
                 "Unhandled projection type (%s) for now", pszProjection);
    }
    poProjectionDict->Add("ProjectionType", pszProjectionOGCBP);
    if( poSRS->IsProjected() )
    {
        /* NOTE(review): exact floating-point equality on the linear unit; */
        /* only metre (1.0) and international foot (0.3048) are recognized.*/
        char* pszUnitName = NULL;
        double dfLinearUnits = poSRS->GetLinearUnits(&pszUnitName);
        if (dfLinearUnits == 1.0)
            poProjectionDict->Add("Units", "M");
        else if (dfLinearUnits == 0.3048)
            poProjectionDict->Add("Units", "FT");
    }
    return poProjectionDict;
}
/************************************************************************/
/* WriteSRS_OGC_BP() */
/************************************************************************/
/* Write an OGC Best Practice georeferencing dictionary (LGIDict) for the  */
/* current page, using either the dataset geotransform (as a /CTM) or its  */
/* GCPs (as a /Registration point list).  Returns the id of the generated  */
/* indirect object, or 0 when no usable georeferencing is available.       */
int GDALPDFWriter::WriteSRS_OGC_BP(GDALDataset* poSrcDS,
                                   double dfUserUnit,
                                   const char* pszNEATLINE,
                                   PDFMargins* psMargins)
{
    int nWidth = poSrcDS->GetRasterXSize();
    int nHeight = poSrcDS->GetRasterYSize();
    const char* pszWKT = poSrcDS->GetProjectionRef();
    double adfGeoTransform[6];
    int bHasGT = (poSrcDS->GetGeoTransform(adfGeoTransform) == CE_None);
    /* GCP-based georeferencing is only considered with at least 4 points; */
    /* when GCPs are used, their SRS takes precedence over the dataset's.  */
    int nGCPCount = poSrcDS->GetGCPCount();
    const GDAL_GCP* pasGCPList = (nGCPCount >= 4) ? poSrcDS->GetGCPs() : NULL;
    if (pasGCPList != NULL)
        pszWKT = poSrcDS->GetGCPProjection();
    if( !bHasGT && pasGCPList == NULL )
        return 0;
    if( pszWKT == NULL || EQUAL(pszWKT, "") )
        return 0;
    if( !bHasGT )
    {
        /* Try to derive an exact affine geotransform from the GCPs; if    */
        /* that fails, fall back to a /Registration point list below.      */
        if (!GDALGCPsToGeoTransform( nGCPCount, pasGCPList,
                                     adfGeoTransform, FALSE ))
        {
            CPLDebug("PDF", "Could not compute GT with exact match. Writing Registration then");
        }
        else
        {
            bHasGT = TRUE;
        }
    }
    OGRSpatialReferenceH hSRS = OSRNewSpatialReference(pszWKT);
    if( hSRS == NULL )
        return 0;
    const OGRSpatialReference* poSRS = (const OGRSpatialReference*)hSRS;
    GDALPDFDictionaryRW* poProjectionDict = GDALPDFBuildOGC_BP_Projection(poSRS);
    if (poProjectionDict == NULL)
    {
        OSRDestroySpatialReference(hSRS);
        return 0;
    }
    GDALPDFArrayRW* poNeatLineArray = NULL;
    if (pszNEATLINE == NULL)
        pszNEATLINE = poSrcDS->GetMetadataItem("NEATLINE");
    if( bHasGT && pszNEATLINE != NULL && !EQUAL(pszNEATLINE, "NO") && pszNEATLINE[0] != '\0' )
    {
        /* The neatline is a WKT polygon in georeferenced coordinates;     */
        /* map its exterior ring to page coordinates with the inverse GT.  */
        /* NOTE(review): createFromWkt() advances pszNEATLINE past the     */
        /* parsed text, so later comparisons see the advanced pointer.     */
        OGRGeometry* poGeom = NULL;
        OGRGeometryFactory::createFromWkt( (char**)&pszNEATLINE, NULL, &poGeom );
        if ( poGeom != NULL && wkbFlatten(poGeom->getGeometryType()) == wkbPolygon )
        {
            OGRLineString* poLS = ((OGRPolygon*)poGeom)->getExteriorRing();
            double adfGeoTransformInv[6];
            if( poLS != NULL && poLS->getNumPoints() >= 5 &&
                GDALInvGeoTransform(adfGeoTransform, adfGeoTransformInv) )
            {
                poNeatLineArray = new GDALPDFArrayRW();
                // FIXME : ensure that they are in clockwise order ?
                /* Skip the last point: a ring repeats its first vertex. */
                for(int i=0;i<poLS->getNumPoints() - 1;i++)
                {
                    double X = poLS->getX(i);
                    double Y = poLS->getY(i);
                    double x = adfGeoTransformInv[0] + X * adfGeoTransformInv[1] + Y * adfGeoTransformInv[2];
                    double y = adfGeoTransformInv[3] + X * adfGeoTransformInv[4] + Y * adfGeoTransformInv[5];
                    /* Flip Y: PDF page origin is bottom-left, raster top-left. */
                    poNeatLineArray->Add(x / dfUserUnit + psMargins->nLeft, TRUE);
                    poNeatLineArray->Add((nHeight - y) / dfUserUnit + psMargins->nBottom, TRUE);
                }
            }
        }
        delete poGeom;
    }
    if( pszNEATLINE != NULL && EQUAL(pszNEATLINE, "NO") )
    {
        // Do nothing
    }
    else if( pasGCPList && poNeatLineArray == NULL)
    {
        if (nGCPCount == 4)
        {
            /* With exactly 4 GCPs, identify the corners and emit them in  */
            /* UL, LL, LR, UR order.                                       */
            int iUL = 0, iUR = 0, iLR = 0, iLL = 0;
            GDALPDFFind4Corners(pasGCPList,
                                iUL,iUR, iLR, iLL);
            double adfNL[8];
            adfNL[0] = pasGCPList[iUL].dfGCPPixel / dfUserUnit + psMargins->nLeft;
            adfNL[1] = (nHeight - pasGCPList[iUL].dfGCPLine) / dfUserUnit + psMargins->nBottom;
            adfNL[2] = pasGCPList[iLL].dfGCPPixel / dfUserUnit + psMargins->nLeft;
            adfNL[3] = (nHeight - pasGCPList[iLL].dfGCPLine) / dfUserUnit + psMargins->nBottom;
            adfNL[4] = pasGCPList[iLR].dfGCPPixel / dfUserUnit + psMargins->nLeft;
            adfNL[5] = (nHeight - pasGCPList[iLR].dfGCPLine) / dfUserUnit + psMargins->nBottom;
            adfNL[6] = pasGCPList[iUR].dfGCPPixel / dfUserUnit + psMargins->nLeft;
            adfNL[7] = (nHeight - pasGCPList[iUR].dfGCPLine) / dfUserUnit + psMargins->nBottom;
            poNeatLineArray = new GDALPDFArrayRW();
            poNeatLineArray->Add(adfNL, 8, TRUE);
        }
        else
        {
            /* More than 4 GCPs: emit every GCP position as a vertex. */
            poNeatLineArray = new GDALPDFArrayRW();
            // FIXME : ensure that they are in clockwise order ?
            int i;
            for(i = 0; i < nGCPCount; i++)
            {
                poNeatLineArray->Add(pasGCPList[i].dfGCPPixel / dfUserUnit + psMargins->nLeft, TRUE);
                poNeatLineArray->Add((nHeight - pasGCPList[i].dfGCPLine) / dfUserUnit + psMargins->nBottom, TRUE);
            }
        }
    }
    else if (poNeatLineArray == NULL)
    {
        /* Default neatline: the four corners of the full raster. */
        poNeatLineArray = new GDALPDFArrayRW();
        poNeatLineArray->Add(0 / dfUserUnit + psMargins->nLeft, TRUE);
        poNeatLineArray->Add((nHeight - 0) / dfUserUnit + psMargins->nBottom, TRUE);
        poNeatLineArray->Add(0 / dfUserUnit + psMargins->nLeft, TRUE);
        poNeatLineArray->Add((nHeight -nHeight) / dfUserUnit + psMargins->nBottom, TRUE);
        poNeatLineArray->Add(nWidth / dfUserUnit + psMargins->nLeft, TRUE);
        poNeatLineArray->Add((nHeight -nHeight) / dfUserUnit + psMargins->nBottom, TRUE);
        poNeatLineArray->Add(nWidth / dfUserUnit + psMargins->nLeft, TRUE);
        poNeatLineArray->Add((nHeight - 0) / dfUserUnit + psMargins->nBottom, TRUE);
    }
    int nLGIDictId = AllocNewObject();
    StartObj(nLGIDictId);
    GDALPDFDictionaryRW oLGIDict;
    oLGIDict.Add("Type", GDALPDFObjectRW::CreateName("LGIDict"))
            .Add("Version", "2.1");
    if( bHasGT )
    {
        /* Build the page-space -> georeferenced-space affine matrix.      */
        /* (dfX1, dfY2) is the page position of the raster's top-left.     */
        double adfCTM[6];
        double dfX1 = psMargins->nLeft;
        double dfY2 = nHeight / dfUserUnit + psMargins->nBottom ;
        adfCTM[0] = adfGeoTransform[1] * dfUserUnit;
        adfCTM[1] = adfGeoTransform[2] * dfUserUnit;
        adfCTM[2] = - adfGeoTransform[4] * dfUserUnit;
        adfCTM[3] = - adfGeoTransform[5] * dfUserUnit;
        adfCTM[4] = adfGeoTransform[0] - (adfCTM[0] * dfX1 + adfCTM[2] * dfY2);
        adfCTM[5] = adfGeoTransform[3] - (adfCTM[1] * dfX1 + adfCTM[3] * dfY2);
        oLGIDict.Add("CTM", &((new GDALPDFArrayRW())->Add(adfCTM, 6, TRUE)));
    }
    else
    {
        /* No affine GT: register each GCP as a (page x, page y, X, Y) tuple. */
        GDALPDFArrayRW* poRegistrationArray = new GDALPDFArrayRW();
        int i;
        for(i = 0; i < nGCPCount; i++)
        {
            GDALPDFArrayRW* poPTArray = new GDALPDFArrayRW();
            poPTArray->Add(pasGCPList[i].dfGCPPixel / dfUserUnit + psMargins->nLeft, TRUE);
            poPTArray->Add((nHeight - pasGCPList[i].dfGCPLine) / dfUserUnit + psMargins->nBottom, TRUE);
            poPTArray->Add(pasGCPList[i].dfGCPX, TRUE);
            poPTArray->Add(pasGCPList[i].dfGCPY, TRUE);
            poRegistrationArray->Add(poPTArray);
        }
        oLGIDict.Add("Registration", poRegistrationArray);
    }
    if( poNeatLineArray )
    {
        oLGIDict.Add("Neatline", poNeatLineArray);
    }
    /* Use the name of the SRS root node's first child as /Description. */
    const OGR_SRSNode* poNode = poSRS->GetRoot();
    if( poNode != NULL )
        poNode = poNode->GetChild(0);
    const char* pszDescription = NULL;
    if( poNode != NULL )
        pszDescription = poNode->GetValue();
    if( pszDescription )
    {
        oLGIDict.Add("Description", pszDescription);
    }
    oLGIDict.Add("Projection", poProjectionDict);
    /* GDAL extension */
    if( CSLTestBoolean( CPLGetConfigOption("GDAL_PDF_OGC_BP_WRITE_WKT", "TRUE") ) )
        poProjectionDict->Add("WKT", pszWKT);
    VSIFPrintfL(fp, "%s\n", oLGIDict.Serialize().c_str());
    EndObj();
    OSRDestroySpatialReference(hSRS);
    return nLGIDictId;
}
/************************************************************************/
/* GDALPDFGetValueFromDSOrOption() */
/************************************************************************/
/* Look up pszKey in the creation options first, falling back to the       */
/* source dataset metadata.  An empty string is treated as "unset" and     */
/* reported as NULL.                                                       */
static const char* GDALPDFGetValueFromDSOrOption(GDALDataset* poSrcDS,
                                                 char** papszOptions,
                                                 const char* pszKey)
{
    const char* pszVal = CSLFetchNameValue(papszOptions, pszKey);
    if (pszVal == NULL)
        pszVal = poSrcDS->GetMetadataItem(pszKey);
    if (pszVal == NULL)
        return NULL;
    return (pszVal[0] == '\0') ? NULL : pszVal;
}
/************************************************************************/
/* SetInfo() */
/************************************************************************/
/* Write (or rewrite) the document /Info dictionary from the creation      */
/* options and/or source dataset metadata.  Returns the object id of the   */
/* /Info dictionary, or 0 when none of the recognized keys is set.         */
int GDALPDFWriter::SetInfo(GDALDataset* poSrcDS,
                           char** papszOptions)
{
    /* Option/metadata keys, and the matching PDF /Info dictionary names,  */
    /* kept in the same order so entries are emitted as before.            */
    static const char* const apszSrcKeys[] =
        { "AUTHOR", "PRODUCER", "CREATOR", "CREATION_DATE",
          "SUBJECT", "TITLE", "KEYWORDS" };
    static const char* const apszPDFKeys[] =
        { "Author", "Producer", "Creator", "CreationDate",
          "Subject", "Title", "Keywords" };
    const int nKeys = 7;

    const char* apszValues[7];
    int bFoundOne = FALSE;
    for(int i = 0; i < nKeys; i++)
    {
        apszValues[i] = GDALPDFGetValueFromDSOrOption(poSrcDS, papszOptions,
                                                      apszSrcKeys[i]);
        if (apszValues[i] != NULL)
            bFoundOne = TRUE;
    }
    if (!bFoundOne)
        return 0;

    if (nInfoId == 0)
        nInfoId = AllocNewObject();
    StartObj(nInfoId, nInfoGen);

    GDALPDFDictionaryRW oDict;
    for(int i = 0; i < nKeys; i++)
    {
        if (apszValues[i] != NULL)
            oDict.Add(apszPDFKeys[i], apszValues[i]);
    }
    VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
    EndObj();
    return nInfoId;
}
/************************************************************************/
/* SetXMP() */
/************************************************************************/
/* Write the XMP metadata packet as a /Metadata stream object.  pszXMP     */
/* may be NULL (then the source dataset's "xml:XMP" metadata is used), an  */
/* XML packet, or a value starting with "NO" / an empty string to disable  */
/* XMP output.  Returns the object id, or 0 when nothing is written.       */
int GDALPDFWriter::SetXMP(GDALDataset* poSrcDS,
                          const char* pszXMP)
{
    /* An explicit value starting with "NO", or an empty string, disables XMP. */
    if (pszXMP != NULL && (EQUALN(pszXMP, "NO", 2) || pszXMP[0] == '\0'))
        return 0;

    /* Fall back to the source dataset's XMP metadata domain. */
    char** papszXMPMD = poSrcDS->GetMetadata("xml:XMP");
    if (pszXMP == NULL && papszXMPMD != NULL && papszXMPMD[0] != NULL)
        pszXMP = papszXMPMD[0];
    if (pszXMP == NULL)
        return 0;

    /* Reject packets that are not well-formed XML. */
    CPLXMLNode* psParsed = CPLParseXMLString(pszXMP);
    if (psParsed == NULL)
        return 0;
    CPLDestroyXMLNode(psParsed);

    if(nXMPId == 0)
        nXMPId = AllocNewObject();
    StartObj(nXMPId, nXMPGen);

    GDALPDFDictionaryRW oDict;
    oDict.Add("Type", GDALPDFObjectRW::CreateName("Metadata"))
         .Add("Subtype", GDALPDFObjectRW::CreateName("XML"))
         .Add("Length", (int)strlen(pszXMP));
    VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
    VSIFPrintfL(fp, "stream\n");
    VSIFPrintfL(fp, "%s\n", pszXMP);
    VSIFPrintfL(fp, "endstream\n");
    EndObj();
    return nXMPId;
}
/************************************************************************/
/* WriteOCG() */
/************************************************************************/
int GDALPDFWriter::WriteOCG(const char* pszLayerName, int nParentId)
{
if (pszLayerName == NULL || pszLayerName[0] == '\0')
return 0;
int nOGCId = AllocNewObject();
GDALPDFOCGDesc oOCGDesc;
oOCGDesc.nId = nOGCId;
oOCGDesc.nParentId = nParentId;
oOCGDesc.osLayerName = pszLayerName;
asOCGs.push_back(oOCGDesc);
StartObj(nOGCId);
{
GDALPDFDictionaryRW oDict;
oDict.Add("Type", GDALPDFObjectRW::CreateName("OCG"));
oDict.Add("Name", pszLayerName);
VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
}
EndObj();
return nOGCId;
}
/************************************************************************/
/* StartPage() */
/************************************************************************/
/* Create the /Page object for poClippingDS, write its georeferencing      */
/* (ISO32000 viewport and/or OGC_BP LGIDict depending on pszGEO_ENCODING), */
/* allocate the ids of the content/resources/annotations objects, and      */
/* record the page context used by subsequent WriteImagery()/vector calls. */
int GDALPDFWriter::StartPage(GDALDataset* poClippingDS,
                             double dfDPI,
                             const char* pszGEO_ENCODING,
                             const char* pszNEATLINE,
                             PDFMargins* psMargins,
                             PDFCompressMethod eStreamCompressMethod,
                             int bHasOGRData)
{
    int nWidth = poClippingDS->GetRasterXSize();
    int nHeight = poClippingDS->GetRasterYSize();
    int nBands = poClippingDS->GetRasterCount();
    /* Page dimensions in PDF user-space units, including margins. */
    double dfUserUnit = dfDPI * USER_UNIT_IN_INCH;
    double dfWidthInUserUnit = nWidth / dfUserUnit + psMargins->nLeft + psMargins->nRight;
    double dfHeightInUserUnit = nHeight / dfUserUnit + psMargins->nBottom + psMargins->nTop;
    /* Reserve ids now; the corresponding objects are written later. */
    int nPageId = AllocNewObject();
    asPageId.push_back(nPageId);
    int nContentId = AllocNewObject();
    int nResourcesId = AllocNewObject();
    int nAnnotsId = AllocNewObject();
    /* "BOTH" enables both georeferencing encodings at once. */
    int bISO32000 = EQUAL(pszGEO_ENCODING, "ISO32000") ||
                    EQUAL(pszGEO_ENCODING, "BOTH");
    int bOGC_BP = EQUAL(pszGEO_ENCODING, "OGC_BP") ||
                  EQUAL(pszGEO_ENCODING, "BOTH");
    int nViewportId = 0;
    if( bISO32000 )
        nViewportId = WriteSRS_ISO32000(poClippingDS, dfUserUnit, pszNEATLINE, psMargins, TRUE);
    int nLGIDictId = 0;
    if( bOGC_BP )
        nLGIDictId = WriteSRS_OGC_BP(poClippingDS, dfUserUnit, pszNEATLINE, psMargins);
    StartObj(nPageId);
    GDALPDFDictionaryRW oDictPage;
    oDictPage.Add("Type", GDALPDFObjectRW::CreateName("Page"))
             .Add("Parent", nPageResourceId, 0)
             .Add("MediaBox", &((new GDALPDFArrayRW())
                                ->Add(0).Add(0).Add(dfWidthInUserUnit).Add(dfHeightInUserUnit)))
             .Add("UserUnit", dfUserUnit)
             .Add("Contents", nContentId, 0)
             .Add("Resources", nResourcesId, 0)
             .Add("Annots", nAnnotsId, 0);
    if (nBands == 4)
    {
        /* 4-band (RGBA) output: declare a transparency group in RGB. */
        oDictPage.Add("Group",
                      &((new GDALPDFDictionaryRW())
                        ->Add("Type", GDALPDFObjectRW::CreateName("Group"))
                         .Add("S", GDALPDFObjectRW::CreateName("Transparency"))
                         .Add("CS", GDALPDFObjectRW::CreateName("DeviceRGB"))));
    }
    if (nViewportId)
    {
        oDictPage.Add("VP", &((new GDALPDFArrayRW())
                               ->Add(nViewportId, 0)));
    }
    if (nLGIDictId)
    {
        oDictPage.Add("LGIDict", nLGIDictId, 0);
    }
    if (bHasOGRData)
        oDictPage.Add("StructParents", 0);
    VSIFPrintfL(fp, "%s\n", oDictPage.Serialize().c_str());
    EndObj();
    /* Remember everything the per-layer writers will need for this page. */
    oPageContext.poClippingDS = poClippingDS;
    oPageContext.nPageId = nPageId;
    oPageContext.nContentId = nContentId;
    oPageContext.nResourcesId = nResourcesId;
    oPageContext.nAnnotsId = nAnnotsId;
    oPageContext.dfDPI = dfDPI;
    oPageContext.sMargins = *psMargins;
    oPageContext.eStreamCompressMethod = eStreamCompressMethod;
    return TRUE;
}
/************************************************************************/
/* WriteColorTable() */
/************************************************************************/
/* If the first band of poSrcDS has a color table that fits in a PDF       */
/* /Indexed color space (<= 256 entries), write the /Indexed color-space  */
/* object plus its RGB lookup-table stream.  Returns the id of the color  */
/* space object, or 0 when there is no usable color table.                 */
int GDALPDFWriter::WriteColorTable(GDALDataset* poSrcDS)
{
    GDALColorTable* poCT = NULL;
    if (poSrcDS->GetRasterCount() > 0)
        poCT = poSrcDS->GetRasterBand(1)->GetColorTable();
    if (poCT == NULL || poCT->GetColorEntryCount() > 256)
        return 0;

    const int nColors = poCT->GetColorEntryCount();
    const int nColorTableId = AllocNewObject();
    const int nLookupTableId = AllocNewObject();

    /* Index object: [/Indexed [/DeviceRGB] hival lookup-stream-ref] */
    StartObj(nColorTableId);
    {
        GDALPDFArrayRW oArray;
        oArray.Add(GDALPDFObjectRW::CreateName("Indexed"))
              .Add(&((new GDALPDFArrayRW())->Add(GDALPDFObjectRW::CreateName("DeviceRGB"))))
              .Add(nColors-1)
              .Add(nLookupTableId, 0);
        VSIFPrintfL(fp, "%s\n", oArray.Serialize().c_str());
    }
    EndObj();

    /* Lookup table object: raw stream of nColors RGB triplets. */
    StartObj(nLookupTableId);
    {
        GDALPDFDictionaryRW oDict;
        oDict.Add("Length", nColors * 3);
        VSIFPrintfL(fp, "%s %% Lookup table\n", oDict.Serialize().c_str());
    }
    VSIFPrintfL(fp, "stream\n");
    GByte abyRGB[768];   /* 256 entries * 3 components max */
    for(int i = 0; i < nColors; i++)
    {
        const GDALColorEntry* psEntry = poCT->GetColorEntry(i);
        abyRGB[3 * i + 0] = (GByte)psEntry->c1;
        abyRGB[3 * i + 1] = (GByte)psEntry->c2;
        abyRGB[3 * i + 2] = (GByte)psEntry->c3;
    }
    VSIFWriteL(abyRGB, 3 * nColors, 1, fp);
    VSIFPrintfL(fp, "\n");
    VSIFPrintfL(fp, "endstream\n");
    EndObj();

    return nColorTableId;
}
/************************************************************************/
/* WriteImagery() */
/************************************************************************/
/* Tile poDS into nBlockXSize x nBlockYSize blocks, write each block as a  */
/* PDF image object, and record the placement of every tile on the page.   */
/* Returns TRUE on success, FALSE as soon as a block fails to be written.  */
int GDALPDFWriter::WriteImagery(GDALDataset* poDS,
                                const char* pszLayerName,
                                PDFCompressMethod eCompressMethod,
                                int nPredictor,
                                int nJPEGQuality,
                                const char* pszJPEG2000_DRIVER,
                                int nBlockXSize, int nBlockYSize,
                                GDALProgressFunc pfnProgress,
                                void * pProgressData)
{
    const int nWidth = poDS->GetRasterXSize();
    const int nHeight = poDS->GetRasterYSize();
    const double dfUserUnit = oPageContext.dfDPI * USER_UNIT_IN_INCH;

    if( pfnProgress == NULL )
        pfnProgress = GDALDummyProgress;

    GDALPDFRasterDesc oRasterDesc;
    oRasterDesc.nOCGRasterId = WriteOCG(pszLayerName);

    /* The palette (if any) is emitted once and shared by all tiles. */
    const int nColorTableId = WriteColorTable(poDS);

    const int nXBlocks = (nWidth + nBlockXSize - 1) / nBlockXSize;
    const int nYBlocks = (nHeight + nBlockYSize - 1) / nBlockYSize;
    const int nBlocks = nXBlocks * nYBlocks;

    for(int iYBlock = 0; iYBlock < nYBlocks; iYBlock++)
    {
        for(int iXBlock = 0; iXBlock < nXBlocks; iXBlock++)
        {
            const int nX = iXBlock * nBlockXSize;
            const int nY = iYBlock * nBlockYSize;
            /* Edge tiles may be smaller than the nominal block size. */
            const int nReqWidth = MIN(nBlockXSize, nWidth - nX);
            const int nReqHeight = MIN(nBlockYSize, nHeight - nY);
            const int iImage = iYBlock * nXBlocks + iXBlock;

            /* Sub-progress covering this tile's share of the whole job. */
            void* pScaledData = GDALCreateScaledProgress( iImage / (double)nBlocks,
                                                          (iImage + 1) / (double)nBlocks,
                                                          pfnProgress, pProgressData);
            const int nImageId = WriteBlock(poDS,
                                            nX,
                                            nY,
                                            nReqWidth, nReqHeight,
                                            nColorTableId,
                                            eCompressMethod,
                                            nPredictor,
                                            nJPEGQuality,
                                            pszJPEG2000_DRIVER,
                                            GDALScaledProgress,
                                            pScaledData);
            GDALDestroyScaledProgress(pScaledData);
            if (nImageId == 0)
                return FALSE;

            /* Tile placement in page user-space units (PDF origin is     */
            /* bottom-left while the raster origin is top-left).          */
            GDALPDFImageDesc oImageDesc;
            oImageDesc.nImageId = nImageId;
            oImageDesc.dfXOff = nX / dfUserUnit + oPageContext.sMargins.nLeft;
            oImageDesc.dfYOff = (nHeight - nY - nReqHeight) / dfUserUnit + oPageContext.sMargins.nBottom;
            oImageDesc.dfXSize = nReqWidth / dfUserUnit;
            oImageDesc.dfYSize = nReqHeight / dfUserUnit;
            oRasterDesc.asImageDesc.push_back(oImageDesc);
        }
    }

    oPageContext.asRasterDesc.push_back(oRasterDesc);
    return TRUE;
}
/************************************************************************/
/* WriteClippedImagery() */
/************************************************************************/
/* Like WriteImagery(), but clips each tile of poDS against the extent of  */
/* the page's clipping dataset before writing it, so only the overlapping  */
/* portion is emitted and positioned on the page.  Assumes both datasets   */
/* share the same SRS.  Returns TRUE on success, FALSE on block failure.   */
int GDALPDFWriter::WriteClippedImagery(
                                GDALDataset* poDS,
                                const char* pszLayerName,
                                PDFCompressMethod eCompressMethod,
                                int nPredictor,
                                int nJPEGQuality,
                                const char* pszJPEG2000_DRIVER,
                                int nBlockXSize, int nBlockYSize,
                                GDALProgressFunc pfnProgress,
                                void * pProgressData)
{
    double dfUserUnit = oPageContext.dfDPI * USER_UNIT_IN_INCH;
    GDALPDFRasterDesc oRasterDesc;
    /* Get clipping dataset bounding-box */
    double adfClippingGeoTransform[6];
    GDALDataset* poClippingDS = oPageContext.poClippingDS;
    poClippingDS->GetGeoTransform(adfClippingGeoTransform);
    int nClippingWidth = poClippingDS->GetRasterXSize();
    int nClippingHeight = poClippingDS->GetRasterYSize();
    double dfClippingMinX = adfClippingGeoTransform[0];
    double dfClippingMaxX = dfClippingMinX + nClippingWidth * adfClippingGeoTransform[1];
    double dfClippingMaxY = adfClippingGeoTransform[3];
    double dfClippingMinY = dfClippingMaxY + nClippingHeight * adfClippingGeoTransform[5];
    /* North-up rasters have a negative GT[5]; normalize so min < max. */
    if( dfClippingMaxY < dfClippingMinY )
    {
        double dfTmp = dfClippingMinY;
        dfClippingMinY = dfClippingMaxY;
        dfClippingMaxY = dfTmp;
    }
    /* Get current dataset dataset bounding-box */
    double adfGeoTransform[6];
    poDS->GetGeoTransform(adfGeoTransform);
    int nWidth = poDS->GetRasterXSize();
    int nHeight = poDS->GetRasterYSize();
    double dfRasterMinX = adfGeoTransform[0];
    //double dfRasterMaxX = dfRasterMinX + nWidth * adfGeoTransform[1];
    double dfRasterMaxY = adfGeoTransform[3];
    double dfRasterMinY = dfRasterMaxY + nHeight * adfGeoTransform[5];
    if( dfRasterMaxY < dfRasterMinY )
    {
        double dfTmp = dfRasterMinY;
        dfRasterMinY = dfRasterMaxY;
        dfRasterMaxY = dfTmp;
    }
    if( pfnProgress == NULL )
        pfnProgress = GDALDummyProgress;
    oRasterDesc.nOCGRasterId = WriteOCG(pszLayerName);
    /* Does the source image has a color table ? */
    int nColorTableId = WriteColorTable(poDS);
    int nXBlocks = (nWidth + nBlockXSize - 1) / nBlockXSize;
    int nYBlocks = (nHeight + nBlockYSize - 1) / nBlockYSize;
    int nBlocks = nXBlocks * nYBlocks;
    int nBlockXOff, nBlockYOff;
    for(nBlockYOff = 0; nBlockYOff < nYBlocks; nBlockYOff ++)
    {
        for(nBlockXOff = 0; nBlockXOff < nXBlocks; nBlockXOff ++)
        {
            /* Edge tiles may be smaller than the nominal block size. */
            int nReqWidth = MIN(nBlockXSize, nWidth - nBlockXOff * nBlockXSize);
            int nReqHeight = MIN(nBlockYSize, nHeight - nBlockYOff * nBlockYSize);
            int iImage = nBlockYOff * nXBlocks + nBlockXOff;
            /* Sub-progress covering this tile's share of the whole job. */
            void* pScaledData = GDALCreateScaledProgress( iImage / (double)nBlocks,
                                                          (iImage + 1) / (double)nBlocks,
                                                          pfnProgress, pProgressData);
            int nX = nBlockXOff * nBlockXSize;
            int nY = nBlockYOff * nBlockYSize;
            /* Compute extent of block to write */
            double dfBlockMinX = adfGeoTransform[0] + nX * adfGeoTransform[1];
            double dfBlockMaxX = adfGeoTransform[0] + (nX + nReqWidth) * adfGeoTransform[1];
            double dfBlockMinY = adfGeoTransform[3] + (nY + nReqHeight) * adfGeoTransform[5];
            double dfBlockMaxY = adfGeoTransform[3] + nY * adfGeoTransform[5];
            if( dfBlockMaxY < dfBlockMinY )
            {
                double dfTmp = dfBlockMinY;
                dfBlockMinY = dfBlockMaxY;
                dfBlockMaxY = dfTmp;
            }
            /* Clip the extent of the block with the extent of the main raster */
            double dfIntersectMinX = MAX(dfBlockMinX, dfClippingMinX);
            double dfIntersectMinY = MAX(dfBlockMinY, dfClippingMinY);
            double dfIntersectMaxX = MIN(dfBlockMaxX, dfClippingMaxX);
            double dfIntersectMaxY = MIN(dfBlockMaxY, dfClippingMaxY);
            /* Skip tiles entirely outside the clipping extent. */
            if( dfIntersectMinX < dfIntersectMaxX &&
                dfIntersectMinY < dfIntersectMaxY )
            {
                /* Re-compute (x,y,width,height) subwindow of current raster from */
                /* the extent of the clipped block */
                nX = (int)((dfIntersectMinX - dfRasterMinX) / adfGeoTransform[1] + 0.5);
                if( adfGeoTransform[5] < 0 )
                    nY = (int)((dfRasterMaxY - dfIntersectMaxY) / (-adfGeoTransform[5]) + 0.5);
                else
                    nY = (int)((dfIntersectMinY - dfRasterMinY) / adfGeoTransform[5] + 0.5);
                nReqWidth = (int)((dfIntersectMaxX - dfRasterMinX) / adfGeoTransform[1] + 0.5) - nX;
                if( adfGeoTransform[5] < 0 )
                    nReqHeight = (int)((dfRasterMaxY - dfIntersectMinY) / (-adfGeoTransform[5]) + 0.5) - nY;
                else
                    nReqHeight = (int)((dfIntersectMaxY - dfRasterMinY) / adfGeoTransform[5] + 0.5) - nY;
                if( nReqWidth > 0 && nReqHeight > 0)
                {
                    int nImageId = WriteBlock(poDS,
                                              nX,
                                              nY,
                                              nReqWidth, nReqHeight,
                                              nColorTableId,
                                              eCompressMethod,
                                              nPredictor,
                                              nJPEGQuality,
                                              pszJPEG2000_DRIVER,
                                              GDALScaledProgress,
                                              pScaledData);
                    if (nImageId == 0)
                    {
                        GDALDestroyScaledProgress(pScaledData);
                        return FALSE;
                    }
                    /* Compute the subwindow in image coordinates of the main raster corresponding */
                    /* to the extent of the clipped block */
                    double dfXInClippingUnits, dfYInClippingUnits, dfReqWidthInClippingUnits, dfReqHeightInClippingUnits;
                    dfXInClippingUnits = (dfIntersectMinX - dfClippingMinX) / adfClippingGeoTransform[1];
                    if( adfClippingGeoTransform[5] < 0 )
                        dfYInClippingUnits = (dfClippingMaxY - dfIntersectMaxY) / (-adfClippingGeoTransform[5]);
                    else
                        dfYInClippingUnits = (dfIntersectMinY - dfClippingMinY) / adfClippingGeoTransform[5];
                    dfReqWidthInClippingUnits = (dfIntersectMaxX - dfClippingMinX) / adfClippingGeoTransform[1] - dfXInClippingUnits;
                    if( adfClippingGeoTransform[5] < 0 )
                        dfReqHeightInClippingUnits = (dfClippingMaxY - dfIntersectMinY) / (-adfClippingGeoTransform[5]) - dfYInClippingUnits;
                    else
                        dfReqHeightInClippingUnits = (dfIntersectMaxY - dfClippingMinY) / adfClippingGeoTransform[5] - dfYInClippingUnits;
                    /* Tile placement in page units (PDF origin bottom-left). */
                    GDALPDFImageDesc oImageDesc;
                    oImageDesc.nImageId = nImageId;
                    oImageDesc.dfXOff = dfXInClippingUnits / dfUserUnit + oPageContext.sMargins.nLeft;
                    oImageDesc.dfYOff = (nClippingHeight - dfYInClippingUnits - dfReqHeightInClippingUnits) / dfUserUnit + oPageContext.sMargins.nBottom;
                    oImageDesc.dfXSize = dfReqWidthInClippingUnits / dfUserUnit;
                    oImageDesc.dfYSize = dfReqHeightInClippingUnits / dfUserUnit;
                    oRasterDesc.asImageDesc.push_back(oImageDesc);
                }
            }
            GDALDestroyScaledProgress(pScaledData);
        }
    }
    oPageContext.asRasterDesc.push_back(oRasterDesc);
    return TRUE;
}
#ifdef OGR_ENABLED
/************************************************************************/
/* WriteOGRDataSource() */
/************************************************************************/
/* Open pszOGRDataSource and write every one of its layers as PDF vector   */
/* content.  pszOGRDisplayLayerNames may be a comma-separated list that    */
/* overrides the native layer names (used only when it has at least as     */
/* many entries as there are layers).  Returns FALSE if the source cannot  */
/* be opened, TRUE otherwise.                                              */
int GDALPDFWriter::WriteOGRDataSource(const char* pszOGRDataSource,
                                      const char* pszOGRDisplayField,
                                      const char* pszOGRDisplayLayerNames,
                                      const char* pszOGRLinkField,
                                      int bWriteOGRAttributes)
{
    if (OGRGetDriverCount() == 0)
        OGRRegisterAll();

    OGRDataSourceH hDS = OGROpen(pszOGRDataSource, 0, NULL);
    if (hDS == NULL)
        return FALSE;

    char** papszLayerNames = CSLTokenizeString2(pszOGRDisplayLayerNames, ",", 0);
    const int nLayers = OGR_DS_GetLayerCount(hDS);
    int iObj = 0;
    for(int iLayer = 0; iLayer < nLayers; iLayer++)
    {
        /* Use the override name only when a full list was supplied. */
        CPLString osLayerName;
        if (CSLCount(papszLayerNames) < nLayers)
            osLayerName = OGR_L_GetName(OGR_DS_GetLayer(hDS, iLayer));
        else
            osLayerName = papszLayerNames[iLayer];

        WriteOGRLayer(hDS, iLayer,
                      pszOGRDisplayField,
                      pszOGRLinkField,
                      osLayerName,
                      bWriteOGRAttributes,
                      iObj);
    }

    OGRReleaseDataSource(hDS);
    CSLDestroy(papszLayerNames);
    return TRUE;
}
/************************************************************************/
/* StartOGRLayer() */
/************************************************************************/
/* Begin a vector layer: create its optional-content group and, when       */
/* attributes will be written, reserve the id of its structure-tree        */
/* element.  Returns the filled-in layer descriptor.                       */
GDALPDFLayerDesc GDALPDFWriter::StartOGRLayer(CPLString osLayerName,
                                              int bWriteOGRAttributes)
{
    GDALPDFLayerDesc oDesc;
    oDesc.osLayerName = osLayerName;
    oDesc.bWriteOGRAttributes = bWriteOGRAttributes;
    oDesc.nOGCId = WriteOCG(osLayerName);
    oDesc.nFeatureLayerId = bWriteOGRAttributes ? AllocNewObject() : 0;
    oDesc.nOCGTextId = 0;
    return oDesc;
}
/************************************************************************/
/* EndOGRLayer() */
/************************************************************************/
/* Finish a vector layer: when attributes were requested, emit the layer's */
/* structure-tree element referencing every per-feature user-properties    */
/* object, then record the layer descriptor in the page context.           */
void GDALPDFWriter::EndOGRLayer(GDALPDFLayerDesc& osVectorDesc)
{
    if (osVectorDesc.bWriteOGRAttributes)
    {
        StartObj(osVectorDesc.nFeatureLayerId);

        GDALPDFDictionaryRW oDict;
        oDict.Add("A", &(new GDALPDFDictionaryRW())->Add("O",
                GDALPDFObjectRW::CreateName("UserProperties")));

        /* Children: one entry per feature written in this layer. */
        GDALPDFArrayRW* poKids = new GDALPDFArrayRW();
        oDict.Add("K", poKids);
        for(int i = 0; i < (int)osVectorDesc.aUserPropertiesIds.size(); i++)
            poKids->Add(osVectorDesc.aUserPropertiesIds[i], 0);

        /* Lazily allocate the structure-tree root shared by all layers. */
        if (nStructTreeRootId == 0)
            nStructTreeRootId = AllocNewObject();

        oDict.Add("P", nStructTreeRootId, 0);
        oDict.Add("S", GDALPDFObjectRW::CreateName("Feature"));
        oDict.Add("T", osVectorDesc.osLayerName);

        VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
        EndObj();
    }

    oPageContext.asVectorDesc.push_back(osVectorDesc);
}
/************************************************************************/
/* WriteOGRLayer() */
/************************************************************************/
/* Serialize one OGR layer of hDS into the current PDF page.             */
/* Each feature is written through WriteOGRFeature(). Returns FALSE only */
/* when the clipping raster has no geotransform, TRUE otherwise.         */
/* iObj is the running MCID counter shared across layers (in/out).       */
int GDALPDFWriter::WriteOGRLayer(OGRDataSourceH hDS,
                                 int iLayer,
                                 const char* pszOGRDisplayField,
                                 const char* pszOGRLinkField,
                                 CPLString osLayerName,
                                 int bWriteOGRAttributes,
                                 int& iObj)
{
    GDALDataset* poClippingDS = oPageContext.poClippingDS;
    double adfGeoTransform[6];
    if (poClippingDS->GetGeoTransform(adfGeoTransform) != CE_None)
        return FALSE;

    GDALPDFLayerDesc osVectorDesc = StartOGRLayer(osLayerName,
                                                  bWriteOGRAttributes);
    OGRLayerH hLyr = OGR_DS_GetLayer(hDS, iLayer);

    /* Build a coordinate transformation from the vector SRS to the      */
    /* raster SRS when both are known and differ; warn on partial info.  */
    const char* pszWKT = poClippingDS->GetProjectionRef();
    OGRSpatialReferenceH hGDAL_SRS = NULL;
    if( pszWKT && pszWKT[0] != '\0' )
        hGDAL_SRS = OSRNewSpatialReference(pszWKT);
    OGRSpatialReferenceH hOGR_SRS = OGR_L_GetSpatialRef(hLyr);
    OGRCoordinateTransformationH hCT = NULL;

    if( hGDAL_SRS == NULL && hOGR_SRS != NULL )
    {
        CPLError(CE_Warning, CPLE_AppDefined,
                 "Vector layer has a SRS set, but Raster layer has no SRS set. Assuming they are the same.");
    }
    else if( hGDAL_SRS != NULL && hOGR_SRS == NULL )
    {
        CPLError(CE_Warning, CPLE_AppDefined,
                 "Vector layer has no SRS set, but Raster layer has a SRS set. Assuming they are the same.");
    }
    else if( hGDAL_SRS != NULL && hOGR_SRS != NULL )
    {
        if (!OSRIsSame(hGDAL_SRS, hOGR_SRS))
        {
            hCT = OCTNewCoordinateTransformation( hOGR_SRS, hGDAL_SRS );
            if( hCT == NULL )
            {
                CPLError(CE_Warning, CPLE_AppDefined,
                         "Cannot compute coordinate transformation from vector SRS to raster SRS");
            }
        }
    }

    /* Without reprojection we can let OGR discard features outside the  */
    /* raster extent up-front; with reprojection the per-feature envelope */
    /* test in WriteOGRFeature() does the clipping instead.              */
    if( hCT == NULL )
    {
        double dfXMin = adfGeoTransform[0];
        double dfYMin = adfGeoTransform[3] + poClippingDS->GetRasterYSize() * adfGeoTransform[5];
        double dfXMax = adfGeoTransform[0] + poClippingDS->GetRasterXSize() * adfGeoTransform[1];
        double dfYMax = adfGeoTransform[3];
        OGR_L_SetSpatialFilterRect(hLyr, dfXMin, dfYMin, dfXMax, dfYMax);
    }

    /* Make sure iteration starts at the first feature, even if the      */
    /* layer has already been (partially) read by the caller.            */
    OGR_L_ResetReading(hLyr);

    OGRFeatureH hFeat;
    int iObjLayer = 0;
    while( (hFeat = OGR_L_GetNextFeature(hLyr)) != NULL)
    {
        WriteOGRFeature(osVectorDesc,
                        hFeat,
                        hCT,
                        pszOGRDisplayField,
                        pszOGRLinkField,
                        bWriteOGRAttributes,
                        iObj,
                        iObjLayer);
        OGR_F_Destroy(hFeat);
    }

    EndOGRLayer(osVectorDesc);

    if( hCT != NULL )
        OCTDestroyCoordinateTransformation(hCT);
    if( hGDAL_SRS != NULL )
        OSRDestroySpatialReference(hGDAL_SRS);

    return TRUE;
}
/************************************************************************/
/* DrawGeometry() */
/************************************************************************/
/* Emit PDF content-stream path operators for hGeom, transformed to page */
/* space via adfMatrix (x' = x*m[1]+m[0], y' = y*m[3]+m[2]).             */
/* bPaint=FALSE suppresses the final paint operator, used when recursing */
/* into parts so the container paints the whole path once.               */
static void DrawGeometry(VSILFILE* fp, OGRGeometryH hGeom, double adfMatrix[4], int bPaint = TRUE)
{
    const OGRwkbGeometryType eType = wkbFlatten(OGR_G_GetGeometryType(hGeom));
    switch( eType )
    {
        case wkbLineString:
        {
            /* First vertex starts the subpath ('m'), the rest extend it ('l'). */
            const int nPoints = OGR_G_GetPointCount(hGeom);
            for( int iPt = 0; iPt < nPoints; iPt++ )
            {
                const double dfPageX = OGR_G_GetX(hGeom, iPt) * adfMatrix[1] + adfMatrix[0];
                const double dfPageY = OGR_G_GetY(hGeom, iPt) * adfMatrix[3] + adfMatrix[2];
                VSIFPrintfL(fp, "%f %f %c\n", dfPageX, dfPageY, (iPt == 0) ? 'm' : 'l');
            }
            if (bPaint)
                VSIFPrintfL(fp, "S\n");  /* stroke */
            break;
        }

        case wkbPolygon:
        {
            /* Each ring becomes a closed ('h') subpath. */
            const int nRings = OGR_G_GetGeometryCount(hGeom);
            for( int iRing = 0; iRing < nRings; iRing++ )
            {
                DrawGeometry(fp, OGR_G_GetGeometryRef(hGeom, iRing), adfMatrix, FALSE);
                VSIFPrintfL(fp, "h\n");
            }
            if (bPaint)
                VSIFPrintfL(fp, "b*\n");  /* close, even-odd fill and stroke */
            break;
        }

        case wkbMultiLineString:
        {
            const int nParts = OGR_G_GetGeometryCount(hGeom);
            for( int iPart = 0; iPart < nParts; iPart++ )
            {
                DrawGeometry(fp, OGR_G_GetGeometryRef(hGeom, iPart), adfMatrix, FALSE);
            }
            if (bPaint)
                VSIFPrintfL(fp, "S\n");
            break;
        }

        case wkbMultiPolygon:
        {
            const int nParts = OGR_G_GetGeometryCount(hGeom);
            for( int iPart = 0; iPart < nParts; iPart++ )
            {
                DrawGeometry(fp, OGR_G_GetGeometryRef(hGeom, iPart), adfMatrix, FALSE);
            }
            if (bPaint)
                VSIFPrintfL(fp, "b*\n");
            break;
        }

        default:
            /* Points are handled by the caller; other types are ignored. */
            break;
    }
}
/************************************************************************/
/* WriteOGRFeature() */
/************************************************************************/
/* Serialize one OGR feature into the PDF page:                          */
/*  - a Form XObject with the vector drawing (recorded in aIds),         */
/*  - optionally a text-label XObject (aIdsText; 0 when no label),       */
/*  - optionally a Link annotation when pszOGRLinkField is set,          */
/*  - optionally a structure-tree user-properties object (attributes).   */
/* Always returns TRUE; features that cannot be drawn (no geometry,      */
/* failed reprojection, outside the raster extent) are silently skipped. */
/* iObj (page-wide MCID counter) and iObjLayer (per-layer counter) are   */
/* incremented on every feature actually written.                        */
int GDALPDFWriter::WriteOGRFeature(GDALPDFLayerDesc& osVectorDesc,
OGRFeatureH hFeat,
OGRCoordinateTransformationH hCT,
const char* pszOGRDisplayField,
const char* pszOGRLinkField,
int bWriteOGRAttributes,
int& iObj,
int& iObjLayer)
{
GDALDataset* poClippingDS = oPageContext.poClippingDS;
int nHeight = poClippingDS->GetRasterYSize();
double dfUserUnit = oPageContext.dfDPI * USER_UNIT_IN_INCH;
double adfGeoTransform[6];
poClippingDS->GetGeoTransform(adfGeoTransform);
/* Affine transform from georeferenced to page coordinates:             */
/*   page_x = geo_x * adfMatrix[1] + adfMatrix[0]                       */
/*   page_y = geo_y * adfMatrix[3] + adfMatrix[2]                       */
double adfMatrix[4];
adfMatrix[0] = - adfGeoTransform[0] / (adfGeoTransform[1] * dfUserUnit) + oPageContext.sMargins.nLeft;
adfMatrix[1] = 1.0 / (adfGeoTransform[1] * dfUserUnit);
adfMatrix[2] = - (adfGeoTransform[3] + adfGeoTransform[5] * nHeight) / (-adfGeoTransform[5] * dfUserUnit) + oPageContext.sMargins.nBottom;
adfMatrix[3] = 1.0 / (-adfGeoTransform[5] * dfUserUnit);
OGRGeometryH hGeom = OGR_F_GetGeometryRef(hFeat);
if (hGeom == NULL)
{
/* Nothing to draw: skip the feature but report success. */
return TRUE;
}
OGREnvelope sEnvelope;
if( hCT != NULL )
{
/* Reproject */
if( OGR_G_Transform(hGeom, hCT) != OGRERR_NONE )
{
return TRUE;
}
OGREnvelope sRasterEnvelope;
sRasterEnvelope.MinX = adfGeoTransform[0];
sRasterEnvelope.MinY = adfGeoTransform[3] + poClippingDS->GetRasterYSize() * adfGeoTransform[5];
sRasterEnvelope.MaxX = adfGeoTransform[0] + poClippingDS->GetRasterXSize() * adfGeoTransform[1];
sRasterEnvelope.MaxY = adfGeoTransform[3];
/* Check that the reprojected geometry intersects the raster envelope */
OGR_G_GetEnvelope(hGeom, &sEnvelope);
if( !(sRasterEnvelope.Intersects(sEnvelope)) )
{
return TRUE;
}
}
else
{
OGR_G_GetEnvelope(hGeom, &sEnvelope);
}
/* -------------------------------------------------------------- */
/*      Get style                                                 */
/* -------------------------------------------------------------- */
/* Defaults: black pen, 50% grey half-transparent brush, black text. */
int nPenR = 0, nPenG = 0, nPenB = 0, nPenA = 255;
int nBrushR = 127, nBrushG = 127, nBrushB = 127, nBrushA = 127;
int nTextR = 0, nTextG = 0, nTextB = 0, nTextA = 255;
int bSymbolColorDefined = FALSE;
int nSymbolR = 0, nSymbolG = 0, nSymbolB = 0, nSymbolA = 255;
double dfTextSize = 12, dfTextAngle = 0, dfTextDx = 0, dfTextDy = 0;
double dfPenWidth = 1;
double dfSymbolSize = 5;
CPLString osDashArray;
CPLString osLabelText;
CPLString osSymbolId;
int nImageSymbolId = 0, nImageWidth = 0, nImageHeight = 0;
/* Parse the OGR feature style string (PEN/BRUSH/LABEL/SYMBOL tools). */
OGRStyleMgrH hSM = OGR_SM_Create(NULL);
OGR_SM_InitFromFeature(hSM, hFeat);
int nCount = OGR_SM_GetPartCount(hSM, NULL);
for(int iPart = 0; iPart < nCount; iPart++)
{
OGRStyleToolH hTool = OGR_SM_GetPart(hSM, iPart, NULL);
if (hTool)
{
if (OGR_ST_GetType(hTool) == OGRSTCPen)
{
int bIsNull = TRUE;
/* Colors are "#RRGGBB" or "#RRGGBBAA". */
const char* pszColor = OGR_ST_GetParamStr(hTool, OGRSTPenColor, &bIsNull);
if (pszColor && !bIsNull)
{
int nRed = 0, nGreen = 0, nBlue = 0, nAlpha = 255;
int nVals = sscanf(pszColor,"#%2x%2x%2x%2x",&nRed,&nGreen,&nBlue,&nAlpha);
if (nVals >= 3)
{
nPenR = nRed;
nPenG = nGreen;
nPenB = nBlue;
if (nVals == 4)
nPenA = nAlpha;
}
}
/* Dash pattern: only accepted when it has an even token count,   */
/* as required by the PDF "d" operator's on/off pairs.            */
const char* pszDash = OGR_ST_GetParamStr(hTool, OGRSTPenPattern, &bIsNull);
if (pszDash && !bIsNull)
{
char** papszTokens = CSLTokenizeString2(pszDash, " ", 0);
int nTokens = CSLCount(papszTokens);
if ((nTokens % 2) == 0)
{
for(int i=0;i<nTokens;i++)
{
osDashArray += CPLSPrintf("%d ", atoi(papszTokens[i]));
}
}
CSLDestroy(papszTokens);
}
//OGRSTUnitId eUnit = OGR_ST_GetUnit(hTool);
double dfWidth = OGR_ST_GetParamDbl(hTool, OGRSTPenWidth, &bIsNull);
if (!bIsNull)
dfPenWidth = dfWidth;
}
else if (OGR_ST_GetType(hTool) == OGRSTCBrush)
{
int bIsNull;
const char* pszColor = OGR_ST_GetParamStr(hTool, OGRSTBrushFColor, &bIsNull);
if (pszColor)
{
int nRed = 0, nGreen = 0, nBlue = 0, nAlpha = 255;
int nVals = sscanf(pszColor,"#%2x%2x%2x%2x",&nRed,&nGreen,&nBlue,&nAlpha);
if (nVals >= 3)
{
nBrushR = nRed;
nBrushG = nGreen;
nBrushB = nBlue;
if (nVals == 4)
nBrushA = nAlpha;
}
}
}
else if (OGR_ST_GetType(hTool) == OGRSTCLabel)
{
int bIsNull;
const char* pszStr = OGR_ST_GetParamStr(hTool, OGRSTLabelTextString, &bIsNull);
if (pszStr)
{
osLabelText = pszStr;
/* If the text is of the form {stuff}, then it means we want to fetch */
/* the value of the field "stuff" in the feature */
if( osLabelText.size() && osLabelText[0] == '{' &&
osLabelText[osLabelText.size() - 1] == '}' )
{
osLabelText = pszStr + 1;
osLabelText.resize(osLabelText.size() - 1);
int nIdxField = OGR_F_GetFieldIndex(hFeat, osLabelText);
if( nIdxField >= 0 )
osLabelText = OGR_F_GetFieldAsString(hFeat, nIdxField);
else
osLabelText = "";
}
}
const char* pszColor = OGR_ST_GetParamStr(hTool, OGRSTLabelFColor, &bIsNull);
if (pszColor && !bIsNull)
{
int nRed = 0, nGreen = 0, nBlue = 0, nAlpha = 255;
int nVals = sscanf(pszColor,"#%2x%2x%2x%2x",&nRed,&nGreen,&nBlue,&nAlpha);
if (nVals >= 3)
{
nTextR = nRed;
nTextG = nGreen;
nTextB = nBlue;
if (nVals == 4)
nTextA = nAlpha;
}
}
double dfVal = OGR_ST_GetParamDbl(hTool, OGRSTLabelSize, &bIsNull);
if (!bIsNull)
{
dfTextSize = dfVal;
}
dfVal = OGR_ST_GetParamDbl(hTool, OGRSTLabelAngle, &bIsNull);
if (!bIsNull)
{
dfTextAngle = dfVal;
}
dfVal = OGR_ST_GetParamDbl(hTool, OGRSTLabelDx, &bIsNull);
if (!bIsNull)
{
dfTextDx = dfVal;
}
dfVal = OGR_ST_GetParamDbl(hTool, OGRSTLabelDy, &bIsNull);
if (!bIsNull)
{
dfTextDy = dfVal;
}
}
else if (OGR_ST_GetType(hTool) == OGRSTCSymbol)
{
int bIsNull;
const char* pszSymbolId = OGR_ST_GetParamStr(hTool, OGRSTSymbolId, &bIsNull);
if (pszSymbolId && !bIsNull)
{
osSymbolId = pszSymbolId;
/* A symbol id that is not a well-known "ogr-sym-N" name is       */
/* treated as a raster file to embed; the image object is written */
/* once and cached in oMapSymbolFilenameToDesc for reuse.         */
if (strstr(pszSymbolId, "ogr-sym-") == NULL)
{
if (oMapSymbolFilenameToDesc.find(osSymbolId) == oMapSymbolFilenameToDesc.end())
{
/* Quiet handler: a missing/unreadable symbol file is not fatal. */
CPLPushErrorHandler(CPLQuietErrorHandler);
GDALDatasetH hImageDS = GDALOpen(osSymbolId, GA_ReadOnly);
CPLPopErrorHandler();
if (hImageDS != NULL)
{
nImageWidth = GDALGetRasterXSize(hImageDS);
nImageHeight = GDALGetRasterYSize(hImageDS);
nImageSymbolId = WriteBlock((GDALDataset*) hImageDS,
0, 0,
nImageWidth,
nImageHeight,
0,
COMPRESS_DEFAULT,
0,
-1,
NULL,
NULL,
NULL);
GDALClose(hImageDS);
}
GDALPDFImageDesc oDesc;
oDesc.nImageId = nImageSymbolId;
oDesc.dfXOff = 0;
oDesc.dfYOff = 0;
oDesc.dfXSize = nImageWidth;
oDesc.dfYSize = nImageHeight;
oMapSymbolFilenameToDesc[osSymbolId] = oDesc;
}
else
{
/* Symbol file already embedded: reuse the cached object id. */
GDALPDFImageDesc& oDesc = oMapSymbolFilenameToDesc[osSymbolId];
nImageSymbolId = oDesc.nImageId;
nImageWidth = (int)oDesc.dfXSize;
nImageHeight = (int)oDesc.dfYSize;
}
}
}
double dfVal = OGR_ST_GetParamDbl(hTool, OGRSTSymbolSize, &bIsNull);
if (!bIsNull)
{
dfSymbolSize = dfVal;
}
const char* pszColor = OGR_ST_GetParamStr(hTool, OGRSTSymbolColor, &bIsNull);
if (pszColor && !bIsNull)
{
int nRed = 0, nGreen = 0, nBlue = 0, nAlpha = 255;
int nVals = sscanf(pszColor,"#%2x%2x%2x%2x",&nRed,&nGreen,&nBlue,&nAlpha);
if (nVals >= 3)
{
bSymbolColorDefined = TRUE;
nSymbolR = nRed;
nSymbolG = nGreen;
nSymbolB = nBlue;
if (nVals == 4)
nSymbolA = nAlpha;
}
}
}
OGR_ST_Destroy(hTool);
}
}
OGR_SM_Destroy(hSM);
/* For point symbols, an explicit SYMBOL color overrides both pen and brush. */
if (wkbFlatten(OGR_G_GetGeometryType(hGeom)) == wkbPoint && bSymbolColorDefined)
{
nPenR = nSymbolR;
nPenG = nSymbolG;
nPenB = nSymbolB;
nPenA = nSymbolA;
nBrushR = nSymbolR;
nBrushG = nSymbolG;
nBrushB = nSymbolB;
nBrushA = nSymbolA;
}
double dfRadius = dfSymbolSize * dfUserUnit;
/* -------------------------------------------------------------- */
/*      Write object dictionary                                   */
/* -------------------------------------------------------------- */
int nObjectId = AllocNewObject();
int nObjectLengthId = AllocNewObject();
osVectorDesc.aIds.push_back(nObjectId);
/* Bounding box of the drawing in page coordinates, padded so that the  */
/* symbol image or pen width is not clipped at the XObject border.      */
int bboxXMin, bboxYMin, bboxXMax, bboxYMax;
if (wkbFlatten(OGR_G_GetGeometryType(hGeom)) == wkbPoint && nImageSymbolId != 0)
{
bboxXMin = (int)floor(sEnvelope.MinX * adfMatrix[1] + adfMatrix[0] - nImageWidth / 2);
bboxYMin = (int)floor(sEnvelope.MinY * adfMatrix[3] + adfMatrix[2] - nImageHeight / 2);
bboxXMax = (int)ceil(sEnvelope.MaxX * adfMatrix[1] + adfMatrix[0] + nImageWidth / 2);
bboxYMax = (int)ceil(sEnvelope.MaxY * adfMatrix[3] + adfMatrix[2] + nImageHeight / 2);
}
else
{
double dfMargin = dfPenWidth;
if( wkbFlatten(OGR_G_GetGeometryType(hGeom)) == wkbPoint )
{
if (osSymbolId == "ogr-sym-6" ||
osSymbolId == "ogr-sym-7")
{
/* Triangles extend 2*r*sqrt(3)/3 above the anchor point. */
const double dfSqrt3 = 1.73205080757;
dfMargin += dfRadius * 2 * dfSqrt3 / 3;
}
else
dfMargin += dfRadius;
}
bboxXMin = (int)floor(sEnvelope.MinX * adfMatrix[1] + adfMatrix[0] - dfMargin);
bboxYMin = (int)floor(sEnvelope.MinY * adfMatrix[3] + adfMatrix[2] - dfMargin);
bboxXMax = (int)ceil(sEnvelope.MaxX * adfMatrix[1] + adfMatrix[0] + dfMargin);
bboxYMax = (int)ceil(sEnvelope.MaxY * adfMatrix[3] + adfMatrix[2] + dfMargin);
}
/* When a link field is set and non-empty, attach a URI Link annotation */
/* covering the feature's bounding box (QuadPoints narrows it for       */
/* single-ring quadrilateral polygons).                                 */
int iField = -1;
const char* pszLinkVal = NULL;
if (pszOGRLinkField != NULL &&
(iField = OGR_FD_GetFieldIndex(OGR_F_GetDefnRef(hFeat), pszOGRLinkField)) >= 0 &&
OGR_F_IsFieldSet(hFeat, iField) &&
strcmp((pszLinkVal = OGR_F_GetFieldAsString(hFeat, iField)), "") != 0)
{
int nAnnotId = AllocNewObject();
oPageContext.anAnnotationsId.push_back(nAnnotId);
StartObj(nAnnotId);
{
GDALPDFDictionaryRW oDict;
oDict.Add("Type", GDALPDFObjectRW::CreateName("Annot"));
oDict.Add("Subtype", GDALPDFObjectRW::CreateName("Link"));
oDict.Add("Rect", &(new GDALPDFArrayRW())->Add(bboxXMin).Add(bboxYMin).Add(bboxXMax).Add(bboxYMax));
oDict.Add("A", &(new GDALPDFDictionaryRW())->
Add("S", GDALPDFObjectRW::CreateName("URI")).
Add("URI", pszLinkVal));
oDict.Add("BS", &(new GDALPDFDictionaryRW())->
Add("Type", GDALPDFObjectRW::CreateName("Border")).
Add("S", GDALPDFObjectRW::CreateName("S")).
Add("W", 0));
oDict.Add("Border", &(new GDALPDFArrayRW())->Add(0).Add(0).Add(0));
oDict.Add("H", GDALPDFObjectRW::CreateName("I"));
if( wkbFlatten(OGR_G_GetGeometryType(hGeom)) == wkbPolygon &&
OGR_G_GetGeometryCount(hGeom) == 1 )
{
OGRGeometryH hSubGeom = OGR_G_GetGeometryRef(hGeom, 0);
int nPoints = OGR_G_GetPointCount(hSubGeom);
if( nPoints == 4 || nPoints == 5 )
{
std::vector<double> adfX, adfY;
for(int i=0;i<nPoints;i++)
{
double dfX = OGR_G_GetX(hSubGeom, i) * adfMatrix[1] + adfMatrix[0];
double dfY = OGR_G_GetY(hSubGeom, i) * adfMatrix[3] + adfMatrix[2];
adfX.push_back(dfX);
adfY.push_back(dfY);
}
if( nPoints == 4 )
{
oDict.Add("QuadPoints", &(new GDALPDFArrayRW())->
Add(adfX[0]).Add(adfY[0]).
Add(adfX[1]).Add(adfY[1]).
Add(adfX[2]).Add(adfY[2]).
Add(adfX[0]).Add(adfY[0]));
}
else if( nPoints == 5 )
{
oDict.Add("QuadPoints", &(new GDALPDFArrayRW())->
Add(adfX[0]).Add(adfY[0]).
Add(adfX[1]).Add(adfY[1]).
Add(adfX[2]).Add(adfY[2]).
Add(adfX[3]).Add(adfY[3]));
}
}
}
VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
}
EndObj();
}
/* Form XObject dictionary for the feature drawing. The stream length   */
/* is written afterwards as an indirect object (nObjectLengthId) since  */
/* it is not known until the (possibly compressed) stream is finished.  */
StartObj(nObjectId);
{
GDALPDFDictionaryRW oDict;
GDALPDFArrayRW* poBBOX = new GDALPDFArrayRW();
poBBOX->Add(bboxXMin).Add(bboxYMin).Add(bboxXMax). Add(bboxYMax);
oDict.Add("Length", nObjectLengthId, 0)
.Add("Type", GDALPDFObjectRW::CreateName("XObject"))
.Add("BBox", poBBOX)
.Add("Subtype", GDALPDFObjectRW::CreateName("Form"));
if( oPageContext.eStreamCompressMethod != COMPRESS_NONE )
{
oDict.Add("Filter", GDALPDFObjectRW::CreateName("FlateDecode"));
}
/* GS1 carries stroke (CA) / fill (ca) alpha; 127 and 128 are both     */
/* mapped to exactly 0.5 to keep output stable across platforms.       */
GDALPDFDictionaryRW* poGS1 = new GDALPDFDictionaryRW();
poGS1->Add("Type", GDALPDFObjectRW::CreateName("ExtGState"));
if (nPenA != 255)
poGS1->Add("CA", (nPenA == 127 || nPenA == 128) ? 0.5 : nPenA / 255.0);
if (nBrushA != 255)
poGS1->Add("ca", (nBrushA == 127 || nBrushA == 128) ? 0.5 : nBrushA / 255.0 );
GDALPDFDictionaryRW* poExtGState = new GDALPDFDictionaryRW();
poExtGState->Add("GS1", poGS1);
GDALPDFDictionaryRW* poResources = new GDALPDFDictionaryRW();
poResources->Add("ExtGState", poExtGState);
if( nImageSymbolId != 0 )
{
GDALPDFDictionaryRW* poDictXObject = new GDALPDFDictionaryRW();
poResources->Add("XObject", poDictXObject);
poDictXObject->Add(CPLSPrintf("SymImage%d", nImageSymbolId), nImageSymbolId, 0);
}
oDict.Add("Resources", poResources);
VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
}
/* -------------------------------------------------------------- */
/*      Write object stream                                       */
/* -------------------------------------------------------------- */
VSIFPrintfL(fp, "stream\n");
vsi_l_offset nStreamStart = VSIFTellL(fp);
/* Redirect fp through a gzip writer while emitting the stream body,   */
/* then restore it (fpBack) to measure the compressed length.          */
VSILFILE* fpGZip = NULL;
VSILFILE* fpBack = fp;
if( oPageContext.eStreamCompressMethod != COMPRESS_NONE )
{
fpGZip = (VSILFILE* )VSICreateGZipWritable( (VSIVirtualHandle*) fp, TRUE, FALSE );
fp = fpGZip;
}
VSIFPrintfL(fp, "q\n");
VSIFPrintfL(fp, "/GS1 gs\n");
if (nImageSymbolId == 0)
{
/* Pen setup: width, butt caps, miter joins, dash array, RGB colors. */
VSIFPrintfL(fp, "%f w\n"
"0 J\n"
"0 j\n"
"10 M\n"
"[%s]0 d\n",
dfPenWidth,
osDashArray.c_str());
VSIFPrintfL(fp, "%f %f %f RG\n", nPenR / 255.0, nPenG / 255.0, nPenB / 255.0);
VSIFPrintfL(fp, "%f %f %f rg\n", nBrushR / 255.0, nBrushG / 255.0, nBrushB / 255.0);
}
if (wkbFlatten(OGR_G_GetGeometryType(hGeom)) == wkbPoint)
{
double dfX = OGR_G_GetX(hGeom, 0) * adfMatrix[1] + adfMatrix[0];
double dfY = OGR_G_GetY(hGeom, 0) * adfMatrix[3] + adfMatrix[2];
if (nImageSymbolId != 0)
{
/* Place the embedded symbol image centered on the point. */
VSIFPrintfL(fp, "%d 0 0 %d %f %f cm\n",
nImageWidth, nImageHeight,
dfX - nImageWidth / 2, dfY - nImageHeight / 2);
VSIFPrintfL(fp, "/SymImage%d Do\n", nImageSymbolId);
}
else if (osSymbolId == "")
osSymbolId = "ogr-sym-3"; /* symbol by default */
else if ( !(osSymbolId == "ogr-sym-0" ||
osSymbolId == "ogr-sym-1" ||
osSymbolId == "ogr-sym-2" ||
osSymbolId == "ogr-sym-3" ||
osSymbolId == "ogr-sym-4" ||
osSymbolId == "ogr-sym-5" ||
osSymbolId == "ogr-sym-6" ||
osSymbolId == "ogr-sym-7" ||
osSymbolId == "ogr-sym-8" ||
osSymbolId == "ogr-sym-9") )
{
CPLDebug("PDF", "Unhandled symbol id : %s. Using ogr-sym-3 instead", osSymbolId.c_str());
osSymbolId = "ogr-sym-3";
}
if (osSymbolId == "ogr-sym-0") /* cross (+) */
{
VSIFPrintfL(fp, "%f %f m\n", dfX - dfRadius, dfY);
VSIFPrintfL(fp, "%f %f l\n", dfX + dfRadius, dfY);
VSIFPrintfL(fp, "%f %f m\n", dfX, dfY - dfRadius);
VSIFPrintfL(fp, "%f %f l\n", dfX, dfY + dfRadius);
VSIFPrintfL(fp, "S\n");
}
else if (osSymbolId == "ogr-sym-1") /* diagcross (X) */
{
VSIFPrintfL(fp, "%f %f m\n", dfX - dfRadius, dfY - dfRadius);
VSIFPrintfL(fp, "%f %f l\n", dfX + dfRadius, dfY + dfRadius);
VSIFPrintfL(fp, "%f %f m\n", dfX - dfRadius, dfY + dfRadius);
VSIFPrintfL(fp, "%f %f l\n", dfX + dfRadius, dfY - dfRadius);
VSIFPrintfL(fp, "S\n");
}
else if (osSymbolId == "ogr-sym-2" ||
osSymbolId == "ogr-sym-3") /* circle */
{
/* Approximate the circle with 4 cubic Bezier arcs. */
/* See http://www.whizkidtech.redprince.net/bezier/circle/kappa/ */
const double dfKappa = 0.5522847498;
VSIFPrintfL(fp, "%f %f m\n", dfX - dfRadius, dfY);
VSIFPrintfL(fp, "%f %f %f %f %f %f c\n",
dfX - dfRadius, dfY - dfRadius * dfKappa,
dfX - dfRadius * dfKappa, dfY - dfRadius,
dfX, dfY - dfRadius);
VSIFPrintfL(fp, "%f %f %f %f %f %f c\n",
dfX + dfRadius * dfKappa, dfY - dfRadius,
dfX + dfRadius, dfY - dfRadius * dfKappa,
dfX + dfRadius, dfY);
VSIFPrintfL(fp, "%f %f %f %f %f %f c\n",
dfX + dfRadius, dfY + dfRadius * dfKappa,
dfX + dfRadius * dfKappa, dfY + dfRadius,
dfX, dfY + dfRadius);
VSIFPrintfL(fp, "%f %f %f %f %f %f c\n",
dfX - dfRadius * dfKappa, dfY + dfRadius,
dfX - dfRadius, dfY + dfRadius * dfKappa,
dfX - dfRadius, dfY);
if (osSymbolId == "ogr-sym-2")
VSIFPrintfL(fp, "s\n"); /* not filled */
else
VSIFPrintfL(fp, "b*\n"); /* filled */
}
else if (osSymbolId == "ogr-sym-4" ||
osSymbolId == "ogr-sym-5") /* square */
{
VSIFPrintfL(fp, "%f %f m\n", dfX - dfRadius, dfY + dfRadius);
VSIFPrintfL(fp, "%f %f l\n", dfX + dfRadius, dfY + dfRadius);
VSIFPrintfL(fp, "%f %f l\n", dfX + dfRadius, dfY - dfRadius);
VSIFPrintfL(fp, "%f %f l\n", dfX - dfRadius, dfY - dfRadius);
if (osSymbolId == "ogr-sym-4")
VSIFPrintfL(fp, "s\n"); /* not filled */
else
VSIFPrintfL(fp, "b*\n"); /* filled */
}
else if (osSymbolId == "ogr-sym-6" ||
osSymbolId == "ogr-sym-7") /* triangle */
{
const double dfSqrt3 = 1.73205080757;
VSIFPrintfL(fp, "%f %f m\n", dfX - dfRadius, dfY - dfRadius * dfSqrt3 / 3);
VSIFPrintfL(fp, "%f %f l\n", dfX, dfY + 2 * dfRadius * dfSqrt3 / 3);
VSIFPrintfL(fp, "%f %f l\n", dfX + dfRadius, dfY - dfRadius * dfSqrt3 / 3);
if (osSymbolId == "ogr-sym-6")
VSIFPrintfL(fp, "s\n"); /* not filled */
else
VSIFPrintfL(fp, "b*\n"); /* filled */
}
else if (osSymbolId == "ogr-sym-8" ||
osSymbolId == "ogr-sym-9") /* star */
{
/* 10 vertices alternating between outer and inner radius;            */
/* 0.38196601125 = sin(18 deg)/sin(126 deg), the inner/outer ratio.   */
const double dfSin18divSin126 = 0.38196601125;
VSIFPrintfL(fp, "%f %f m\n", dfX, dfY + dfRadius);
for(int i=1; i<10;i++)
{
double dfFactor = ((i % 2) == 1) ? dfSin18divSin126 : 1.0;
VSIFPrintfL(fp, "%f %f l\n",
dfX + cos(M_PI / 2 - i * M_PI * 36 / 180) * dfRadius * dfFactor,
dfY + sin(M_PI / 2 - i * M_PI * 36 / 180) * dfRadius * dfFactor);
}
if (osSymbolId == "ogr-sym-8")
VSIFPrintfL(fp, "s\n"); /* not filled */
else
VSIFPrintfL(fp, "b*\n"); /* filled */
}
}
else
{
/* Lines, polygons and multi-geometries. */
DrawGeometry(fp, hGeom, adfMatrix);
}
VSIFPrintfL(fp, "Q");
if (fpGZip)
VSIFCloseL(fpGZip);
fp = fpBack;
vsi_l_offset nStreamEnd = VSIFTellL(fp);
VSIFPrintfL(fp, "\n");
VSIFPrintfL(fp, "endstream\n");
EndObj();
/* Now that the stream size is known, emit the deferred Length object. */
StartObj(nObjectLengthId);
VSIFPrintfL(fp,
" %ld\n",
(long)(nStreamEnd - nStreamStart));
EndObj();
/* -------------------------------------------------------------- */
/*      Write label                                               */
/* -------------------------------------------------------------- */
/* Labels are only written for point geometries, in their own XObject   */
/* under a lazily-created "Text" sub-OCG so they can be toggled.        */
if (osLabelText.size() && wkbFlatten(OGR_G_GetGeometryType(hGeom)) == wkbPoint)
{
if (osVectorDesc.nOCGTextId == 0)
osVectorDesc.nOCGTextId = WriteOCG("Text", osVectorDesc.nOGCId);
/* -------------------------------------------------------------- */
/*      Write object dictionary                                   */
/* -------------------------------------------------------------- */
nObjectId = AllocNewObject();
nObjectLengthId = AllocNewObject();
osVectorDesc.aIdsText.push_back(nObjectId);
StartObj(nObjectId);
{
GDALPDFDictionaryRW oDict;
GDALDataset* poClippingDS = oPageContext.poClippingDS;
int nWidth = poClippingDS->GetRasterXSize();
int nHeight = poClippingDS->GetRasterYSize();
double dfUserUnit = oPageContext.dfDPI * USER_UNIT_IN_INCH;
/* The label XObject's BBox spans the whole page (plus margins). */
double dfWidthInUserUnit = nWidth / dfUserUnit + oPageContext.sMargins.nLeft + oPageContext.sMargins.nRight;
double dfHeightInUserUnit = nHeight / dfUserUnit + oPageContext.sMargins.nBottom + oPageContext.sMargins.nTop;
oDict.Add("Length", nObjectLengthId, 0)
.Add("Type", GDALPDFObjectRW::CreateName("XObject"))
.Add("BBox", &((new GDALPDFArrayRW())
->Add(0).Add(0)).Add(dfWidthInUserUnit).Add(dfHeightInUserUnit))
.Add("Subtype", GDALPDFObjectRW::CreateName("Form"));
if( oPageContext.eStreamCompressMethod != COMPRESS_NONE )
{
oDict.Add("Filter", GDALPDFObjectRW::CreateName("FlateDecode"));
}
GDALPDFDictionaryRW* poResources = new GDALPDFDictionaryRW();
if (nTextA != 255)
{
GDALPDFDictionaryRW* poGS1 = new GDALPDFDictionaryRW();
poGS1->Add("Type", GDALPDFObjectRW::CreateName("ExtGState"));
poGS1->Add("ca", (nTextA == 127 || nTextA == 128) ? 0.5 : nTextA / 255.0);
GDALPDFDictionaryRW* poExtGState = new GDALPDFDictionaryRW();
poExtGState->Add("GS1", poGS1);
poResources->Add("ExtGState", poExtGState);
}
/* Labels use the standard Times-Roman Type1 font (no embedding needed). */
GDALPDFDictionaryRW* poDictFTimesRoman = NULL;
poDictFTimesRoman = new GDALPDFDictionaryRW();
poDictFTimesRoman->Add("Type", GDALPDFObjectRW::CreateName("Font"));
poDictFTimesRoman->Add("BaseFont", GDALPDFObjectRW::CreateName("Times-Roman"));
poDictFTimesRoman->Add("Encoding", GDALPDFObjectRW::CreateName("WinAnsiEncoding"));
poDictFTimesRoman->Add("Subtype", GDALPDFObjectRW::CreateName("Type1"));
GDALPDFDictionaryRW* poDictFont = new GDALPDFDictionaryRW();
if (poDictFTimesRoman)
poDictFont->Add("FTimesRoman", poDictFTimesRoman);
poResources->Add("Font", poDictFont);
oDict.Add("Resources", poResources);
VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
}
/* -------------------------------------------------------------- */
/*      Write object stream                                       */
/* -------------------------------------------------------------- */
VSIFPrintfL(fp, "stream\n");
vsi_l_offset nStreamStart = VSIFTellL(fp);
VSILFILE* fpGZip = NULL;
VSILFILE* fpBack = fp;
if( oPageContext.eStreamCompressMethod != COMPRESS_NONE )
{
fpGZip = (VSILFILE* )VSICreateGZipWritable( (VSIVirtualHandle*) fp, TRUE, FALSE );
fp = fpGZip;
}
/* Anchor the text at the point position plus the LABEL dx/dy offsets. */
double dfX = OGR_G_GetX(hGeom, 0) * adfMatrix[1] + adfMatrix[0] + dfTextDx;
double dfY = OGR_G_GetY(hGeom, 0) * adfMatrix[3] + adfMatrix[2] + dfTextDy;
VSIFPrintfL(fp, "q\n");
VSIFPrintfL(fp, "BT\n");
if (nTextA != 255)
{
VSIFPrintfL(fp, "/GS1 gs\n");
}
if (dfTextAngle == 0)
{
VSIFPrintfL(fp, "%f %f Td\n", dfX, dfY);
}
else
{
/* OGR angles are counter-clockwise degrees; build the rotation Tm. */
dfTextAngle = - dfTextAngle * M_PI / 180.0;
VSIFPrintfL(fp, "%f %f %f %f %f %f Tm\n",
cos(dfTextAngle), -sin(dfTextAngle),
sin(dfTextAngle), cos(dfTextAngle),
dfX, dfY);
}
VSIFPrintfL(fp, "%f %f %f rg\n", nTextR / 255.0, nTextG / 255.0, nTextB / 255.0);
VSIFPrintfL(fp, "/FTimesRoman %f Tf\n", dfTextSize);
VSIFPrintfL(fp, "(");
/* NOTE(review): characters are written verbatim; '(' ')' '\\' and     */
/* non-WinAnsi bytes in the label are not escaped for the PDF string.  */
for(size_t i=0;i<osLabelText.size();i++)
{
/*if (osLabelText[i] == '\n')
VSIFPrintfL(fp, ") Tj T* (");
else */
/* Tautology. Always true. */
/* if (osLabelText[i] >= 32 && osLabelText[i] <= 127) { */
VSIFPrintfL(fp, "%c", osLabelText[i]);
/* } else {
VSIFPrintfL(fp, "_");
} */
}
VSIFPrintfL(fp, ") Tj\n");
VSIFPrintfL(fp, "ET\n");
VSIFPrintfL(fp, "Q");
if (fpGZip)
VSIFCloseL(fpGZip);
fp = fpBack;
vsi_l_offset nStreamEnd = VSIFTellL(fp);
VSIFPrintfL(fp, "\n");
VSIFPrintfL(fp, "endstream\n");
EndObj();
StartObj(nObjectLengthId);
VSIFPrintfL(fp,
" %ld\n",
(long)(nStreamEnd - nStreamStart));
EndObj();
}
else
{
/* Keep aIdsText aligned with aIds: 0 marks "no label for this feature". */
osVectorDesc.aIdsText.push_back(0);
}
/* -------------------------------------------------------------- */
/*      Write feature attributes                                  */
/* -------------------------------------------------------------- */
int nFeatureUserProperties = 0;
CPLString osFeatureName;
if (bWriteOGRAttributes)
{
/* Display name: value of pszOGRDisplayField if present, else "featureN". */
int iField = -1;
if (pszOGRDisplayField &&
(iField = OGR_FD_GetFieldIndex(OGR_F_GetDefnRef(hFeat), pszOGRDisplayField)) >= 0)
osFeatureName = OGR_F_GetFieldAsString(hFeat, iField);
else
osFeatureName = CPLSPrintf("feature%d", iObjLayer + 1);
nFeatureUserProperties = AllocNewObject();
StartObj(nFeatureUserProperties);
GDALPDFDictionaryRW oDict;
GDALPDFDictionaryRW* poDictA = new GDALPDFDictionaryRW();
oDict.Add("A", poDictA);
poDictA->Add("O", GDALPDFObjectRW::CreateName("UserProperties"));
/* One {N: field name, V: field value} entry per set field. */
int nFields = OGR_F_GetFieldCount(hFeat);
GDALPDFArrayRW* poArray = new GDALPDFArrayRW();
for(int i = 0; i < nFields; i++)
{
if (OGR_F_IsFieldSet(hFeat, i))
{
OGRFieldDefnH hFDefn = OGR_F_GetFieldDefnRef( hFeat, i );
GDALPDFDictionaryRW* poKV = new GDALPDFDictionaryRW();
poKV->Add("N", OGR_Fld_GetNameRef(hFDefn));
if (OGR_Fld_GetType(hFDefn) == OFTInteger)
poKV->Add("V", OGR_F_GetFieldAsInteger(hFeat, i));
else if (OGR_Fld_GetType(hFDefn) == OFTReal)
poKV->Add("V", OGR_F_GetFieldAsDouble(hFeat, i));
else
poKV->Add("V", OGR_F_GetFieldAsString(hFeat, i));
poArray->Add(poKV);
}
}
poDictA->Add("P", poArray);
/* K is the MCID of the feature's marked-content sequence in the page. */
oDict.Add("K", iObj);
oDict.Add("P", osVectorDesc.nFeatureLayerId, 0);
oDict.Add("Pg", oPageContext.nPageId, 0);
oDict.Add("S", GDALPDFObjectRW::CreateName("feature"));
oDict.Add("T", osFeatureName);
VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
EndObj();
}
iObj ++;
iObjLayer ++;
osVectorDesc.aUserPropertiesIds.push_back(nFeatureUserProperties);
osVectorDesc.aFeatureNames.push_back(osFeatureName);
return TRUE;
}
#endif
/************************************************************************/
/* EndPage() */
/************************************************************************/
int GDALPDFWriter::EndPage(const char* pszExtraImages,
const char* pszExtraStream,
const char* pszExtraLayerName,
const char* pszOffLayers,
const char* pszExclusiveLayers)
{
int nLayerExtraId = WriteOCG(pszExtraLayerName);
if( pszOffLayers )
osOffLayers = pszOffLayers;
if( pszExclusiveLayers )
osExclusiveLayers = pszExclusiveLayers;
int bHasTimesRoman = pszExtraStream && strstr(pszExtraStream, "/FTimesRoman");
int bHasTimesBold = pszExtraStream && strstr(pszExtraStream, "/FTimesBold");
/* -------------------------------------------------------------- */
/* Write extra images */
/* -------------------------------------------------------------- */
std::vector<GDALPDFImageDesc> asExtraImageDesc;
if (pszExtraImages)
{
if( GDALGetDriverCount() == 0 )
GDALAllRegister();
char** papszExtraImagesTokens = CSLTokenizeString2(pszExtraImages, ",", 0);
double dfUserUnit = oPageContext.dfDPI * USER_UNIT_IN_INCH;
int nCount = CSLCount(papszExtraImagesTokens);
for(int i=0;i+4<=nCount; /* */)
{
const char* pszImageFilename = papszExtraImagesTokens[i+0];
double dfX = CPLAtof(papszExtraImagesTokens[i+1]);
double dfY = CPLAtof(papszExtraImagesTokens[i+2]);
double dfScale = CPLAtof(papszExtraImagesTokens[i+3]);
const char* pszLinkVal = NULL;
i += 4;
if( i < nCount && EQUALN(papszExtraImagesTokens[i],"link=",5) )
{
pszLinkVal = papszExtraImagesTokens[i] + 5;
i++;
}
GDALDataset* poImageDS = (GDALDataset* )GDALOpen(pszImageFilename, GA_ReadOnly);
if (poImageDS)
{
int nImageId = WriteBlock( poImageDS,
0, 0,
poImageDS->GetRasterXSize(),
poImageDS->GetRasterYSize(),
0,
COMPRESS_DEFAULT,
0,
-1,
NULL,
NULL,
NULL );
if (nImageId)
{
GDALPDFImageDesc oImageDesc;
oImageDesc.nImageId = nImageId;
oImageDesc.dfXSize = poImageDS->GetRasterXSize() / dfUserUnit * dfScale;
oImageDesc.dfYSize = poImageDS->GetRasterYSize() / dfUserUnit * dfScale;
oImageDesc.dfXOff = dfX;
oImageDesc.dfYOff = dfY;
asExtraImageDesc.push_back(oImageDesc);
if( pszLinkVal != NULL )
{
int nAnnotId = AllocNewObject();
oPageContext.anAnnotationsId.push_back(nAnnotId);
StartObj(nAnnotId);
{
GDALPDFDictionaryRW oDict;
oDict.Add("Type", GDALPDFObjectRW::CreateName("Annot"));
oDict.Add("Subtype", GDALPDFObjectRW::CreateName("Link"));
oDict.Add("Rect", &(new GDALPDFArrayRW())->
Add(oImageDesc.dfXOff).
Add(oImageDesc.dfYOff).
Add(oImageDesc.dfXOff + oImageDesc.dfXSize).
Add(oImageDesc.dfYOff + oImageDesc.dfYSize));
oDict.Add("A", &(new GDALPDFDictionaryRW())->
Add("S", GDALPDFObjectRW::CreateName("URI")).
Add("URI", pszLinkVal));
oDict.Add("BS", &(new GDALPDFDictionaryRW())->
Add("Type", GDALPDFObjectRW::CreateName("Border")).
Add("S", GDALPDFObjectRW::CreateName("S")).
Add("W", 0));
oDict.Add("Border", &(new GDALPDFArrayRW())->Add(0).Add(0).Add(0));
oDict.Add("H", GDALPDFObjectRW::CreateName("I"));
VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
}
EndObj();
}
}
GDALClose(poImageDS);
}
}
CSLDestroy(papszExtraImagesTokens);
}
/* -------------------------------------------------------------- */
/* Write content dictionary */
/* -------------------------------------------------------------- */
int nContentLengthId = AllocNewObject();
StartObj(oPageContext.nContentId);
{
GDALPDFDictionaryRW oDict;
oDict.Add("Length", nContentLengthId, 0);
if( oPageContext.eStreamCompressMethod != COMPRESS_NONE )
{
oDict.Add("Filter", GDALPDFObjectRW::CreateName("FlateDecode"));
}
VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
}
/* -------------------------------------------------------------- */
/* Write content stream */
/* -------------------------------------------------------------- */
VSIFPrintfL(fp, "stream\n");
vsi_l_offset nStreamStart = VSIFTellL(fp);
VSILFILE* fpGZip = NULL;
VSILFILE* fpBack = fp;
if( oPageContext.eStreamCompressMethod != COMPRESS_NONE )
{
fpGZip = (VSILFILE* )VSICreateGZipWritable( (VSIVirtualHandle*) fp, TRUE, FALSE );
fp = fpGZip;
}
/* -------------------------------------------------------------- */
/* Write drawing instructions for raster blocks */
/* -------------------------------------------------------------- */
for(size_t iRaster = 0; iRaster < oPageContext.asRasterDesc.size(); iRaster++)
{
const GDALPDFRasterDesc& oDesc = oPageContext.asRasterDesc[iRaster];
if (oDesc.nOCGRasterId)
VSIFPrintfL(fp, "/OC /Lyr%d BDC\n", oDesc.nOCGRasterId);
for(size_t iImage = 0; iImage < oDesc.asImageDesc.size(); iImage ++)
{
VSIFPrintfL(fp, "q\n");
GDALPDFObjectRW* poXSize = GDALPDFObjectRW::CreateReal(oDesc.asImageDesc[iImage].dfXSize);
GDALPDFObjectRW* poYSize = GDALPDFObjectRW::CreateReal(oDesc.asImageDesc[iImage].dfYSize);
GDALPDFObjectRW* poXOff = GDALPDFObjectRW::CreateReal(oDesc.asImageDesc[iImage].dfXOff);
GDALPDFObjectRW* poYOff = GDALPDFObjectRW::CreateReal(oDesc.asImageDesc[iImage].dfYOff);
VSIFPrintfL(fp, "%s 0 0 %s %s %s cm\n",
poXSize->Serialize().c_str(),
poYSize->Serialize().c_str(),
poXOff->Serialize().c_str(),
poYOff->Serialize().c_str());
delete poXSize;
delete poYSize;
delete poXOff;
delete poYOff;
VSIFPrintfL(fp, "/Image%d Do\n",
oDesc.asImageDesc[iImage].nImageId);
VSIFPrintfL(fp, "Q\n");
}
if (oDesc.nOCGRasterId)
VSIFPrintfL(fp, "EMC\n");
}
/* -------------------------------------------------------------- */
/* Write drawing instructions for vector features */
/* -------------------------------------------------------------- */
int iObj = 0;
for(size_t iLayer = 0; iLayer < oPageContext.asVectorDesc.size(); iLayer ++)
{
GDALPDFLayerDesc& oLayerDesc = oPageContext.asVectorDesc[iLayer];
VSIFPrintfL(fp, "/OC /Lyr%d BDC\n", oLayerDesc.nOGCId);
for(size_t iVector = 0; iVector < oLayerDesc.aIds.size(); iVector ++)
{
CPLString osName = oLayerDesc.aFeatureNames[iVector];
if (osName.size())
{
VSIFPrintfL(fp, "/feature <</MCID %d>> BDC\n",
iObj);
}
iObj ++;
VSIFPrintfL(fp, "/Vector%d Do\n", oLayerDesc.aIds[iVector]);
if (osName.size())
{
VSIFPrintfL(fp, "EMC\n");
}
}
VSIFPrintfL(fp, "EMC\n");
}
/* -------------------------------------------------------------- */
/* Write drawing instructions for labels of vector features */
/* -------------------------------------------------------------- */
iObj = 0;
for(size_t iLayer = 0; iLayer < oPageContext.asVectorDesc.size(); iLayer ++)
{
GDALPDFLayerDesc& oLayerDesc = oPageContext.asVectorDesc[iLayer];
if (oLayerDesc.nOCGTextId)
{
VSIFPrintfL(fp, "/OC /Lyr%d BDC\n", oLayerDesc.nOGCId);
VSIFPrintfL(fp, "/OC /Lyr%d BDC\n", oLayerDesc.nOCGTextId);
for(size_t iVector = 0; iVector < oLayerDesc.aIds.size(); iVector ++)
{
if (oLayerDesc.aIdsText[iVector])
{
CPLString osName = oLayerDesc.aFeatureNames[iVector];
if (osName.size())
{
VSIFPrintfL(fp, "/feature <</MCID %d>> BDC\n",
iObj);
}
VSIFPrintfL(fp, "/Text%d Do\n", oLayerDesc.aIdsText[iVector]);
if (osName.size())
{
VSIFPrintfL(fp, "EMC\n");
}
}
iObj ++;
}
VSIFPrintfL(fp, "EMC\n");
VSIFPrintfL(fp, "EMC\n");
}
else
iObj += (int) oLayerDesc.aIds.size();
}
/* -------------------------------------------------------------- */
/* Write drawing instructions for extra content. */
/* -------------------------------------------------------------- */
if (pszExtraStream || asExtraImageDesc.size())
{
if (nLayerExtraId)
VSIFPrintfL(fp, "/OC /Lyr%d BDC\n", nLayerExtraId);
/* -------------------------------------------------------------- */
/* Write drawing instructions for extra images. */
/* -------------------------------------------------------------- */
for(size_t iImage = 0; iImage < asExtraImageDesc.size(); iImage ++)
{
VSIFPrintfL(fp, "q\n");
GDALPDFObjectRW* poXSize = GDALPDFObjectRW::CreateReal(asExtraImageDesc[iImage].dfXSize);
GDALPDFObjectRW* poYSize = GDALPDFObjectRW::CreateReal(asExtraImageDesc[iImage].dfYSize);
GDALPDFObjectRW* poXOff = GDALPDFObjectRW::CreateReal(asExtraImageDesc[iImage].dfXOff);
GDALPDFObjectRW* poYOff = GDALPDFObjectRW::CreateReal(asExtraImageDesc[iImage].dfYOff);
VSIFPrintfL(fp, "%s 0 0 %s %s %s cm\n",
poXSize->Serialize().c_str(),
poYSize->Serialize().c_str(),
poXOff->Serialize().c_str(),
poYOff->Serialize().c_str());
delete poXSize;
delete poYSize;
delete poXOff;
delete poYOff;
VSIFPrintfL(fp, "/Image%d Do\n",
asExtraImageDesc[iImage].nImageId);
VSIFPrintfL(fp, "Q\n");
}
if (pszExtraStream)
VSIFPrintfL(fp, "%s\n", pszExtraStream);
if (nLayerExtraId)
VSIFPrintfL(fp, "EMC\n");
}
if (fpGZip)
VSIFCloseL(fpGZip);
fp = fpBack;
vsi_l_offset nStreamEnd = VSIFTellL(fp);
if (fpGZip)
VSIFPrintfL(fp, "\n");
VSIFPrintfL(fp, "endstream\n");
EndObj();
StartObj(nContentLengthId);
VSIFPrintfL(fp,
" %ld\n",
(long)(nStreamEnd - nStreamStart));
EndObj();
/* -------------------------------------------------------------- */
/* Write objects for feature tree. */
/* -------------------------------------------------------------- */
if (nStructTreeRootId)
{
int nParentTreeId = AllocNewObject();
StartObj(nParentTreeId);
VSIFPrintfL(fp, "<< /Nums [ 0 ");
VSIFPrintfL(fp, "[ ");
for(size_t iLayer = 0; iLayer < oPageContext.asVectorDesc.size(); iLayer ++)
{
GDALPDFLayerDesc& oLayerDesc = oPageContext.asVectorDesc[iLayer];
for(size_t iVector = 0; iVector < oLayerDesc.aIds.size(); iVector ++)
{
int nId = oLayerDesc.aUserPropertiesIds[iVector];
if (nId)
VSIFPrintfL(fp, "%d 0 R ", nId);
}
}
VSIFPrintfL(fp, " ]\n");
VSIFPrintfL(fp, " ] >> \n");
EndObj();
StartObj(nStructTreeRootId);
VSIFPrintfL(fp,
"<< "
"/Type /StructTreeRoot "
"/ParentTree %d 0 R "
"/K [ ", nParentTreeId);
for(size_t iLayer = 0; iLayer < oPageContext.asVectorDesc.size(); iLayer ++)
{
VSIFPrintfL(fp, "%d 0 R ", oPageContext.asVectorDesc[iLayer]. nFeatureLayerId);
}
VSIFPrintfL(fp,"] >>\n");
EndObj();
}
/* -------------------------------------------------------------- */
/* Write page resource dictionary. */
/* -------------------------------------------------------------- */
StartObj(oPageContext.nResourcesId);
{
GDALPDFDictionaryRW oDict;
GDALPDFDictionaryRW* poDictXObject = new GDALPDFDictionaryRW();
oDict.Add("XObject", poDictXObject);
size_t iImage;
for(size_t iRaster = 0; iRaster < oPageContext.asRasterDesc.size(); iRaster++)
{
const GDALPDFRasterDesc& oDesc = oPageContext.asRasterDesc[iRaster];
for(iImage = 0; iImage < oDesc.asImageDesc.size(); iImage ++)
{
poDictXObject->Add(CPLSPrintf("Image%d", oDesc.asImageDesc[iImage].nImageId),
oDesc.asImageDesc[iImage].nImageId, 0);
}
}
for(iImage = 0; iImage < asExtraImageDesc.size(); iImage ++)
{
poDictXObject->Add(CPLSPrintf("Image%d", asExtraImageDesc[iImage].nImageId),
asExtraImageDesc[iImage].nImageId, 0);
}
for(size_t iLayer = 0; iLayer < oPageContext.asVectorDesc.size(); iLayer ++)
{
GDALPDFLayerDesc& oLayerDesc = oPageContext.asVectorDesc[iLayer];
for(size_t iVector = 0; iVector < oLayerDesc.aIds.size(); iVector ++)
{
poDictXObject->Add(CPLSPrintf("Vector%d", oLayerDesc.aIds[iVector]),
oLayerDesc.aIds[iVector], 0);
if (oLayerDesc.aIdsText[iVector])
poDictXObject->Add(CPLSPrintf("Text%d", oLayerDesc.aIdsText[iVector]),
oLayerDesc.aIdsText[iVector], 0);
}
}
GDALPDFDictionaryRW* poDictFTimesRoman = NULL;
if (bHasTimesRoman)
{
poDictFTimesRoman = new GDALPDFDictionaryRW();
poDictFTimesRoman->Add("Type", GDALPDFObjectRW::CreateName("Font"));
poDictFTimesRoman->Add("BaseFont", GDALPDFObjectRW::CreateName("Times-Roman"));
poDictFTimesRoman->Add("Encoding", GDALPDFObjectRW::CreateName("WinAnsiEncoding"));
poDictFTimesRoman->Add("Subtype", GDALPDFObjectRW::CreateName("Type1"));
}
GDALPDFDictionaryRW* poDictFTimesBold = NULL;
if (bHasTimesBold)
{
poDictFTimesBold = new GDALPDFDictionaryRW();
poDictFTimesBold->Add("Type", GDALPDFObjectRW::CreateName("Font"));
poDictFTimesBold->Add("BaseFont", GDALPDFObjectRW::CreateName("Times-Bold"));
poDictFTimesBold->Add("Encoding", GDALPDFObjectRW::CreateName("WinAnsiEncoding"));
poDictFTimesBold->Add("Subtype", GDALPDFObjectRW::CreateName("Type1"));
}
if (poDictFTimesRoman != NULL || poDictFTimesBold != NULL)
{
GDALPDFDictionaryRW* poDictFont = new GDALPDFDictionaryRW();
if (poDictFTimesRoman)
poDictFont->Add("FTimesRoman", poDictFTimesRoman);
if (poDictFTimesBold)
poDictFont->Add("FTimesBold", poDictFTimesBold);
oDict.Add("Font", poDictFont);
}
if (asOCGs.size())
{
GDALPDFDictionaryRW* poDictProperties = new GDALPDFDictionaryRW();
for(size_t i=0; i<asOCGs.size(); i++)
poDictProperties->Add(CPLSPrintf("Lyr%d", asOCGs[i].nId),
asOCGs[i].nId, 0);
oDict.Add("Properties", poDictProperties);
}
VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
}
EndObj();
/* -------------------------------------------------------------- */
/* Write annotation arrays. */
/* -------------------------------------------------------------- */
StartObj(oPageContext.nAnnotsId);
{
GDALPDFArrayRW oArray;
for(size_t i = 0; i < oPageContext.anAnnotationsId.size(); i++)
{
oArray.Add(oPageContext.anAnnotationsId[i], 0);
}
VSIFPrintfL(fp, "%s\n", oArray.Serialize().c_str());
}
EndObj();
return TRUE;
}
/************************************************************************/
/* WriteMask() */
/************************************************************************/
/* Write the alpha channel (band 4) of poSrcDS, over the requested window, */
/* as a PDF image object usable as a /SMask soft mask.                     */
/* Returns the object id of the mask image, or 0 if no mask is needed      */
/* (fully opaque window) or on error.                                      */
int GDALPDFWriter::WriteMask(GDALDataset* poSrcDS,
                             int nXOff, int nYOff, int nReqXSize, int nReqYSize,
                             PDFCompressMethod eCompressMethod)
{
    /* NOTE: nMaskSize assumes nReqXSize * nReqYSize fits in an int. */
    int nMaskSize = nReqXSize * nReqYSize;
    /* VSIMalloc2 fails cleanly if the product would overflow size_t, */
    /* instead of silently under-allocating like VSIMalloc(a * b).    */
    GByte* pabyMask = (GByte*)VSIMalloc2(nReqXSize, nReqYSize);
    if (pabyMask == NULL)
        return 0;

    CPLErr eErr;
    eErr = poSrcDS->GetRasterBand(4)->RasterIO(
            GF_Read,
            nXOff, nYOff,
            nReqXSize, nReqYSize,
            pabyMask, nReqXSize, nReqYSize, GDT_Byte,
            0, 0, NULL);
    if (eErr != CE_None)
    {
        VSIFree(pabyMask);
        return 0;
    }

    /* Classify the mask: all-255 means fully opaque (no mask needed); */
    /* only 0/255 values means it can be packed to 1 bit per sample.   */
    int bOnly0or255 = TRUE;
    int bOnly255 = TRUE;
    for(int i = 0; i < nReqXSize * nReqYSize; i++)
    {
        if (pabyMask[i] == 0)
            bOnly255 = FALSE;
        else if (pabyMask[i] != 255)
        {
            bOnly255 = FALSE;
            bOnly0or255 = FALSE;
            break;
        }
    }

    if (bOnly255)
    {
        /* Fully opaque: no soft mask necessary. */
        VSIFree(pabyMask);
        return 0;
    }

    if (bOnly0or255)
    {
        /* Translate to 1 bit per sample, MSB first, each row padded */
        /* to a byte boundary as required for PDF image data.        */
        int nReqXSize1 = (nReqXSize + 7) / 8;
        GByte* pabyMask1 = (GByte*)VSICalloc(nReqXSize1, nReqYSize);
        if (pabyMask1 == NULL)
        {
            VSIFree(pabyMask);
            return 0;
        }
        for(int y=0;y<nReqYSize;y++)
        {
            for(int x=0;x<nReqXSize;x++)
            {
                if (pabyMask[y * nReqXSize + x])
                    pabyMask1[y * nReqXSize1 + x / 8] |= 1 << (7 - (x % 8));
            }
        }
        VSIFree(pabyMask);
        pabyMask = pabyMask1;
        nMaskSize = nReqXSize1 * nReqYSize;
    }

    /* The stream length is written in a separate, later object, since it */
    /* is not known until the (possibly compressed) stream is flushed.    */
    int nMaskId = AllocNewObject();
    int nMaskLengthId = AllocNewObject();

    StartObj(nMaskId);
    GDALPDFDictionaryRW oDict;
    oDict.Add("Length", nMaskLengthId, 0)
         .Add("Type", GDALPDFObjectRW::CreateName("XObject"));
    if( eCompressMethod != COMPRESS_NONE )
    {
        oDict.Add("Filter", GDALPDFObjectRW::CreateName("FlateDecode"));
    }
    oDict.Add("Subtype", GDALPDFObjectRW::CreateName("Image"))
         .Add("Width", nReqXSize)
         .Add("Height", nReqYSize)
         .Add("ColorSpace", GDALPDFObjectRW::CreateName("DeviceGray"))
         .Add("BitsPerComponent", (bOnly0or255) ? 1 : 8);
    VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
    VSIFPrintfL(fp, "stream\n");

    vsi_l_offset nStreamStart = VSIFTellL(fp);

    /* Temporarily swap the member fp for a gzip writer when compressing. */
    VSILFILE* fpGZip = NULL;
    VSILFILE* fpBack = fp;
    if( eCompressMethod != COMPRESS_NONE )
    {
        fpGZip = (VSILFILE* )VSICreateGZipWritable( (VSIVirtualHandle*) fp, TRUE, FALSE );
        fp = fpGZip;
    }

    VSIFWriteL(pabyMask, nMaskSize, 1, fp);
    VSIFree(pabyMask);

    if (fpGZip)
        VSIFCloseL(fpGZip);
    fp = fpBack;

    vsi_l_offset nStreamEnd = VSIFTellL(fp);
    VSIFPrintfL(fp,
                "\n"
                "endstream\n");
    EndObj();

    StartObj(nMaskLengthId);
    VSIFPrintfL(fp,
                " %ld\n",
                (long)(nStreamEnd - nStreamStart));
    EndObj();

    return nMaskId;
}
/************************************************************************/
/* WriteBlock() */
/************************************************************************/
/* Write one block (tile) of poSrcDS as a PDF image XObject and return its */
/* object id (0 on error). The window (nXOff, nYOff, nReqXSize, nReqYSize) */
/* is read from the source; for 4-band sources, band 4 is written first as */
/* a separate soft mask (/SMask). nColorTableId may reference an already   */
/* written color space object (0 = write one here if the band has a color  */
/* table).                                                                 */
int GDALPDFWriter::WriteBlock(GDALDataset* poSrcDS,
                              int nXOff, int nYOff, int nReqXSize, int nReqYSize,
                              int nColorTableId,
                              PDFCompressMethod eCompressMethod,
                              int nPredictor,
                              int nJPEGQuality,
                              const char* pszJPEG2000_DRIVER,
                              GDALProgressFunc pfnProgress,
                              void * pProgressData)
{
    int nBands = poSrcDS->GetRasterCount();
    if (nBands == 0)
        return 0;

    if (nColorTableId == 0)
        nColorTableId = WriteColorTable(poSrcDS);

    CPLErr eErr = CE_None;
    GDALDataset* poBlockSrcDS = NULL;
    GDALDatasetH hMemDS = NULL;
    GByte* pabyMEMDSBuffer = NULL;

    if (eCompressMethod == COMPRESS_DEFAULT)
    {
        GDALDataset* poSrcDSToTest = poSrcDS;

        /* Test if we can directly copy original JPEG content */
        /* if available */
        /* (a VRT wrapping a single unmodified JPEG source also qualifies) */
        if (poSrcDS->GetDriver() != NULL &&
            poSrcDS->GetDriver() == GDALGetDriverByName("VRT"))
        {
            VRTDataset* poVRTDS = (VRTDataset* )poSrcDS;
            poSrcDSToTest = poVRTDS->GetSingleSimpleSource();
        }

        /* Pass-through requires an on-disk JPEG source, the block covering */
        /* the whole image, and no explicit recompression quality.          */
        if (poSrcDSToTest != NULL &&
            poSrcDSToTest->GetDriver() != NULL &&
            EQUAL(poSrcDSToTest->GetDriver()->GetDescription(), "JPEG") &&
            nXOff == 0 && nYOff == 0 &&
            nReqXSize == poSrcDSToTest->GetRasterXSize() &&
            nReqYSize == poSrcDSToTest->GetRasterYSize() &&
            nJPEGQuality < 0)
        {
            VSILFILE* fpSrc = VSIFOpenL(poSrcDSToTest->GetDescription(), "rb");
            if (fpSrc != NULL)
            {
                CPLDebug("PDF", "Copying directly original JPEG file");

                /* The JPEG file size is known up-front, so /Length can be */
                /* written inline instead of via a separate length object. */
                VSIFSeekL(fpSrc, 0, SEEK_END);
                int nLength = (int)VSIFTellL(fpSrc);
                VSIFSeekL(fpSrc, 0, SEEK_SET);

                int nImageId = AllocNewObject();

                StartObj(nImageId);

                GDALPDFDictionaryRW oDict;
                oDict.Add("Length", nLength)
                     .Add("Type", GDALPDFObjectRW::CreateName("XObject"))
                     .Add("Filter", GDALPDFObjectRW::CreateName("DCTDecode"))
                     .Add("Subtype", GDALPDFObjectRW::CreateName("Image"))
                     .Add("Width", nReqXSize)
                     .Add("Height", nReqYSize)
                     .Add("ColorSpace",
                        (nBands == 1) ? GDALPDFObjectRW::CreateName("DeviceGray") :
                                        GDALPDFObjectRW::CreateName("DeviceRGB"))
                     .Add("BitsPerComponent", 8);

                VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
                VSIFPrintfL(fp, "stream\n");

                /* Copy the raw JPEG bytes in 1 KB chunks, with progress */
                /* reporting and user-abort support.                     */
                GByte abyBuffer[1024];
                for(int i=0;i<nLength;i += 1024)
                {
                    int nRead = (int) VSIFReadL(abyBuffer, 1, 1024, fpSrc);
                    if ((int)VSIFWriteL(abyBuffer, 1, nRead, fp) != nRead)
                    {
                        eErr = CE_Failure;
                        break;
                    }

                    if( eErr == CE_None && pfnProgress != NULL
                        && !pfnProgress( (i + nRead) / (double)nLength,
                                        NULL, pProgressData ) )
                    {
                        CPLError( CE_Failure, CPLE_UserInterrupt,
                                "User terminated CreateCopy()" );
                        eErr = CE_Failure;
                        break;
                    }
                }

                VSIFPrintfL(fp, "\nendstream\n");

                EndObj();

                VSIFCloseL(fpSrc);

                return eErr == CE_None ? nImageId : 0;
            }
        }

        /* No direct JPEG copy possible: fall back to DEFLATE. */
        eCompressMethod = COMPRESS_DEFLATE;
    }

    /* For RGBA sources, write the alpha band as a separate soft mask. */
    int nMaskId = 0;
    if (nBands == 4)
    {
        nMaskId = WriteMask(poSrcDS,
                            nXOff, nYOff, nReqXSize, nReqYSize,
                            eCompressMethod);
    }

    if( nReqXSize == poSrcDS->GetRasterXSize() &&
        nReqYSize == poSrcDS->GetRasterYSize() &&
        nBands != 4)
    {
        /* Whole-image non-RGBA block: use the source dataset directly. */
        poBlockSrcDS = poSrcDS;
    }
    else
    {
        /* Sub-window (or RGBA) block: copy the window into a MEM dataset */
        /* so the codecs below can consume a uniform 1- or 3-band source. */
        if (nBands == 4)
            nBands = 3;   /* alpha already handled via the mask above */

        GDALDriverH hMemDriver = GDALGetDriverByName("MEM");
        if( hMemDriver == NULL )
            return 0;

        hMemDS = GDALCreate(hMemDriver, "MEM:::",
                            nReqXSize, nReqYSize, 0,
                            GDT_Byte, NULL);
        if (hMemDS == NULL)
            return 0;

        pabyMEMDSBuffer =
            (GByte*)VSIMalloc3(nReqXSize, nReqYSize, nBands);
        if (pabyMEMDSBuffer == NULL)
        {
            GDALClose(hMemDS);
            return 0;
        }

        /* Read the window band-sequentially into the buffer. */
        eErr = poSrcDS->RasterIO(GF_Read,
                                nXOff, nYOff,
                                nReqXSize, nReqYSize,
                                pabyMEMDSBuffer, nReqXSize, nReqYSize,
                                GDT_Byte, nBands, NULL,
                                0, 0, 0, NULL);

        if( eErr != CE_None )
        {
            CPLFree(pabyMEMDSBuffer);
            GDALClose(hMemDS);
            return 0;
        }

        /* Attach each band of the copied buffer to the MEM dataset through */
        /* the DATAPOINTER band option (zero-copy band wrapping).           */
        int iBand;
        for(iBand = 0; iBand < nBands; iBand ++)
        {
            char** papszMEMDSOptions = NULL;
            char szTmp[64];
            memset(szTmp, 0, sizeof(szTmp));
            CPLPrintPointer(szTmp,
                            pabyMEMDSBuffer + iBand * nReqXSize * nReqYSize, sizeof(szTmp));
            papszMEMDSOptions = CSLSetNameValue(papszMEMDSOptions, "DATAPOINTER", szTmp);
            GDALAddBand(hMemDS, GDT_Byte, papszMEMDSOptions);
            CSLDestroy(papszMEMDSOptions);
        }

        poBlockSrcDS = (GDALDataset*) hMemDS;
    }

    int nImageId = AllocNewObject();
    int nImageLengthId = AllocNewObject();

    /* Optionally attach a georeferencing /Measure dictionary directly on */
    /* the image; only done for whole-image blocks.                       */
    int nMeasureId = 0;
    if( CSLTestBoolean(CPLGetConfigOption("GDAL_PDF_WRITE_GEOREF_ON_IMAGE", "FALSE")) &&
        nReqXSize == poSrcDS->GetRasterXSize() &&
        nReqYSize == poSrcDS->GetRasterYSize() )
    {
        PDFMargins sMargins = {0, 0, 0, 0};
        nMeasureId = WriteSRS_ISO32000(poSrcDS, 1, NULL, &sMargins, FALSE);
    }

    StartObj(nImageId);

    /* Stream length is unknown until written: reference a length object. */
    GDALPDFDictionaryRW oDict;
    oDict.Add("Length", nImageLengthId, 0)
         .Add("Type", GDALPDFObjectRW::CreateName("XObject"));

    if( eCompressMethod == COMPRESS_DEFLATE )
    {
        oDict.Add("Filter", GDALPDFObjectRW::CreateName("FlateDecode"));
        if( nPredictor == 2 )
            oDict.Add("DecodeParms", &((new GDALPDFDictionaryRW())
                                  ->Add("Predictor", 2)
                                   .Add("Colors", nBands)
                                   .Add("Columns", nReqXSize)));
    }
    else if( eCompressMethod == COMPRESS_JPEG )
    {
        oDict.Add("Filter", GDALPDFObjectRW::CreateName("DCTDecode"));
    }
    else if( eCompressMethod == COMPRESS_JPEG2000 )
    {
        oDict.Add("Filter", GDALPDFObjectRW::CreateName("JPXDecode"));
    }

    oDict.Add("Subtype", GDALPDFObjectRW::CreateName("Image"))
         .Add("Width", nReqXSize)
         .Add("Height", nReqYSize)
         .Add("ColorSpace",
              (nColorTableId != 0) ? GDALPDFObjectRW::CreateIndirect(nColorTableId, 0) :
              (nBands == 1) ?        GDALPDFObjectRW::CreateName("DeviceGray") :
                                     GDALPDFObjectRW::CreateName("DeviceRGB"))
         .Add("BitsPerComponent", 8);
    if( nMaskId )
    {
        oDict.Add("SMask", nMaskId, 0);
    }
    if( nMeasureId )
    {
        oDict.Add("Measure", nMeasureId, 0);
    }

    VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
    VSIFPrintfL(fp, "stream\n");

    vsi_l_offset nStreamStart = VSIFTellL(fp);

    if( eCompressMethod == COMPRESS_JPEG ||
        eCompressMethod == COMPRESS_JPEG2000 )
    {
        /* Encode the block to a temporary /vsimem file with the JPEG or a */
        /* JPEG2000 driver, then splice the codestream into the PDF.       */
        GDALDriver* poJPEGDriver = NULL;
        char szTmp[64];
        char** papszOptions = NULL;

        if( eCompressMethod == COMPRESS_JPEG )
        {
            poJPEGDriver = (GDALDriver*) GDALGetDriverByName("JPEG");
            if (poJPEGDriver != NULL && nJPEGQuality > 0)
                papszOptions = CSLAddString(papszOptions, CPLSPrintf("QUALITY=%d", nJPEGQuality));
            sprintf(szTmp, "/vsimem/pdftemp/%p.jpg", this);
        }
        else
        {
            /* JPEG2000 driver preference order: JP2KAK, JP2ECW,        */
            /* JP2OpenJPEG, JPEG2000 -- unless one is forced explicitly */
            /* through pszJPEG2000_DRIVER.                              */
            if (pszJPEG2000_DRIVER == NULL || EQUAL(pszJPEG2000_DRIVER, "JP2KAK"))
                poJPEGDriver = (GDALDriver*) GDALGetDriverByName("JP2KAK");
            if (poJPEGDriver == NULL)
            {
                if (pszJPEG2000_DRIVER == NULL || EQUAL(pszJPEG2000_DRIVER, "JP2ECW"))
                {
                    poJPEGDriver = (GDALDriver*) GDALGetDriverByName("JP2ECW");
                    if( poJPEGDriver &&
                        poJPEGDriver->GetMetadataItem(GDAL_DMD_CREATIONDATATYPES) == NULL )
                    {
                        /* Driver present but without creation support. */
                        poJPEGDriver = NULL;
                    }
                }
                if (poJPEGDriver)
                {
                    papszOptions = CSLAddString(papszOptions, "PROFILE=NPJE");
                    papszOptions = CSLAddString(papszOptions, "LAYERS=1");
                    papszOptions = CSLAddString(papszOptions, "GeoJP2=OFF");
                    papszOptions = CSLAddString(papszOptions, "GMLJP2=OFF");
                }
            }
            if (poJPEGDriver == NULL)
            {
                if (pszJPEG2000_DRIVER == NULL || EQUAL(pszJPEG2000_DRIVER, "JP2OpenJPEG"))
                    poJPEGDriver = (GDALDriver*) GDALGetDriverByName("JP2OpenJPEG");
                if (poJPEGDriver)
                {
                    papszOptions = CSLAddString(papszOptions, "GeoJP2=OFF");
                    papszOptions = CSLAddString(papszOptions, "GMLJP2=OFF");
                }
            }
            if (poJPEGDriver == NULL)
            {
                if (pszJPEG2000_DRIVER == NULL || EQUAL(pszJPEG2000_DRIVER, "JPEG2000"))
                    poJPEGDriver = (GDALDriver*) GDALGetDriverByName("JPEG2000");
            }
            sprintf(szTmp, "/vsimem/pdftemp/%p.jp2", this);
        }

        if( poJPEGDriver == NULL )
        {
            CPLError(CE_Failure, CPLE_NotSupported,
                     "No %s driver found",
                     ( eCompressMethod == COMPRESS_JPEG ) ? "JPEG" : "JPEG2000");
            eErr = CE_Failure;
            goto end;
        }

        GDALDataset* poJPEGDS = NULL;

        poJPEGDS = poJPEGDriver->CreateCopy(szTmp, poBlockSrcDS,
                                            FALSE, papszOptions,
                                            pfnProgress, pProgressData);

        CSLDestroy(papszOptions);
        if( poJPEGDS == NULL )
        {
            eErr = CE_Failure;
            goto end;
        }

        GDALClose(poJPEGDS);

        /* Take ownership of the encoded in-memory buffer (TRUE = unlink */
        /* the /vsimem file) and write it into the PDF stream.           */
        vsi_l_offset nJPEGDataSize = 0;
        GByte* pabyJPEGData = VSIGetMemFileBuffer(szTmp, &nJPEGDataSize, TRUE);
        VSIFWriteL(pabyJPEGData, nJPEGDataSize, 1, fp);
        CPLFree(pabyJPEGData);
    }
    else
    {
        /* DEFLATE or uncompressed path: stream pixels scanline by scanline, */
        /* optionally through a gzip writer temporarily swapped into fp.     */
        VSILFILE* fpGZip = NULL;
        VSILFILE* fpBack = fp;
        if( eCompressMethod == COMPRESS_DEFLATE )
        {
            fpGZip = (VSILFILE* )VSICreateGZipWritable( (VSIVirtualHandle*) fp, TRUE, FALSE );
            fp = fpGZip;
        }

        GByte* pabyLine = (GByte*)CPLMalloc(nReqXSize * nBands);
        for(int iLine = 0; iLine < nReqYSize; iLine ++)
        {
            /* Get pixel interleaved data */
            eErr = poBlockSrcDS->RasterIO(GF_Read,
                                          0, iLine, nReqXSize, 1,
                                          pabyLine, nReqXSize, 1, GDT_Byte,
                                          nBands, NULL, nBands, 0, 1, NULL);
            if( eErr != CE_None )
                break;

            /* Apply predictor if needed */
            /* (predictor 2: replace each sample by its horizontal delta) */
            if( nPredictor == 2 )
            {
                if( nBands == 1 )
                {
                    int nPrevValue = pabyLine[0];
                    for(int iPixel = 1; iPixel < nReqXSize; iPixel ++)
                    {
                        int nCurValue = pabyLine[iPixel];
                        pabyLine[iPixel] = (GByte) (nCurValue - nPrevValue);
                        nPrevValue = nCurValue;
                    }
                }
                else if( nBands == 3 )
                {
                    /* Deltas are computed per color component. */
                    int nPrevValueR = pabyLine[0];
                    int nPrevValueG = pabyLine[1];
                    int nPrevValueB = pabyLine[2];
                    for(int iPixel = 1; iPixel < nReqXSize; iPixel ++)
                    {
                        int nCurValueR = pabyLine[3 * iPixel + 0];
                        int nCurValueG = pabyLine[3 * iPixel + 1];
                        int nCurValueB = pabyLine[3 * iPixel + 2];
                        pabyLine[3 * iPixel + 0] = (GByte) (nCurValueR - nPrevValueR);
                        pabyLine[3 * iPixel + 1] = (GByte) (nCurValueG - nPrevValueG);
                        pabyLine[3 * iPixel + 2] = (GByte) (nCurValueB - nPrevValueB);
                        nPrevValueR = nCurValueR;
                        nPrevValueG = nCurValueG;
                        nPrevValueB = nCurValueB;
                    }
                }
            }

            if( VSIFWriteL(pabyLine, nReqXSize * nBands, 1, fp) != 1 )
            {
                eErr = CE_Failure;
                break;
            }

            if( eErr == CE_None && pfnProgress != NULL
                && !pfnProgress( (iLine+1) / (double)nReqYSize,
                                NULL, pProgressData ) )
            {
                CPLError( CE_Failure, CPLE_UserInterrupt,
                        "User terminated CreateCopy()" );
                eErr = CE_Failure;
                break;
            }
        }

        CPLFree(pabyLine);

        if (fpGZip)
            VSIFCloseL(fpGZip);
        fp = fpBack;
    }

end:
    /* Common cleanup for both success and the goto-on-failure paths; the */
    /* stream is always terminated so the PDF stays structurally valid.   */
    CPLFree(pabyMEMDSBuffer);
    pabyMEMDSBuffer = NULL;
    if( hMemDS != NULL )
    {
        GDALClose(hMemDS);
        hMemDS = NULL;
    }

    vsi_l_offset nStreamEnd = VSIFTellL(fp);
    VSIFPrintfL(fp,
                "\n"
                "endstream\n");
    EndObj();

    /* Now that the stream size is known, emit the length object. */
    StartObj(nImageLengthId);
    VSIFPrintfL(fp,
                " %ld\n",
                (long)(nStreamEnd - nStreamStart));
    EndObj();

    return eErr == CE_None ? nImageId : 0;
}
/************************************************************************/
/* WriteJavascript() */
/************************************************************************/
int GDALPDFWriter::WriteJavascript(const char* pszJavascript)
{
int nJSId = AllocNewObject();
int nJSLengthId = AllocNewObject();
StartObj(nJSId);
{
GDALPDFDictionaryRW oDict;
oDict.Add("Length", nJSLengthId, 0);
if( oPageContext.eStreamCompressMethod != COMPRESS_NONE )
{
oDict.Add("Filter", GDALPDFObjectRW::CreateName("FlateDecode"));
}
VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
}
VSIFPrintfL(fp, "stream\n");
vsi_l_offset nStreamStart = VSIFTellL(fp);
VSILFILE* fpGZip = NULL;
VSILFILE* fpBack = fp;
if( oPageContext.eStreamCompressMethod != COMPRESS_NONE )
{
fpGZip = (VSILFILE* )VSICreateGZipWritable( (VSIVirtualHandle*) fp, TRUE, FALSE );
fp = fpGZip;
}
VSIFWriteL(pszJavascript, strlen(pszJavascript), 1, fp);
if (fpGZip)
VSIFCloseL(fpGZip);
fp = fpBack;
vsi_l_offset nStreamEnd = VSIFTellL(fp);
VSIFPrintfL(fp,
"\n"
"endstream\n");
EndObj();
StartObj(nJSLengthId);
VSIFPrintfL(fp,
" %ld\n",
(long)(nStreamEnd - nStreamStart));
EndObj();
nNamesId = AllocNewObject();
StartObj(nNamesId);
{
GDALPDFDictionaryRW oDict;
GDALPDFDictionaryRW* poJavaScriptDict = new GDALPDFDictionaryRW();
oDict.Add("JavaScript", poJavaScriptDict);
GDALPDFArrayRW* poNamesArray = new GDALPDFArrayRW();
poJavaScriptDict->Add("Names", poNamesArray);
poNamesArray->Add("GDAL");
GDALPDFDictionaryRW* poJSDict = new GDALPDFDictionaryRW();
poNamesArray->Add(poJSDict);
poJSDict->Add("JS", nJSId, 0);
poJSDict->Add("S", GDALPDFObjectRW::CreateName("JavaScript"));
VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
}
EndObj();
return nNamesId;
}
/************************************************************************/
/* WriteJavascriptFile() */
/************************************************************************/
int GDALPDFWriter::WriteJavascriptFile(const char* pszJavascriptFile)
{
int nRet = 0;
char* pszJavascriptToFree = (char*)CPLMalloc(65536);
VSILFILE* fpJS = VSIFOpenL(pszJavascriptFile, "rb");
if( fpJS != NULL )
{
int nRead = (int)VSIFReadL(pszJavascriptToFree, 1, 65536, fpJS);
if( nRead < 65536 )
{
pszJavascriptToFree[nRead] = '\0';
nRet = WriteJavascript(pszJavascriptToFree);
}
VSIFCloseL(fpJS);
}
CPLFree(pszJavascriptToFree);
return nRet;
}
/************************************************************************/
/* WritePages() */
/************************************************************************/
/* Write the /Pages tree object and the document /Catalog, including the   */
/* optional-content (layer) configuration, the structure tree root and the */
/* name tree references accumulated while writing the pages.               */
void GDALPDFWriter::WritePages()
{
    /* /Pages node listing every page written so far. */
    StartObj(nPageResourceId);
    {
        GDALPDFDictionaryRW oDict;
        GDALPDFArrayRW* poKids = new GDALPDFArrayRW();
        oDict.Add("Type", GDALPDFObjectRW::CreateName("Pages"))
             .Add("Count", (int)asPageId.size())
             .Add("Kids", poKids);

        for(size_t i=0;i<asPageId.size();i++)
            poKids->Add(asPageId[i], 0);

        VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
    }
    EndObj();

    /* Document catalog. */
    StartObj(nCatalogId);
    {
        GDALPDFDictionaryRW oDict;
        oDict.Add("Type", GDALPDFObjectRW::CreateName("Catalog"))
             .Add("Pages", nPageResourceId, 0);
        if (nXMPId)
            oDict.Add("Metadata", nXMPId, 0);
        if (asOCGs.size())
        {
            /* Optional content groups. asOCGs apparently stores a child  */
            /* layer immediately after its parent, linked via nParentId   */
            /* (see the lookahead checks below) -- at most one level deep. */
            GDALPDFDictionaryRW* poDictOCProperties = new GDALPDFDictionaryRW();
            oDict.Add("OCProperties", poDictOCProperties);

            GDALPDFDictionaryRW* poDictD = new GDALPDFDictionaryRW();
            poDictOCProperties->Add("D", poDictD);

            /* Build "Order" array of D dict */
            /* (a child is emitted as a nested sub-array after its parent) */
            GDALPDFArrayRW* poArrayOrder = new GDALPDFArrayRW();
            size_t i;
            for(i=0;i<asOCGs.size();i++)
            {
                poArrayOrder->Add(asOCGs[i].nId, 0);
                if (i + 1 < asOCGs.size() && asOCGs[i+1].nParentId == asOCGs[i].nId)
                {
                    GDALPDFArrayRW* poSubArrayOrder = new GDALPDFArrayRW();
                    poSubArrayOrder->Add(asOCGs[i+1].nId, 0);
                    poArrayOrder->Add(poSubArrayOrder);
                    i ++;
                }
            }
            poDictD->Add("Order", poArrayOrder);

            /* Build "OFF" array of D dict */
            /* (layers requested hidden by default via OFF_LAYERS) */
            if( osOffLayers.size() )
            {
                GDALPDFArrayRW* poArrayOFF = new GDALPDFArrayRW();
                char** papszTokens = CSLTokenizeString2(osOffLayers, ",", 0);
                /* NOTE: this int i shadows the outer size_t i. */
                for(int i=0; papszTokens[i] != NULL; i++)
                {
                    size_t j;
                    int bFound = FALSE;
                    for(j=0;j<asOCGs.size();j++)
                    {
                        if( strcmp(papszTokens[i], asOCGs[j].osLayerName) == 0)
                        {
                            poArrayOFF->Add(asOCGs[j].nId, 0);
                            bFound = TRUE;
                        }
                        /* Skip the child entry paired with this parent. */
                        if (j + 1 < asOCGs.size() && asOCGs[j+1].nParentId == asOCGs[j].nId)
                        {
                            j ++;
                        }
                    }
                    if( !bFound )
                    {
                        CPLError(CE_Warning, CPLE_AppDefined,
                                "Unknown layer name (%s) specified in OFF_LAYERS",
                                papszTokens[i]);
                    }
                }
                CSLDestroy(papszTokens);
                poDictD->Add("OFF", poArrayOFF);
            }

            /* Build "RBGroups" array of D dict */
            /* (mutually exclusive layers requested via EXCLUSIVE_LAYERS) */
            if( osExclusiveLayers.size() )
            {
                GDALPDFArrayRW* poArrayRBGroups = new GDALPDFArrayRW();
                char** papszTokens = CSLTokenizeString2(osExclusiveLayers, ",", 0);
                for(int i=0; papszTokens[i] != NULL; i++)
                {
                    size_t j;
                    int bFound = FALSE;
                    for(j=0;j<asOCGs.size();j++)
                    {
                        if( strcmp(papszTokens[i], asOCGs[j].osLayerName) == 0)
                        {
                            poArrayRBGroups->Add(asOCGs[j].nId, 0);
                            bFound = TRUE;
                        }
                        /* Skip the child entry paired with this parent. */
                        if (j + 1 < asOCGs.size() && asOCGs[j+1].nParentId == asOCGs[j].nId)
                        {
                            j ++;
                        }
                    }
                    if( !bFound )
                    {
                        CPLError(CE_Warning, CPLE_AppDefined,
                                "Unknown layer name (%s) specified in EXCLUSIVE_LAYERS",
                                papszTokens[i]);
                    }
                }
                CSLDestroy(papszTokens);
                /* Only emit RBGroups when at least one layer matched. */
                if( poArrayRBGroups->GetLength() )
                {
                    GDALPDFArrayRW* poMainArrayRBGroups = new GDALPDFArrayRW();
                    poMainArrayRBGroups->Add(poArrayRBGroups);
                    poDictD->Add("RBGroups", poMainArrayRBGroups);
                }
                else
                    delete poArrayRBGroups;
            }

            /* Flat list of all OCGs referenced by the document. */
            GDALPDFArrayRW* poArrayOGCs = new GDALPDFArrayRW();
            for(i=0;i<asOCGs.size();i++)
                poArrayOGCs->Add(asOCGs[i].nId, 0);
            poDictOCProperties->Add("OCGs", poArrayOGCs);
        }
        if (nStructTreeRootId)
        {
            /* Logical structure tree (feature attributes exposed as */
            /* user properties).                                     */
            GDALPDFDictionaryRW* poDictMarkInfo = new GDALPDFDictionaryRW();
            oDict.Add("MarkInfo", poDictMarkInfo);
            poDictMarkInfo->Add("UserProperties", GDALPDFObjectRW::CreateBool(TRUE));

            oDict.Add("StructTreeRoot", nStructTreeRootId, 0);
        }
        if (nNamesId)
            oDict.Add("Names", nNamesId, 0);

        VSIFPrintfL(fp, "%s\n", oDict.Serialize().c_str());
    }
    EndObj();
}
/************************************************************************/
/* GDALPDFGetJPEGQuality() */
/************************************************************************/
/* Fetch and validate the JPEG_QUALITY creation option.           */
/* Returns the quality in [1, 100], or -1 when absent or invalid  */
/* (an invalid value triggers a warning and is ignored).          */
static int GDALPDFGetJPEGQuality(char** papszOptions)
{
    const char* pszValue = CSLFetchNameValue( papszOptions, "JPEG_QUALITY" );
    if( pszValue == NULL )
        return -1;

    const int nJpegQuality = atoi( pszValue );
    if( nJpegQuality < 1 || nJpegQuality > 100 )
    {
        CPLError( CE_Warning, CPLE_IllegalArg,
                  "JPEG_QUALITY=%s value not recognised, ignoring.",
                  pszValue );
        return -1;
    }
    return nJpegQuality;
}
/************************************************************************/
/* GDALPDFClippingDataset */
/************************************************************************/
/* Lightweight wrapper dataset exposing a clipped view of poSrcDS: same    */
/* pixel size and projection, but origin and raster dimensions derived     */
/* from the georeferenced clipping extent                                  */
/* (adfClippingExtent = [xmin, ymin, xmax, ymax]).                         */
class GDALPDFClippingDataset: public GDALDataset
{
        GDALDataset* poSrcDS;       /* wrapped dataset (not owned) */
        double adfGeoTransform[6];  /* geotransform of the clipped view */

    public:
        GDALPDFClippingDataset(GDALDataset* poSrcDS, double adfClippingExtent[4]) : poSrcDS(poSrcDS)
        {
            double adfSrcGeoTransform[6];
            poSrcDS->GetGeoTransform(adfSrcGeoTransform);
            /* Keep the source resolution; only the origin changes. */
            adfGeoTransform[0] = adfClippingExtent[0];
            adfGeoTransform[1] = adfSrcGeoTransform[1];
            adfGeoTransform[2] = 0.0;
            /* Top edge of the window: max Y for north-up sources */
            /* (negative pixel height), min Y otherwise.          */
            adfGeoTransform[3] = adfSrcGeoTransform[5] < 0 ? adfClippingExtent[3] : adfClippingExtent[1];
            adfGeoTransform[4] = 0.0;
            adfGeoTransform[5] = adfSrcGeoTransform[5];
            /* NOTE(review): assumes the extent is aligned on the pixel   */
            /* grid; any remainder is truncated by the int cast — confirm. */
            nRasterXSize = (int)((adfClippingExtent[2] - adfClippingExtent[0]) / adfSrcGeoTransform[1]);
            nRasterYSize = (int)((adfClippingExtent[3] - adfClippingExtent[1]) / fabs(adfSrcGeoTransform[5]));
        }

        /* Returns the geotransform of the clipped view. */
        virtual CPLErr GetGeoTransform( double * padfGeoTransform )
        {
            memcpy(padfGeoTransform, adfGeoTransform, 6 * sizeof(double));
            return CE_None;
        }

        /* The projection is delegated unchanged to the wrapped dataset. */
        virtual const char* GetProjectionRef()
        {
            return poSrcDS->GetProjectionRef();
        }
};
/************************************************************************/
/* GDALPDFCreateCopy() */
/************************************************************************/
GDALDataset *GDALPDFCreateCopy( const char * pszFilename,
GDALDataset *poSrcDS,
int bStrict,
char **papszOptions,
GDALProgressFunc pfnProgress,
void * pProgressData )
{
int nBands = poSrcDS->GetRasterCount();
int nWidth = poSrcDS->GetRasterXSize();
int nHeight = poSrcDS->GetRasterYSize();
if( !pfnProgress( 0.0, NULL, pProgressData ) )
return NULL;
/* -------------------------------------------------------------------- */
/*      Some rudimentary checks                                         */
/* -------------------------------------------------------------------- */
if( nBands != 1 && nBands != 3 && nBands != 4 )
{
CPLError( CE_Failure, CPLE_NotSupported,
"PDF driver doesn't support %d bands. Must be 1 (grey or with color table), "
"3 (RGB) or 4 bands.\n", nBands );
return NULL;
}
GDALDataType eDT = poSrcDS->GetRasterBand(1)->GetRasterDataType();
if( eDT != GDT_Byte )
{
CPLError( (bStrict) ? CE_Failure : CE_Warning, CPLE_NotSupported,
"PDF driver doesn't support data type %s. "
"Only eight bit byte bands supported.\n",
GDALGetDataTypeName(
poSrcDS->GetRasterBand(1)->GetRasterDataType()) );
if (bStrict)
return NULL;
}
/* -------------------------------------------------------------------- */
/* Read options. */
/* -------------------------------------------------------------------- */
PDFCompressMethod eCompressMethod = COMPRESS_DEFAULT;
const char* pszCompressMethod = CSLFetchNameValue(papszOptions, "COMPRESS");
if (pszCompressMethod)
{
if( EQUAL(pszCompressMethod, "NONE") )
eCompressMethod = COMPRESS_NONE;
else if( EQUAL(pszCompressMethod, "DEFLATE") )
eCompressMethod = COMPRESS_DEFLATE;
else if( EQUAL(pszCompressMethod, "JPEG") )
eCompressMethod = COMPRESS_JPEG;
else if( EQUAL(pszCompressMethod, "JPEG2000") )
eCompressMethod = COMPRESS_JPEG2000;
else
{
CPLError( (bStrict) ? CE_Failure : CE_Warning, CPLE_NotSupported,
"Unsupported value for COMPRESS.");
if (bStrict)
return NULL;
}
}
PDFCompressMethod eStreamCompressMethod = COMPRESS_DEFLATE;
const char* pszStreamCompressMethod = CSLFetchNameValue(papszOptions, "STREAM_COMPRESS");
if (pszStreamCompressMethod)
{
if( EQUAL(pszStreamCompressMethod, "NONE") )
eStreamCompressMethod = COMPRESS_NONE;
else if( EQUAL(pszStreamCompressMethod, "DEFLATE") )
eStreamCompressMethod = COMPRESS_DEFLATE;
else
{
CPLError( (bStrict) ? CE_Failure : CE_Warning, CPLE_NotSupported,
"Unsupported value for STREAM_COMPRESS.");
if (bStrict)
return NULL;
}
}
if (nBands == 1 &&
poSrcDS->GetRasterBand(1)->GetColorTable() != NULL &&
(eCompressMethod == COMPRESS_JPEG || eCompressMethod == COMPRESS_JPEG2000))
{
CPLError( CE_Warning, CPLE_AppDefined,
"The source raster band has a color table, which is not appropriate with JPEG or JPEG2000 compression.\n"
"You should rather consider using color table expansion (-expand option in gdal_translate)");
}
int nBlockXSize = nWidth;
int nBlockYSize = nHeight;
const char* pszValue;
int bTiled = CSLFetchBoolean( papszOptions, "TILED", FALSE );
if( bTiled )
nBlockXSize = nBlockYSize = 256;
pszValue = CSLFetchNameValue(papszOptions, "BLOCKXSIZE");
if( pszValue != NULL )
{
nBlockXSize = atoi( pszValue );
if (nBlockXSize < 0 || nBlockXSize >= nWidth)
nBlockXSize = nWidth;
}
pszValue = CSLFetchNameValue(papszOptions, "BLOCKYSIZE");
if( pszValue != NULL )
{
nBlockYSize = atoi( pszValue );
if (nBlockYSize < 0 || nBlockYSize >= nHeight)
nBlockYSize = nHeight;
}
int nJPEGQuality = GDALPDFGetJPEGQuality(papszOptions);
const char* pszJPEG2000_DRIVER = CSLFetchNameValue(papszOptions, "JPEG2000_DRIVER");
const char* pszGEO_ENCODING =
CSLFetchNameValueDef(papszOptions, "GEO_ENCODING", "ISO32000");
const char* pszXMP = CSLFetchNameValue(papszOptions, "XMP");
const char* pszPredictor = CSLFetchNameValue(papszOptions, "PREDICTOR");
int nPredictor = 1;
if (pszPredictor)
{
if (eCompressMethod == COMPRESS_DEFAULT)
eCompressMethod = COMPRESS_DEFLATE;
if (eCompressMethod != COMPRESS_DEFLATE)
{
CPLError(CE_Warning, CPLE_NotSupported,
"PREDICTOR option is only taken into account for DEFLATE compression");
}
else
{
nPredictor = atoi(pszPredictor);
if (nPredictor != 1 && nPredictor != 2)
{
CPLError(CE_Warning, CPLE_NotSupported,
"Supported PREDICTOR values are 1 or 2");
nPredictor = 1;
}
}
}
const char* pszNEATLINE = CSLFetchNameValue(papszOptions, "NEATLINE");
int nMargin = atoi(CSLFetchNameValueDef(papszOptions, "MARGIN", "0"));
PDFMargins sMargins;
sMargins.nLeft = nMargin;
sMargins.nRight = nMargin;
sMargins.nTop = nMargin;
sMargins.nBottom = nMargin;
const char* pszLeftMargin = CSLFetchNameValue(papszOptions, "LEFT_MARGIN");
if (pszLeftMargin) sMargins.nLeft = atoi(pszLeftMargin);
const char* pszRightMargin = CSLFetchNameValue(papszOptions, "RIGHT_MARGIN");
if (pszRightMargin) sMargins.nRight = atoi(pszRightMargin);
const char* pszTopMargin = CSLFetchNameValue(papszOptions, "TOP_MARGIN");
if (pszTopMargin) sMargins.nTop = atoi(pszTopMargin);
const char* pszBottomMargin = CSLFetchNameValue(papszOptions, "BOTTOM_MARGIN");
if (pszBottomMargin) sMargins.nBottom = atoi(pszBottomMargin);
const char* pszDPI = CSLFetchNameValue(papszOptions, "DPI");
double dfDPI = DEFAULT_DPI;
if( pszDPI != NULL )
dfDPI = CPLAtof(pszDPI);
double dfUserUnit = dfDPI * USER_UNIT_IN_INCH;
double dfWidthInUserUnit = nWidth / dfUserUnit + sMargins.nLeft + sMargins.nRight;
double dfHeightInUserUnit = nHeight / dfUserUnit + sMargins.nBottom + sMargins.nTop;
if( dfWidthInUserUnit > MAXIMUM_SIZE_IN_UNITS ||
dfHeightInUserUnit > MAXIMUM_SIZE_IN_UNITS )
{
if( pszDPI == NULL )
{
if( sMargins.nLeft + sMargins.nRight >= MAXIMUM_SIZE_IN_UNITS ||
sMargins.nBottom + sMargins.nTop >= MAXIMUM_SIZE_IN_UNITS )
{
CPLError(CE_Warning, CPLE_AppDefined,
"Margins too big compared to maximum page dimension (%d) "
"in user units allowed by Acrobat",
MAXIMUM_SIZE_IN_UNITS);
}
else
{
if( dfWidthInUserUnit >= dfHeightInUserUnit )
{
dfDPI = (int)(0.5 + (double)nWidth / (MAXIMUM_SIZE_IN_UNITS -
(sMargins.nLeft + sMargins.nRight)) / USER_UNIT_IN_INCH);
}
else
{
dfDPI = (int)(0.5 + (double)nHeight / (MAXIMUM_SIZE_IN_UNITS -
(sMargins.nBottom + sMargins.nTop)) / USER_UNIT_IN_INCH);
}
CPLDebug("PDF", "Adjusting DPI to %d so that page dimension in "
"user units remain in what is accepted by Acrobat", (int)dfDPI);
}
}
else
{
CPLError(CE_Warning, CPLE_AppDefined,
"The page dimension in user units is %d x %d whereas the "
"maximum allowed by Acrobat is %d x %d",
(int)(dfWidthInUserUnit + 0.5),
(int)(dfHeightInUserUnit + 0.5),
MAXIMUM_SIZE_IN_UNITS, MAXIMUM_SIZE_IN_UNITS);
}
}
if (dfDPI < DEFAULT_DPI)
dfDPI = DEFAULT_DPI;
const char* pszClippingExtent = CSLFetchNameValue(papszOptions, "CLIPPING_EXTENT");
int bUseClippingExtent = FALSE;
double adfClippingExtent[4] = { 0.0, 0.0, 0.0, 0.0 };
if( pszClippingExtent != NULL )
{
char** papszTokens = CSLTokenizeString2(pszClippingExtent, ",", 0);
if( CSLCount(papszTokens) == 4 )
{
bUseClippingExtent = TRUE;
adfClippingExtent[0] = CPLAtof(papszTokens[0]);
adfClippingExtent[1] = CPLAtof(papszTokens[1]);
adfClippingExtent[2] = CPLAtof(papszTokens[2]);
adfClippingExtent[3] = CPLAtof(papszTokens[3]);
if( adfClippingExtent[0] > adfClippingExtent[2] ||
adfClippingExtent[1] > adfClippingExtent[3] )
{
CPLError(CE_Warning, CPLE_AppDefined,
"Invalid value for CLIPPING_EXTENT. Should be xmin,ymin,xmax,ymax");
bUseClippingExtent = TRUE;
}
if( bUseClippingExtent )
{
double adfGeoTransform[6];
if( poSrcDS->GetGeoTransform(adfGeoTransform) == CE_None )
{
if( adfGeoTransform[2] != 0.0 || adfGeoTransform[4] != 0.0 )
{
CPLError(CE_Warning, CPLE_AppDefined,
"Cannot use CLIPPING_EXTENT because main raster has a rotated geotransform");
bUseClippingExtent = TRUE;
}
}
else
{
CPLError(CE_Warning, CPLE_AppDefined,
"Cannot use CLIPPING_EXTENT because main raster has no geotransform");
bUseClippingExtent = TRUE;
}
}
}
CSLDestroy(papszTokens);
}
const char* pszLayerName = CSLFetchNameValue(papszOptions, "LAYER_NAME");
const char* pszExtraImages = CSLFetchNameValue(papszOptions, "EXTRA_IMAGES");
const char* pszExtraStream = CSLFetchNameValue(papszOptions, "EXTRA_STREAM");
const char* pszExtraLayerName = CSLFetchNameValue(papszOptions, "EXTRA_LAYER_NAME");
const char* pszOGRDataSource = CSLFetchNameValue(papszOptions, "OGR_DATASOURCE");
const char* pszOGRDisplayField = CSLFetchNameValue(papszOptions, "OGR_DISPLAY_FIELD");
const char* pszOGRDisplayLayerNames = CSLFetchNameValue(papszOptions, "OGR_DISPLAY_LAYER_NAMES");
const char* pszOGRLinkField = CSLFetchNameValue(papszOptions, "OGR_LINK_FIELD");
int bWriteOGRAttributes = CSLFetchBoolean(papszOptions, "OGR_WRITE_ATTRIBUTES", TRUE);
const char* pszExtraRasters = CSLFetchNameValue(papszOptions, "EXTRA_RASTERS");
const char* pszExtraRastersLayerName = CSLFetchNameValue(papszOptions, "EXTRA_RASTERS_LAYER_NAME");
const char* pszOffLayers = CSLFetchNameValue(papszOptions, "OFF_LAYERS");
const char* pszExclusiveLayers = CSLFetchNameValue(papszOptions, "EXCLUSIVE_LAYERS");
const char* pszJavascript = CSLFetchNameValue(papszOptions, "JAVASCRIPT");
const char* pszJavascriptFile = CSLFetchNameValue(papszOptions, "JAVASCRIPT_FILE");
/* -------------------------------------------------------------------- */
/* Create file. */
/* -------------------------------------------------------------------- */
VSILFILE* fp = VSIFOpenL(pszFilename, "wb");
if( fp == NULL )
{
CPLError( CE_Failure, CPLE_OpenFailed,
"Unable to create PDF file %s.\n",
pszFilename );
return NULL;
}
GDALPDFWriter oWriter(fp);
GDALDataset* poClippingDS = poSrcDS;
if( bUseClippingExtent )
poClippingDS = new GDALPDFClippingDataset(poSrcDS, adfClippingExtent);
if( CSLFetchBoolean(papszOptions, "WRITE_INFO", TRUE) )
oWriter.SetInfo(poSrcDS, papszOptions);
oWriter.SetXMP(poClippingDS, pszXMP);
oWriter.StartPage(poClippingDS,
dfDPI,
pszGEO_ENCODING,
pszNEATLINE,
&sMargins,
eStreamCompressMethod,
pszOGRDataSource != NULL && bWriteOGRAttributes);
int bRet;
if( !bUseClippingExtent )
{
bRet = oWriter.WriteImagery(poSrcDS,
pszLayerName,
eCompressMethod,
nPredictor,
nJPEGQuality,
pszJPEG2000_DRIVER,
nBlockXSize, nBlockYSize,
pfnProgress, pProgressData);
}
else
{
bRet = oWriter.WriteClippedImagery(poSrcDS,
pszLayerName,
eCompressMethod,
nPredictor,
nJPEGQuality,
pszJPEG2000_DRIVER,
nBlockXSize, nBlockYSize,
pfnProgress, pProgressData);
}
char** papszExtraRasters = CSLTokenizeString2(
pszExtraRasters ? pszExtraRasters : "", ",", 0);
char** papszExtraRastersLayerName = CSLTokenizeString2(
pszExtraRastersLayerName ? pszExtraRastersLayerName : "", ",", 0);
int bUseExtraRastersLayerName = (CSLCount(papszExtraRasters) ==
CSLCount(papszExtraRastersLayerName));
int bUseExtraRasters = TRUE;
const char* pszClippingProjectionRef = poSrcDS->GetProjectionRef();
if( CSLCount(papszExtraRasters) != 0 )
{
double adfGeoTransform[6];
if( poSrcDS->GetGeoTransform(adfGeoTransform) == CE_None )
{
if( adfGeoTransform[2] != 0.0 || adfGeoTransform[4] != 0.0 )
{
CPLError(CE_Warning, CPLE_AppDefined,
"Cannot use EXTRA_RASTERS because main raster has a rotated geotransform");
bUseExtraRasters = FALSE;
}
}
else
{
CPLError(CE_Warning, CPLE_AppDefined,
"Cannot use EXTRA_RASTERS because main raster has no geotransform");
bUseExtraRasters = FALSE;
}
if( bUseExtraRasters &&
(pszClippingProjectionRef == NULL ||
pszClippingProjectionRef[0] == '\0') )
{
CPLError(CE_Warning, CPLE_AppDefined,
"Cannot use EXTRA_RASTERS because main raster has no projection");
bUseExtraRasters = FALSE;
}
}
for(int i=0; bRet && bUseExtraRasters && papszExtraRasters[i] != NULL; i++)
{
GDALDataset* poDS = (GDALDataset*)GDALOpen(papszExtraRasters[i], GA_ReadOnly);
if( poDS != NULL )
{
double adfGeoTransform[6];
int bUseRaster = TRUE;
if( poDS->GetGeoTransform(adfGeoTransform) == CE_None )
{
if( adfGeoTransform[2] != 0.0 || adfGeoTransform[4] != 0.0 )
{
CPLError(CE_Warning, CPLE_AppDefined,
"Cannot use %s because it has a rotated geotransform",
papszExtraRasters[i]);
bUseRaster = FALSE;
}
}
else
{
CPLError(CE_Warning, CPLE_AppDefined,
"Cannot use %s because it has no geotransform",
papszExtraRasters[i]);
bUseRaster = FALSE;
}
const char* pszProjectionRef = poDS->GetProjectionRef();
if( bUseRaster &&
(pszProjectionRef == NULL || pszProjectionRef[0] == '\0') )
{
CPLError(CE_Warning, CPLE_AppDefined,
"Cannot use %s because it has no projection",
papszExtraRasters[i]);
bUseRaster = FALSE;
}
if( bUseRaster )
{
if( pszClippingProjectionRef != NULL &&
pszProjectionRef != NULL &&
!EQUAL(pszClippingProjectionRef, pszProjectionRef) )
{
OGRSpatialReferenceH hClippingSRS =
OSRNewSpatialReference(pszClippingProjectionRef);
OGRSpatialReferenceH hSRS =
OSRNewSpatialReference(pszProjectionRef);
if (!OSRIsSame(hClippingSRS, hSRS))
{
CPLError(CE_Warning, CPLE_AppDefined,
"Cannot use %s because it has a different projection than main dataset",
papszExtraRasters[i]);
bUseRaster = FALSE;
}
OSRDestroySpatialReference(hClippingSRS);
OSRDestroySpatialReference(hSRS);
}
}
if( bUseRaster )
{
bRet = oWriter.WriteClippedImagery(poDS,
bUseExtraRastersLayerName ?
papszExtraRastersLayerName[i] : NULL,
eCompressMethod,
nPredictor,
nJPEGQuality,
pszJPEG2000_DRIVER,
nBlockXSize, nBlockYSize,
NULL, NULL);
}
GDALClose(poDS);
}
}
CSLDestroy(papszExtraRasters);
CSLDestroy(papszExtraRastersLayerName);
#ifdef OGR_ENABLED
if (bRet && pszOGRDataSource != NULL)
oWriter.WriteOGRDataSource(pszOGRDataSource,
pszOGRDisplayField,
pszOGRDisplayLayerNames,
pszOGRLinkField,
bWriteOGRAttributes);
#endif
if (bRet)
oWriter.EndPage(pszExtraImages,
pszExtraStream,
pszExtraLayerName,
pszOffLayers,
pszExclusiveLayers);
if (pszJavascript)
oWriter.WriteJavascript(pszJavascript);
else if (pszJavascriptFile)
oWriter.WriteJavascriptFile(pszJavascriptFile);
oWriter.Close();
if (poClippingDS != poSrcDS)
delete poClippingDS;
if (!bRet)
{
VSIUnlink(pszFilename);
return NULL;
}
else
{
#if defined(HAVE_POPPLER) || defined(HAVE_PODOFO)
return GDALPDFOpen(pszFilename, GA_ReadOnly);
#else
return new GDALFakePDFDataset();
#endif
}
}
| apache-2.0 |
ebyhr/presto | core/trino-main/src/main/java/io/trino/sql/planner/optimizations/OptimizerStats.java | 1314 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.sql.planner.optimizations;
import io.airlift.stats.TimeDistribution;
import org.weakref.jmx.Managed;
import org.weakref.jmx.Nested;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Collects execution statistics for a single planner optimizer: a latency
 * distribution of optimizer runs plus a counter of failed invocations.
 * The {@code @Managed} accessors expose both over JMX.
 */
public class OptimizerStats
{
    // Number of optimizer invocations that ended in failure.
    private final AtomicLong failureCount = new AtomicLong();
    // Distribution of run times; values are added in nanoseconds and the
    // distribution is constructed with TimeUnit.MICROSECONDS (presumably
    // the reporting unit of TimeDistribution).
    private final TimeDistribution executionTime = new TimeDistribution(TimeUnit.MICROSECONDS);

    /**
     * Records one optimizer run.
     *
     * @param nanos elapsed wall-clock time of the run, in nanoseconds
     */
    public void record(long nanos)
    {
        executionTime.add(nanos);
    }

    /** Records one failed optimizer run. */
    public void recordFailure()
    {
        failureCount.incrementAndGet();
    }

    @Managed
    @Nested
    public TimeDistribution getTime()
    {
        return executionTime;
    }

    @Managed
    public long getFailures()
    {
        return failureCount.get();
    }
}
| apache-2.0 |
lutts/testability-explorer | eclipse-plugin/plugins/com.google.test.metric.eclipse.ui/src/main/java/com/google/test/metric/eclipse/ui/plugin/Activator.java | 2728 | /*
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.test.metric.eclipse.ui.plugin;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.swt.graphics.Image;
import org.eclipse.ui.plugin.AbstractUIPlugin;
import org.osgi.framework.BundleContext;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
/**
 * Bundle activator for the Testability Explorer UI plug-in. Controls the
 * plug-in life cycle and lazily loads and caches {@link Image} instances
 * resolved against the bundle location.
 */
public class Activator extends AbstractUIPlugin {

  // The plug-in ID
  public static final String PLUGIN_ID = "com.google.test.metric.eclipse.ui";

  // The shared instance
  private static Activator plugin;

  // Images already materialized, keyed by bundle-relative path.
  private Map<String, Image> imageCache = new HashMap<String, Image>();

  /**
   * The constructor
   */
  public Activator() {
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext
   * )
   */
  @Override
  public void start(BundleContext context) throws Exception {
    super.start(context);
    plugin = this;
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext
   * )
   */
  @Override
  public void stop(BundleContext context) throws Exception {
    plugin = null;
    super.stop(context);
  }

  /**
   * Returns the shared instance
   *
   * @return the shared instance
   */
  public static Activator getDefault() {
    return plugin;
  }

  /**
   * Loads (or returns the cached) image for a bundle-relative path.
   *
   * @param path path of the image relative to the plug-in bundle location
   * @return the image, created once and cached thereafter
   * @throws ImageNotFoundException if a valid URL cannot be formed for the path
   */
  public Image getImage(String path) throws ImageNotFoundException {
    Image cached = imageCache.get(path);
    if (cached != null) {
      return cached;
    }
    String bundleLocation = Activator.getDefault().getBundle().getLocation();
    // OSGi may report the location with a "reference:" scheme prefix; strip it
    // so the remainder can be used as a plain URL base.
    if (bundleLocation.startsWith("reference:")) {
      bundleLocation = bundleLocation.substring("reference:".length());
    }
    URL imageUrl;
    try {
      imageUrl = new URL(bundleLocation + path);
    } catch (MalformedURLException e) {
      throw new ImageNotFoundException("Image : " + path + " not found");
    }
    ImageDescriptor descriptor = ImageDescriptor.createFromURL(imageUrl);
    Image loaded = descriptor.createImage();
    imageCache.put(path, loaded);
    return loaded;
  }
}
| apache-2.0 |
nugget/home-assistant | homeassistant/components/binary_sensor/command_line.py | 3447 | """
Support for custom shell commands to retrieve values.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.command_line/
"""
import logging
from datetime import timedelta
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.binary_sensor import (
BinarySensorDevice, DEVICE_CLASSES_SCHEMA, PLATFORM_SCHEMA)
from homeassistant.components.sensor.command_line import CommandSensorData
from homeassistant.const import (
CONF_PAYLOAD_OFF, CONF_PAYLOAD_ON, CONF_NAME, CONF_VALUE_TEMPLATE,
CONF_COMMAND, CONF_DEVICE_CLASS)
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = 'Binary Command Sensor'
DEFAULT_PAYLOAD_ON = 'ON'
DEFAULT_PAYLOAD_OFF = 'OFF'
SCAN_INTERVAL = timedelta(seconds=60)
CONF_COMMAND_TIMEOUT = 'command_timeout'
DEFAULT_TIMEOUT = 15
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_COMMAND): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string,
vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(
CONF_COMMAND_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Create the command-line binary sensor from a validated config entry."""
    value_template = config.get(CONF_VALUE_TEMPLATE)
    if value_template is not None:
        value_template.hass = hass

    sensor_data = CommandSensorData(
        hass, config.get(CONF_COMMAND), config.get(CONF_COMMAND_TIMEOUT))

    sensor = CommandBinarySensor(
        hass,
        sensor_data,
        config.get(CONF_NAME),
        config.get(CONF_DEVICE_CLASS),
        config.get(CONF_PAYLOAD_ON),
        config.get(CONF_PAYLOAD_OFF),
        value_template,
    )
    # True -> update the entity once before it is added.
    add_entities([sensor], True)
class CommandBinarySensor(BinarySensorDevice):
    """Binary sensor whose on/off state is produced by a shell command."""

    def __init__(self, hass, data, name, device_class, payload_on,
                 payload_off, value_template):
        """Store the command data source and output-mapping options."""
        self._hass = hass
        self.data = data
        self._name = name
        self._device_class = device_class
        self._state = False
        self._payload_on = payload_on
        self._payload_off = payload_off
        self._value_template = value_template

    @property
    def name(self):
        """Name of this binary sensor."""
        return self._name

    @property
    def is_on(self):
        """Whether the binary sensor is currently on."""
        return self._state

    @property
    def device_class(self):
        """Device class configured for this sensor."""
        return self._device_class

    def update(self):
        """Re-run the command and map its output onto the on/off state."""
        self.data.update()
        value = self.data.value

        template = self._value_template
        if template is not None:
            value = template.render_with_possible_json_value(value, False)

        # Output matching neither payload leaves the previous state in place.
        if value == self._payload_on:
            self._state = True
        elif value == self._payload_off:
            self._state = False
| apache-2.0 |
frett/cas | api/cas-server-core-api-util/src/main/java/org/apereo/cas/DistributedCacheManager.java | 2715 | package org.apereo.cas;
import lombok.val;
import java.io.Closeable;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Optional;
import java.util.function.Predicate;
/**
* This is {@link DistributedCacheManager} that acts as a facade for a cache implementation.
* It's designed via generics to accept a key, a value associated with that key and an output object.
* While mostly value and output are one of the same, these are made separate intentionally
* to avoid serialization issues and provide flexibility to provide transformations on the final result.
*
* @param <K> the type parameter
* @param <V> the type parameter
* @author Misagh Moayyed
* @since 5.2.0
*/
@FunctionalInterface
public interface DistributedCacheManager<K extends Serializable, V extends DistributedCacheObject> extends Closeable {
/**
* Get item.
*
* @param key the key
* @return the item or null if not found.
*/
default V get(final K key) {
return null;
}
/**
* Gets all items in the cache.
*
* @return the all
*/
default Collection<V> getAll() {
return new ArrayList<>(0);
}
/**
* Set item in the cache.
*
* @param key the key
* @param item the item to store in the cache
*/
default void set(final K key, final V item) {
}
/**
* Contains key in the cache?
*
* @param key the key
* @return true /false
*/
default boolean contains(final K key) {
return false;
}
/**
* update key/item from the cache and overwrite.
*
* @param key the key
* @param item the item
*/
default void update(final K key, final V item) {
}
/**
* Remove key/item from the cache.
*
* @param key the key
* @param item the item
*/
default void remove(final K key, final V item) {
}
/**
* Gets the cache impl name.
*
* @return the name
*/
default String getName() {
return this.getClass().getSimpleName();
}
/**
* Find values matching this predicate.
*
* @param filter the filter
* @return the collection
*/
default Collection<V> findAll(final Predicate<V> filter) {
return new ArrayList<>(0);
}
/**
* Find values matching this predicate.
*
* @param filter the filter
* @return the collection
*/
default Optional<V> find(final Predicate<V> filter) {
val results = findAll(filter);
if (results.isEmpty()) {
return Optional.empty();
}
return Optional.of(results.iterator().next());
}
}
| apache-2.0 |
brettsam/azure-mobile-apps-net-server | src/Microsoft.Azure.Mobile.Server.Authentication/IAppServiceTokenHandler.cs | 3295 | // ----------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// ----------------------------------------------------------------------------
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Security.Claims;
namespace Microsoft.Azure.Mobile.Server.Authentication
{
    /// <summary>
    /// Provides an abstraction for handling security tokens. This abstraction can be used for validating security
    /// tokens and creating <see cref="ClaimsPrincipal"/> instances.
    /// </summary>
    public interface IAppServiceTokenHandler
    {
        /// <summary>
        /// Validates a string representation of a mobile service authentication token used to authenticate a user request.
        /// </summary>
        /// <param name="token">A <see cref="string"/> representation of the authentication token to validate.</param>
        /// <param name="signingKey">The secret key with which the token has been signed.</param>
        /// <param name="validAudiences">The valid audiences to accept in token validation.</param>
        /// <param name="validIssuers">The valid issuers to accept in token validation.</param>
        /// <param name="claimsPrincipal">The resulting <see cref="ClaimsPrincipal"/> if the token is valid; null otherwise.</param>
        /// <returns><c>true</c> if <paramref name="token"/> is valid; otherwise <c>false</c>.</returns>
        bool TryValidateLoginToken(string token, string signingKey, IEnumerable<string> validAudiences, IEnumerable<string> validIssuers, out ClaimsPrincipal claimsPrincipal);

        /// <summary>
        /// Creates a user id value contained within a <see cref="ProviderCredentials"/>. The user id is of the form
        /// <c>ProviderName:ProviderId</c> where the <c>ProviderName</c> is the unique identifier for the login provider
        /// and the <c>ProviderId</c> is the provider specific id for a given user.
        /// </summary>
        /// <param name="providerName">The login provider name.</param>
        /// <param name="providerUserId">The provider specific user id.</param>
        /// <returns>A formatted <see cref="string"/> representing the resulting value.</returns>
        string CreateUserId(string providerName, string providerUserId);

        /// <summary>
        /// Parses a user id into its two components: a <c>ProviderName</c> which uniquely identifies the login provider
        /// and the <c>ProviderId</c> which identifies the provider specific id for a given user.
        /// </summary>
        /// <param name="userId">The input value to parse.</param>
        /// <param name="providerName">The login provider name; or <c>null</c> if the <paramref name="userId"/> is not valid.</param>
        /// <param name="providerUserId">The provider specific user id; or <c>null</c> is the <paramref name="userId"/> is not valid.</param>
        /// <returns><c>true</c> if <paramref name="userId"/> is valid; otherwise <c>false</c>.</returns>
        [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is not unreasonable for this API.")]
        bool TryParseUserId(string userId, out string providerName, out string providerUserId);
    }
} | apache-2.0 |
robertwb/incubator-beam | runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/SimpleStateRegistry.java | 3019 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.core.metrics;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.beam.model.pipeline.v1.MetricsApi.MonitoringInfo;
import org.apache.beam.vendor.grpc.v1p36p0.com.google.protobuf.ByteString;
/**
 * A Class for registering SimpleExecutionStates with and extracting execution time MonitoringInfos.
 */
@SuppressWarnings({
  "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
})
public class SimpleStateRegistry {
  // All states registered so far; never removed, only reset.
  private List<SimpleExecutionState> trackedStates = new ArrayList<>();

  /** Adds {@code state} to the set of tracked execution states. */
  public void register(SimpleExecutionState state) {
    trackedStates.add(state);
  }

  /** Reset the registered SimpleExecutionStates. */
  public void reset() {
    trackedStates.forEach(SimpleExecutionState::reset);
  }

  /** @return Execution Time MonitoringInfos based on the tracked start or finish function. */
  public List<MonitoringInfo> getExecutionTimeMonitoringInfos() {
    List<MonitoringInfo> infos = new ArrayList<>();
    for (SimpleExecutionState state : trackedStates) {
      SimpleMonitoringInfoBuilder builder = new SimpleMonitoringInfoBuilder();
      builder.setUrn(state.getUrn());
      state.getLabels().forEach(builder::setLabel);
      builder.setInt64SumValue(state.getTotalMillis());
      infos.add(builder.build());
    }
    return infos;
  }

  /**
   * Extracts the non-zero execution-time totals keyed by their monitoring short
   * ids, merging payloads that map to the same short id.
   */
  public Map<String, ByteString> getExecutionTimeMonitoringData(ShortIdMap shortIds) {
    Map<String, ByteString> data = new HashMap<>(trackedStates.size());
    for (SimpleExecutionState state : trackedStates) {
      if (state.getTotalMillis() == 0) {
        continue;
      }
      String shortId = state.getTotalMillisShortId(shortIds);
      if (data.containsKey(shortId)) {
        // Duplicate short ids can happen due to flatten unzipping.
        data.put(shortId, state.mergeTotalMillisPayload(data.get(shortId)));
      } else {
        data.put(shortId, state.getTotalMillisPayload());
      }
    }
    return data;
  }
}
| apache-2.0 |
mingjian2049/zstack | sdk/src/main/java/org/zstack/sdk/UpdateEcsInstanceVncPasswordAction.java | 2501 | package org.zstack.sdk;
import java.util.HashMap;
import java.util.Map;
// NOTE(review): this is auto-generated ZStack SDK code; it follows the common
// generated-action shape (fields + makeResult + sync/async call + RestInfo).
// Prefer regenerating over hand-editing.
public class UpdateEcsInstanceVncPasswordAction extends AbstractAction {
    private static final HashMap<String, Parameter> parameterMap = new HashMap<>();
    // Outcome wrapper: exactly one of {error, value} is meaningful.
    public static class Result {
        // Non-null when the API call failed.
        public ErrorCode error;
        // Parsed API payload; only meaningful when error is null.
        public UpdateEcsInstanceVncPasswordResult value;
        // Converts an error result into an ApiException; returns this for chaining.
        public Result throwExceptionIfError() {
            if (error != null) {
                throw new ApiException(
                    String.format("error[code: %s, description: %s, details: %s]", error.code, error.description, error.details)
                );
            }
            return this;
        }
    }
    // UUID of the ECS instance whose VNC password is being updated.
    @Param(required = true, nonempty = false, nullElements = false, emptyString = true, noTrim = false)
    public java.lang.String uuid;
    // New VNC password: exactly 6 alphanumeric characters (enforced by the regex).
    @Param(required = true, validRegexValues = "[A-Za-z0-9]{6}", maxLength = 6, minLength = 6, nonempty = false, nullElements = false, emptyString = true, noTrim = false)
    public java.lang.String password;
    @Param(required = false)
    public java.util.List systemTags;
    @Param(required = false)
    public java.util.List userTags;
    @Param(required = true)
    public String sessionId;
    // Request timeout and long-job polling interval, in milliseconds
    // (presumably — units not shown here; confirm against SDK docs).
    public long timeout;
    public long pollingInterval;
    // Maps a raw ApiResult into a Result, substituting an empty payload when
    // the call succeeded but returned no body.
    private Result makeResult(ApiResult res) {
        Result ret = new Result();
        if (res.error != null) {
            ret.error = res.error;
            return ret;
        }
        UpdateEcsInstanceVncPasswordResult value = res.getResult(UpdateEcsInstanceVncPasswordResult.class);
        ret.value = value == null ? new UpdateEcsInstanceVncPasswordResult() : value;
        return ret;
    }
    // Synchronous invocation; blocks until the API call completes.
    public Result call() {
        ApiResult res = ZSClient.call(this);
        return makeResult(res);
    }
    // Asynchronous invocation; the completion callback receives the mapped Result.
    public void call(final Completion<Result> completion) {
        ZSClient.call(this, new InternalCompletion() {
            @Override
            public void complete(ApiResult res) {
                completion.complete(makeResult(res));
            }
        });
    }
    Map<String, Parameter> getParameterMap() {
        return parameterMap;
    }
    // REST routing metadata used by ZSClient to build the HTTP request.
    RestInfo getRestInfo() {
        RestInfo info = new RestInfo();
        info.httpMethod = "PUT";
        info.path = "/hybrid/aliyun/ecs-vnc/{uuid}/actions";
        info.needSession = true;
        info.needPoll = true;
        info.parameterName = "updateEcsInstanceVncPassword";
        return info;
    }
}
| apache-2.0 |
alvarosimon/one | src/onedb/local/4.10.3_to_4.11.80.rb | 8285 | # -------------------------------------------------------------------------- #
# Copyright 2002-2016, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
require 'nokogiri'
# Schema migrator upgrading the OpenNebula database from 4.10.3 to 4.11.80.
# Each section below is an independent schema change; statement order matters.
module Migrator
    # Target database schema version written after a successful migration.
    def db_version
        "4.11.80"
    end

    # OpenNebula release this migration belongs to.
    def one_version
        "OpenNebula 4.11.80"
    end

    # Applies the migration. Returns true on success, false if required
    # bootstrap rows (oneadmin user/group) cannot be read.
    def up
        init_log_time()

        ########################################################################
        # Showback
        ########################################################################

        # New per-VM monthly showback table.
        @db.run "CREATE TABLE vm_showback (vmid INTEGER, year INTEGER, month INTEGER, body MEDIUMTEXT, PRIMARY KEY(vmid, year, month));"

        log_time()

        ########################################################################
        # Security Groups
        ########################################################################

        # The default security group XML embeds oneadmin's user/group names,
        # so both must be read before it can be inserted.
        oneadmin_uname = nil
        @db.fetch("SELECT name FROM user_pool WHERE oid=0") do |row|
            oneadmin_uname = row[:name]
        end

        if oneadmin_uname == nil
            puts "Error trying to read oneadmin's user name ('SELECT name FROM user_pool WHERE oid=0')"
            return false
        end

        oneadmin_gname = nil
        @db.fetch("SELECT name FROM group_pool WHERE oid=0") do |row|
            oneadmin_gname = row[:name]
        end

        if oneadmin_gname == nil
            puts "Error trying to read oneadmin's group name ('SELECT name FROM group_pool WHERE oid=0')"
            return false
        end

        # Create the security group pool with a built-in "default" group (id 0)
        # that allows all inbound/outbound traffic.
        @db.run "CREATE TABLE secgroup_pool (oid INTEGER PRIMARY KEY, name VARCHAR(128), body MEDIUMTEXT, uid INTEGER, gid INTEGER, owner_u INTEGER, group_u INTEGER, other_u INTEGER, UNIQUE(name,uid));"
        @db.run "INSERT INTO secgroup_pool VALUES(0,'default','<SECURITY_GROUP><ID>0</ID><UID>0</UID><GID>0</GID><UNAME>#{oneadmin_uname}</UNAME><GNAME>#{oneadmin_gname}</GNAME><NAME>default</NAME><PERMISSIONS><OWNER_U>1</OWNER_U><OWNER_M>1</OWNER_M><OWNER_A>1</OWNER_A><GROUP_U>1</GROUP_U><GROUP_M>0</GROUP_M><GROUP_A>0</GROUP_A><OTHER_U>1</OTHER_U><OTHER_M>0</OTHER_M><OTHER_A>0</OTHER_A></PERMISSIONS><VMS></VMS><TEMPLATE><DESCRIPTION><![CDATA[The default security group is added to every network. Use it to add default filter rules for your networks. You may remove this security group from any network by updating its properties.]]></DESCRIPTION><RULE><PROTOCOL><![CDATA[ALL]]></PROTOCOL><RULE_TYPE><![CDATA[OUTBOUND]]></RULE_TYPE></RULE><RULE><PROTOCOL><![CDATA[ALL]]></PROTOCOL><RULE_TYPE><![CDATA[INBOUND]]></RULE_TYPE></RULE></TEMPLATE></SECURITY_GROUP>',0,0,1,1,1);"
        # Start user-created security group ids at 100 (0-99 reserved).
        @db.run "INSERT INTO pool_control VALUES('secgroup_pool',99);"

        # Rebuild network_pool, attaching the default security group (id 0)
        # to every existing network's template.
        @db.run "ALTER TABLE network_pool RENAME TO old_network_pool;"
        @db.run "CREATE TABLE network_pool (oid INTEGER PRIMARY KEY, name VARCHAR(128), body MEDIUMTEXT, uid INTEGER, gid INTEGER, owner_u INTEGER, group_u INTEGER, other_u INTEGER, cid INTEGER, pid INTEGER, UNIQUE(name,uid));"

        @db.transaction do
            @db.fetch("SELECT * FROM old_network_pool") do |row|
                doc = Nokogiri::XML(row[:body],nil,NOKOGIRI_ENCODING){|c| c.default_xml.noblanks}

                template = doc.root.at_xpath("TEMPLATE")

                # The cleaner doc.create_cdata(txt) is not supported in
                # old versions of nokogiri
                template.add_child(doc.create_element("SECURITY_GROUPS")).
                    add_child(Nokogiri::XML::CDATA.new(doc,"0"))

                @db[:network_pool].insert(
                    :oid        => row[:oid],
                    :name       => row[:name],
                    :body       => doc.root.to_s,
                    :uid        => row[:uid],
                    :gid        => row[:gid],
                    :owner_u    => row[:owner_u],
                    :group_u    => row[:group_u],
                    :other_u    => row[:other_u],
                    :cid        => row[:cid],
                    :pid        => row[:pid])
            end
        end

        @db.run "DROP TABLE old_network_pool;"

        log_time()

        ########################################################################
        # Datastore status
        ########################################################################

        # Rebuild datastore_pool, adding a STATE element (0 = READY) to each body.
        @db.run "ALTER TABLE datastore_pool RENAME TO old_datastore_pool;"
        @db.run "CREATE TABLE datastore_pool (oid INTEGER PRIMARY KEY, name VARCHAR(128), body MEDIUMTEXT, uid INTEGER, gid INTEGER, owner_u INTEGER, group_u INTEGER, other_u INTEGER, cid INTEGER, UNIQUE(name));"

        @db.transaction do
            @db.fetch("SELECT * FROM old_datastore_pool") do |row|
                doc = Nokogiri::XML(row[:body],nil,NOKOGIRI_ENCODING){|c| c.default_xml.noblanks}

                doc.root.add_child(doc.create_element("STATE")).content = "0"

                @db[:datastore_pool].insert(
                    :oid        => row[:oid],
                    :name       => row[:name],
                    :body       => doc.root.to_s,
                    :uid        => row[:uid],
                    :gid        => row[:gid],
                    :owner_u    => row[:owner_u],
                    :group_u    => row[:group_u],
                    :other_u    => row[:other_u],
                    :cid        => row[:cid])
            end
        end

        @db.run "DROP TABLE old_datastore_pool;"

        log_time()

        ########################################################################
        # VM previous status
        ########################################################################

        # Rebuild vm_pool, adding PREV_STATE/PREV_LCM_STATE to every VM that is
        # not in DONE state (6); DONE VMs are copied verbatim first.
        @db.run "ALTER TABLE vm_pool RENAME TO old_vm_pool;"
        @db.run "CREATE TABLE vm_pool (oid INTEGER PRIMARY KEY, name VARCHAR(128), body MEDIUMTEXT, uid INTEGER, gid INTEGER, last_poll INTEGER, state INTEGER, lcm_state INTEGER, owner_u INTEGER, group_u INTEGER, other_u INTEGER);"
        @db.run "INSERT INTO vm_pool SELECT * FROM old_vm_pool WHERE state = 6;"

        log_time()

        @db.transaction do
            @db.fetch("SELECT * FROM old_vm_pool WHERE state<>6") do |row|
                doc = Nokogiri::XML(row[:body],nil,NOKOGIRI_ENCODING){|c| c.default_xml.noblanks}

                ["STATE", "LCM_STATE"].each do |ename|
                    prev_elem = doc.root.at_xpath("PREV_#{ename}")

                    # Only seed PREV_* when missing, initializing it to the
                    # current value of the corresponding element.
                    if prev_elem.nil?
                        prev_elem = doc.root.add_child(
                            doc.create_element("PREV_#{ename}"))

                        prev_elem.content = doc.root.at_xpath(ename).text
                    end
                end

                @db[:vm_pool].insert(
                    :oid        => row[:oid],
                    :name       => row[:name],
                    :body       => doc.root.to_s,
                    :uid        => row[:uid],
                    :gid        => row[:gid],
                    :last_poll  => row[:last_poll],
                    :state      => row[:state],
                    :lcm_state  => row[:lcm_state],
                    :owner_u    => row[:owner_u],
                    :group_u    => row[:group_u],
                    :other_u    => row[:other_u])
            end
        end

        @db.run "DROP TABLE old_vm_pool;"

        log_time()

        return true
    end
end
| apache-2.0 |
gehaisong/ofbiz-16.11.02 | framework/base/src/main/java/org/apache/ofbiz/base/location/StandardUrlLocationResolver.java | 1350 | /*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.apache.ofbiz.base.location;
import java.net.MalformedURLException;
import java.net.URL;
/**
* A special location resolver that uses Strings like URLs, but with more options
*
*/
public class StandardUrlLocationResolver implements LocationResolver {

    /**
     * Resolves a location string by interpreting it directly as a URL.
     *
     * @param location the location string; must be a well-formed URL
     * @return the URL the location string denotes
     * @throws MalformedURLException if {@code location} is not a valid URL
     */
    public URL resolveLocation(String location) throws MalformedURLException {
        final URL resolved = new URL(location);
        return resolved;
    }
}
| apache-2.0 |
PivotalSarge/geode-native | src/cppcache/src/AppDomainContext.cpp | 1098 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "AppDomainContext.hpp"
namespace apache {
namespace geode {
namespace client {

// Default factory: produces no AppDomain context at all.
AppDomainContext* nullAppDomainContext() { return nullptr; }

// Global factory hook for creating AppDomain contexts. Initialized to the
// null factory above; presumably reassigned by components that need a real
// context (e.g. the .NET bindings) -- confirm against users of this symbol.
AppDomainContext::factory createAppDomainContext = &nullAppDomainContext;

}  // namespace client
}  // namespace geode
}  // namespace apache
| apache-2.0 |
benjyw/kythe | kythe/cxx/indexer/cxx/testdata/basic/anonymous_namespace_header.cc | 437 | // The anonymous namespace is common among headers but distinct in TUs.
#pragma kythe_claim
#include "header.h"
//- @namespace ref CcNamespace
namespace { }
#include "footer.h"
#example header.h
#pragma kythe_claim
//- @namespace=HeaderDecl ref HNamespace
//- !{ HeaderDecl ref CcNamespace }
namespace { }
#example footer.h
#pragma kythe_claim
//- @namespace=FooterDecl ref HNamespace
//- !{ FooterDecl ref CcNamespace }
namespace { }
| apache-2.0 |
manishgupta88/carbondata | core/src/main/java/org/apache/carbondata/core/datamap/status/DataMapStatus.java | 936 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.datamap.status;
/**
 * The status values a DataMap can be in.
 */
public enum DataMapStatus {
    ENABLED,
    DISABLED,
    DROPPED
}
| apache-2.0 |
triggerNZ/intellij-scala | test/org/jetbrains/plugins/scala/annotator/gutter/PrefaceImportTest.scala | 619 | package org.jetbrains.plugins.scala.annotator.gutter
/**
* Pavel.Fatin, 21.01.2010
*/
// Gutter line-marker tests for import prefaces: each testXxx method loads the
// fixture file named after it from the base path below and checks the line
// markers produced when an import precedes that kind of program element.
class PrefaceImportTest extends LineMarkerTestBase {
  // Fixture files live under <base>/preface/import/.
  protected override def getBasePath = super.getBasePath + "/preface/import/"

  def testBlock = doTest
  def testClass = doTest
  def testFunctionDeclaration = doTest
  def testFunctionDefinition = doTest
  def testObject = doTest
  def testPackage = doTest
  def testPackageContainer = doTest
  def testStatement = doTest
  def testTrait = doTest
  def testType = doTest
  def testValue = doTest
  def testVariableDeclaration = doTest
  def testVariableDefinition = doTest
}
org-scn-design-studio-community/sdkpackage1.6 | src/org.scn.community.m.basics/os/sapui5/suite/MonitoringContentRenderer-dbg.js | 2357 | /*!
* SAP UI development toolkit for HTML5 (SAPUI5) (c) Copyright 2009-2013 SAP AG. All rights reserved
*/
jQuery.sap.declare("sap.suite.ui.commons.MonitoringContentRenderer");
jQuery.sap.require("sap.ui.core.Renderer");
/**
 * @class MonitoringContent renderer.
 * @static
 */
sap.suite.ui.commons.MonitoringContentRenderer = {
};

/**
 * Renders the HTML for the given control, using the provided {@link sap.ui.core.RenderManager}.
 *
 * @param {sap.ui.core.RenderManager} rm the RenderManager that can be used for writing to the render output buffer
 * @param {sap.ui.core.Control} oControl an object representation of the control that should be rendered
 */
sap.suite.ui.commons.MonitoringContentRenderer.render = function(rm, oControl){
    var sSize = oControl.getSize();
    var sValue = oControl.getValue();
    var sState = oControl.getState();
    var sTooltip = oControl.getTooltip_AsString();

    // Outer container: control data, optional tooltip, size/state CSS classes.
    rm.write("<div");
    rm.writeControlData(oControl);
    if (sTooltip) {
        rm.writeAttributeEscaped("title", sTooltip);
    }
    if (oControl.getAnimateTextChange()) {
        // Start semi-transparent so a text-change animation can fade it in.
        rm.addStyle("opacity", "0.25");
        rm.writeStyles();
    }
    rm.addClass(sSize);
    rm.addClass("sapSuiteUiCommonsMC");
    if (oControl.hasListeners("press")) {
        rm.addClass("sapSuiteUiCommonsPointer");
    }
    rm.writeClasses();
    rm.writeAttribute("tabindex", "0");
    rm.write(">");

    // Value area.
    rm.write("<div");
    rm.writeAttribute("id", oControl.getId() + "-value");
    rm.addClass("sapSuiteUiCommonsMCValue");
    rm.addClass(sSize);
    rm.addClass(sState);
    rm.writeClasses();
    rm.write(">");
    // Control shows only 4 characters. If last shown character is decimal separator -
    // show only first 3 characters. So "144.5" is shown like "144" and not like "144.".
    // BUGFIX: check sValue for null/undefined BEFORE reading .length -- the
    // fallback branch below already assumed sValue may be falsy, but the old
    // condition dereferenced it first. charAt() is used instead of [] for
    // compatibility with legacy browsers.
    if (sValue && sValue.length >= 4 && (sValue.charAt(3) === "." || sValue.charAt(3) === ",")) {
        rm.writeEscaped(sValue.substring(0, 3));
    } else {
        rm.writeEscaped(sValue ? sValue.substring(0, 4) : "0");
    }
    rm.write("</div>");

    // Icon area.
    rm.write("<div");
    rm.writeAttribute("id", oControl.getId() + "-icon-container");
    rm.addClass("sapSuiteUiCommonsMCIcon");
    rm.addClass(sSize);
    rm.addClass(sState);
    rm.writeClasses();
    rm.write(">");
    rm.renderControl(oControl._oIcon);
    rm.write("</div>");

    rm.write("</div>");
};
| apache-2.0 |
jpodeszwik/mifos | application/src/test/java/org/mifos/schedule/HolidayAndWorkingDaysAndMoratoriaScheduledDateGenerationTest.java | 38435 | /*
* Copyright (c) 2005-2011 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.schedule;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.joda.time.DateTime;
import org.joda.time.DateTimeConstants;
import org.joda.time.Days;
import org.junit.Before;
import org.junit.Test;
import org.mifos.application.holiday.business.Holiday;
import org.mifos.application.meeting.business.MeetingBO;
import org.mifos.calendar.DayOfWeek;
import org.mifos.domain.builders.HolidayBuilder;
import org.mifos.domain.builders.MeetingBuilder;
import org.mifos.domain.builders.ScheduledEventBuilder;
import org.mifos.schedule.internal.HolidayAndWorkingDaysAndMoratoriaScheduledDateGeneration;
/**
* I test {@link HolidayAndWorkingDaysAndMoratoriaScheduledDateGeneration}.
*
* <p>Tests involve dates starting in June, 2011. To make it easier to construct tests with certain
* days of the week/month, the calendar for these months appears below:
*
* <pre>
***********************
* 2011
***********************
* S M T W T F S
* 1 2 3 4 5 6 7 May
* 8 9 10 11 12 13 14
* 15 16 17 18 19 20 21
* 22 23 24 25 26 27 28
* 29 30 31
* 1 2 3 4 June
* 5 6 7 8 9 10 11
* 12 13 14 15 16 17 18
* 19 20 21 22 23 24 25
* 26 27 28 29 30
* 1 2
* 3 4 5 6 7 8 9 July
* 10 11 12 13 14 15 16
* 17 18 19 20 21 22 23
* 24 25 26 27 28 29 30
* 31
* 1 2 3 4 5 6
* 7 8 9 10 11 12 13 August
* 14 15 16 17 18 19 20
* 21 22 23 24 25 26 27
* 28 29 30 31
* 1 2 3
* 4 5 6 7 8 9 10 September
* 11 12 13 14 15 16 17
* 18 19 20 21 22 23 24
* 25 26 27 28 29 30
* 1
* 2 3 4 5 6 7 8
* 9 10 11 12 13 14 15 October
* 16 17 18 19 20 21 22
* 23 24 25 26 27 28 29
* 30 31
* 1 2 3 4 5
* 6 7 8 9 10 11 12 November
* 13 14 15 16 17 18 19
* 20 21 22 23 24 25 26
* 27 28 29 30
*
* 1 2 3
* 4 5 6 7 8 9 10 December
* 11 12 13 14 15 16 17
* 18 19 20 21 22 23 24
* 25 26 27 28 29 30 31
***********************
* 2012 (leap year)
***********************
* 1 2 3 4 5 6 7 January
* 8 9 10 11 12 13 14
* 15 16 17 18 19 20 21
* 22 23 24 25 26 27 28
* 29 30 31
* 1 2 3 4
* 5 6 7 8 9 10 11 February (leap year)
* 12 13 14 15 16 17 18
* 19 20 21 22 23 24 25
* 26 27 28 29
* 1 2 3
* 4 5 6 7 8 9 10 March
* 11 12 13 14 15 16 17
* 18 19 20 21 22 23 24
* 25 26 27 28 29 30 31
* </pre>
*
*/
public class HolidayAndWorkingDaysAndMoratoriaScheduledDateGenerationTest {
private ScheduledDateGeneration scheduleGeneration;
private List<Days> workingDays;
//private DateTime mon_2011_07_04 = date(2011,7,4);
private DateTime mon_2011_06_20 = date(2011,6,20);
private DateTime wed_2011_06_22 = date(2011,6,22);
private DateTime wed_2011_06_29 = date(2011,6,29);
private DateTime mon_2011_07_04 = date(2011,7,4);
private DateTime mon_2011_06_27 = date(2011,6,27);
private DateTime mon_2011_06_29 = date(2011,6,29);
private Holiday sevenDayMoratoriumStartingJuly4th = new HolidayBuilder().from(mon_2011_07_04)
.to(mon_2011_07_04.plusDays(6))
.withNextMeetingRule()
.withRepaymentMoratoriumRule()
.build();
@Before
public void setupAndInjectDependencies() {
workingDays = Arrays.asList(DayOfWeek.mondayAsDay(), DayOfWeek.tuesdayAsDay(), DayOfWeek
.wednesdayAsDay(), DayOfWeek.thursdayAsDay(), DayOfWeek.fridayAsDay());
List<Holiday> upcomingHolidays = new ArrayList<Holiday>();
scheduleGeneration = new HolidayAndWorkingDaysAndMoratoriaScheduledDateGeneration(workingDays, upcomingHolidays);
//Dates frequently used in tests
}
@Test
public void canGenerateScheduledDates() {
DateTime lastScheduledDate = DayOfWeek.mondayMidnight();
ScheduledEvent recurringEvent = new ScheduledEventBuilder().every(1).weeks().on(DayOfWeek.monday()).build();
List<DateTime> scheduledDates = scheduleGeneration
.generateScheduledDates(10, lastScheduledDate, recurringEvent, false);
assertThat(scheduledDates.size(), is(notNullValue()));
assertThat(scheduledDates.size(), is(10));
}
@Test
public void canGenerateScheduledDatesThatMatchScheduledEventBasedOnLastScheduledDate() {
DateTime lastScheduledDate = DayOfWeek.mondayMidnight();
ScheduledEvent recurringEvent = new ScheduledEventBuilder().every(1).weeks().on(DayOfWeek.monday()).build();
List<DateTime> scheduledDates = scheduleGeneration
.generateScheduledDates(10, lastScheduledDate, recurringEvent, false);
assertThat(scheduledDates.get(0), is(lastScheduledDate));
assertThat(scheduledDates.get(1), is(DayOfWeek.oneWeekFrom(lastScheduledDate)));
}
@Test
public void canGenerateAllScheduledDatesThatMatchScheduleEvent() {
DateTime lastScheduledDate = DayOfWeek.mondayMidnight();
DateTime dayAfterLastScheduledDate = DayOfWeek.tuesdayMidnight();
ScheduledEvent scheduleEvent = new ScheduledEventBuilder().every(1).weeks().on(DayOfWeek.monday()).build();
List<DateTime> scheduledDates = scheduleGeneration.generateScheduledDates(10, dayAfterLastScheduledDate,
scheduleEvent, false);
DateTime lastDate = lastScheduledDate;
for (DateTime generatedDate : scheduledDates) {
assertThat(generatedDate, is(DayOfWeek.oneWeekFrom(lastDate)));
lastDate = generatedDate;
}
}
@Test
public void weeklyScheduledEventWithSecondDateInOneWeekNextMeetingHolidayShouldPushOutSecondDateOneWeek() {
Holiday oneWeekNextMeetingHolidayStartingJuly4th
= new HolidayBuilder().from(mon_2011_07_04).to(mon_2011_07_04.plusWeeks(1))
.withNextMeetingRule()
.build();
validateDates(new ScheduleBuilder().withWeeklyEvent(1, wed_2011_06_29.getDayOfWeek())
.withHolidays(oneWeekNextMeetingHolidayStartingJuly4th)
.withStartDate(wed_2011_06_29)
.withNumberOfDates(4)
.build(),
wed_2011_06_29, wed_2011_06_29.plusWeeks(2),
wed_2011_06_29.plusWeeks(2), wed_2011_06_29.plusWeeks(3));
}
@Test
public void weeklyScheduledEventSecondThirdFourthDateInThreeWeekNextMeetingHolidayShouldQuadrupleUpDatesAfterHoliday() {
Holiday threeWeekNextMeetngHolidayStartingJuly4th
= new HolidayBuilder().from(mon_2011_07_04)
.to(mon_2011_07_04.plusWeeks(3))
.withNextMeetingRule()
.build();
validateDates(new ScheduleBuilder().withHolidays(threeWeekNextMeetngHolidayStartingJuly4th)
.withWeeklyEvent(1, wed_2011_06_29.getDayOfWeek())
.withNumberOfDates(4)
.withStartDate(wed_2011_06_29.minusDays(1))
.build(),
wed_2011_06_29, wed_2011_06_29.plusWeeks(4),
wed_2011_06_29.plusWeeks(4), wed_2011_06_29.plusWeeks(4));
}
@Test
public void weeklyScheduleOneWeekMoratoriumEnclosesSecondScheduledDateShouldPushOutSecondAndThirdDatesOneWeek() {
validateDates(new ScheduleBuilder().withHolidays(sevenDayMoratoriumStartingJuly4th)
.withStartDate(wed_2011_06_29.minusDays(1))
.withWeeklyEvent(1, wed_2011_06_29.getDayOfWeek())
.withNumberOfDates(3)
.build(),
wed_2011_06_29, wed_2011_06_29.plusWeeks(2), wed_2011_06_29.plusWeeks(3));
}
@Test
public void weeklyScheduleTwoWeekMoratoriumEnclosesSecondAndThirdScheduledDatesShouldPushSecondAndFollowingDatesTwoWeeks() {
Holiday twoWeekMoratoriumStartingJuly4th = new HolidayBuilder()
.from(mon_2011_07_04)
.to(mon_2011_07_04.plusWeeks(2))
.withRepaymentMoratoriumRule()
.build();
validateDates(new ScheduleBuilder().withHolidays(twoWeekMoratoriumStartingJuly4th)
.withStartDate(wed_2011_06_29.minusDays(1))
.withWeeklyEvent(1, wed_2011_06_29.getDayOfWeek())
.withNumberOfDates(3)
.build(),
wed_2011_06_29, wed_2011_06_29.plusWeeks(3), wed_2011_06_29.plusWeeks(4));
}
@Test
public void weeklyScheduledEventWithSecondThirdFourthDateInThreeWeekSameDayHolidayExpectNoAdjustment() {
Holiday threeWeekSameDay = new HolidayBuilder()
.from(mon_2011_07_04)
.to(mon_2011_07_04.plusWeeks(3))
.withSameDayAsRule()
.build();
validateDates(new ScheduleBuilder().withHolidays(threeWeekSameDay)
.withWeeklyEvent(1, wed_2011_06_29.getDayOfWeek())
.withStartDate(wed_2011_06_29.minusDays(1))
.withNumberOfDates(5)
.build(),
wed_2011_06_29, wed_2011_06_29.plusWeeks(1), wed_2011_06_29.plusWeeks(2),
wed_2011_06_29.plusWeeks(3), wed_2011_06_29.plusWeeks(4));
}
@Test
public void weeklyScheduledEventWithMoratoriumImmediatelyFollowingNexRepaymentHoliday() {
//First holiday extends for 14 consecutive days from 2011/6/27
Holiday twoWeekNextRepaymentHoliday = new HolidayBuilder()
.from(mon_2011_06_27)
.to(mon_2011_06_27.plusWeeks(2).minusDays(1))
.withNextMeetingRule()
.build();
// Next moratorium follows for 14 days
Holiday twoWeekMoratorium = new HolidayBuilder()
.from(mon_2011_06_27.plusWeeks(2))
.to(mon_2011_06_27.plusWeeks(4).plusDays(-1))
.withRepaymentMoratoriumRule()
.build();
/*
* Generate a Wednesday schedule from the Monday the week before the first holiday starts, 2011/6/20.
* Schedule should start on the next Wednesday, 2011/6/22 (2 days from the starting point).
* The second and third dates get shifted into the first week of the moratorium, on the same
* date as the fourth date. These and the remaining schedule get shifted two mor weeks
* past the moratorium.
*/
validateDates(new ScheduleBuilder().withHolidays(twoWeekNextRepaymentHoliday, twoWeekMoratorium)
.withWeeklyEvent(1, DayOfWeek.wednesday())
.withNumberOfDates(6)
.withStartDate(mon_2011_06_20)
.build(),
wed_2011_06_22, wed_2011_06_22.plusWeeks(5),
wed_2011_06_22.plusWeeks(5), wed_2011_06_22.plusWeeks(5),
wed_2011_06_22.plusWeeks(6), wed_2011_06_22.plusWeeks(7));
}
@Test
public void weeklyScheduledEventWithMoratoriumImmediatelyFollowingSameDayHoliday() {
//First holiday extends for 14 consecutive days
Holiday twoWeekSameDayHoliday = new HolidayBuilder()
.from(mon_2011_06_27)
.to(mon_2011_06_27.plusWeeks(2).minusDays(1))
.withSameDayAsRule()
.build();
// Next moratorium follows for 14 days
Holiday twoWeekMoratorium = new HolidayBuilder()
.from(mon_2011_06_27.plusWeeks(2))
.to(mon_2011_06_27.plusWeeks(4).minusDays(1))
.withRepaymentMoratoriumRule()
.build();
/*
* Generate a Wednesday schedule from the Monday the week before the first holiday starts, 2011/6/20.
* Schedule should start on the next Wednesday, 2011/6/22 (2 days from the starting point).
* The second and third dates don't move (same-day holiday)
* Fourth and fifth dates get shifted two weeks past the moratorium.
*/
validateDates(new ScheduleBuilder().withHolidays(twoWeekSameDayHoliday, twoWeekMoratorium)
.withWeeklyEvent(1, DayOfWeek.wednesday())
.withNumberOfDates(6)
.withStartDate(mon_2011_06_20)
.build(),
wed_2011_06_22, wed_2011_06_22.plusWeeks(1),
wed_2011_06_22.plusWeeks(2), wed_2011_06_22.plusWeeks(5),
wed_2011_06_22.plusWeeks(6), wed_2011_06_22.plusWeeks(7));
}
@Test
public void weeklyScheduledEventWithMoratoriumImmediatelyFollowingNextWorkingDayHoliday() {
Holiday twoWeekNextWorkingDayHoliday = new HolidayBuilder()
.from(mon_2011_06_27)
.to(mon_2011_06_27.plusWeeks(2).minusDays(1))
.withNextWorkingDayRule()
.build();
Holiday twoWeekMoratorium = new HolidayBuilder()
.from(mon_2011_06_27.plusWeeks(2))
.to(mon_2011_06_27.plusWeeks(4).minusDays(1))
.withRepaymentMoratoriumRule()
.build();
/*
* Schedule should start on the next Wednesday (2 days from the starting point).
* The second and third unadjusted dates, being in the next-working-day holiday shift to the
* first Monday after the holiday and moratorium. Because the fourth unadjusted date is in the moratorium,
* it and the remaining schedule are shifted two more weeks past the moratorium.
*/
validateDates(new ScheduleBuilder().withHolidays(twoWeekNextWorkingDayHoliday, twoWeekMoratorium)
.withWeeklyEvent(1, DayOfWeek.wednesday())
.withNumberOfDates(6)
.withStartDate(mon_2011_06_20)
.build(),
wed_2011_06_22,
wed_2011_06_22.plusWeeks(5).withDayOfWeek(DayOfWeek.monday()),
wed_2011_06_22.plusWeeks(5).withDayOfWeek(DayOfWeek.monday()),
wed_2011_06_22.plusWeeks(5),
wed_2011_06_22.plusWeeks(6), wed_2011_06_22.plusWeeks(7));
}
@Test
public void biWeeklyScheduleNoHoliday() {
validateDates(new ScheduleBuilder().withHolidays()
.withWeeklyEvent(2, DateTimeConstants.MONDAY)
.withStartDate(date(2011,6,27))
.withNumberOfDates(3)
.build(),
date(2011, 7, 4), date(2011, 7, 18), date(2011, 8, 1));
}
@Test
public void biWeeklyScheduleSecondDateInNextMeetingHolidayShouldShiftSecondDateByTwoWeeks() {
Holiday twoWeekNextWorkingDayHoliday = new HolidayBuilder()
.from(date(2011,7,11))
.to(date(2011,7,17))
.withNextMeetingRule()
.build();
validateDates(new ScheduleBuilder().withHolidays(twoWeekNextWorkingDayHoliday)
.withWeeklyEvent(2, DateTimeConstants.MONDAY)
.withStartDate(date(2011,6,27))
.withNumberOfDates(3)
.build(),
date(2011, 7, 4), date(2011, 7, 18), date(2011, 8, 1));
}
@Test
public void biWeeklyScheduleSecondDateHitsThreeDayMoratoriumShouldPushOutSecondAndThirdDates() {
Holiday independenceDay = new HolidayBuilder()
.from(date(2011,7,4))
.to(date(2011,7,6))
.withRepaymentMoratoriumRule()
.build();
validateDates(new ScheduleBuilder().withHolidays(independenceDay)
.withWeeklyEvent(2, DateTimeConstants.MONDAY)
.withStartDate(date(2011,6,20))
.withNumberOfDates(3)
.build(),
date(2011, 6, 27), date(2011,7,11), date(2011,7 ,25));
}
@Test
public void weeklyScheduleMoratoriumSpansNextWorkingDayHolidayShouldIgnoreNextWorkingDayHoliday() {
Holiday twoWeekNextWorkingDayHoliday = new HolidayBuilder()
.from(mon_2011_06_27)
.to(mon_2011_06_27.plusWeeks(2).minusDays(1))
.withNextWorkingDayRule()
.build();
Holiday twoWeekMoratorium = new HolidayBuilder()
.from(mon_2011_06_27)
.to(mon_2011_06_27.plusWeeks(2).minusDays(1))
.withRepaymentMoratoriumRule()
.build();
/*
* Schedule should start on the next Wednesday (2 days from the starting point).
* The second and third unadjusted dates, being in the moratorium shift them
* and the remaining schedule are shifted two weeks past the moratorium. Next working day is ignored.
*/
validateDates(new ScheduleBuilder().withHolidays(twoWeekNextWorkingDayHoliday, twoWeekMoratorium)
.withWeeklyEvent(1, DayOfWeek.wednesday())
.withNumberOfDates(6)
.withStartDate(mon_2011_06_20)
.build(),
wed_2011_06_22,
wed_2011_06_22.plusWeeks(3),
wed_2011_06_22.plusWeeks(4),
wed_2011_06_22.plusWeeks(5),
wed_2011_06_22.plusWeeks(6),
wed_2011_06_22.plusWeeks(7));
}
@Test
public void weeklyScheduleMoratoriumOverlapsNextWorkingDayHolidayAndIncludesOneScheduledDateShouldIgnoreNextWorkingDayHoliday() {
Holiday oneWeekNextWorkingDayHoliday = new HolidayBuilder()
.from(mon_2011_06_27)
.to(mon_2011_06_27.plusWeeks(1).minusDays(1))
.withNextWorkingDayRule()
.build();
Holiday twoWeekMoratorium = new HolidayBuilder()
.from(mon_2011_06_29)
.to(mon_2011_06_29)
.withRepaymentMoratoriumRule()
.build();
/*
* Schedule should start on the next Wednesday (2 days from the starting point).
* The second date is in both holidays, so it and remaining dates are shifted one week. The
* second date is now past the normal holiday.
*/
validateDates(new ScheduleBuilder().withHolidays(oneWeekNextWorkingDayHoliday, twoWeekMoratorium)
.withWeeklyEvent(1, DayOfWeek.wednesday())
.withNumberOfDates(6)
.withStartDate(mon_2011_06_20)
.build(),
wed_2011_06_22,
wed_2011_06_22.plusWeeks(2),
wed_2011_06_22.plusWeeks(3),
wed_2011_06_22.plusWeeks(4),
wed_2011_06_22.plusWeeks(5),
wed_2011_06_22.plusWeeks(6));
}
@Test
public void weeklyScheduleMoratoriumOverlapsNextWorkingDayHolidayDoesNotIncludeOneScheduledDateShouldShiftTheDate() {
// Second date, 6/29, falls in next-working-day holiday
Holiday oneWeekNextWorkingDayHoliday = new HolidayBuilder()
.from(mon_2011_06_27)
.to(mon_2011_06_27.plusWeeks(1).minusDays(1))
.withNextWorkingDayRule()
.build();
// Moratorium starts on 6/30, overlapping above holiday but does not include the second date.
Holiday twoWeekMoratorium = new HolidayBuilder()
.from(mon_2011_06_27.plusDays(3)) // 6/30
.to(mon_2011_06_27.plusWeeks(1)) // Monday 7/4
.withRepaymentMoratoriumRule()
.build();
/*
* Schedule should start on the next Wednesday (2 days from the starting point).
* The second date is in both holidays, so it and remaining dates are shifted one week. The
* second date is now past the normal holiday.
*/
validateDates(new ScheduleBuilder().withHolidays(oneWeekNextWorkingDayHoliday, twoWeekMoratorium)
.withWeeklyEvent(1, DayOfWeek.wednesday())
.withNumberOfDates(6)
.withStartDate(mon_2011_06_20)
.build(),
wed_2011_06_22,
wed_2011_06_22.plusWeeks(2).withDayOfWeek(DayOfWeek.tuesday()), // Tuesday 7/5 (next working day after moratorium)
wed_2011_06_22.plusWeeks(2), // Wednesday, 7/6
wed_2011_06_22.plusWeeks(3),
wed_2011_06_22.plusWeeks(4),
wed_2011_06_22.plusWeeks(5));
}
/**************************************************
* Tests for schedules recurring monthly by day of month
**************************************************/
@Test
public void monthlyByDayScheduleNoHolidaysShouldNotShiftAnyDates() {
//July & August dates shifted one month past moratorium, then adjusted for working day
validateDates(new ScheduleBuilder().withHolidays()
.withDayOfMonthEvent(1, 6)
.withStartDate(date(2011,6,6))
.withNumberOfDates(10)
.build(),
date(2011, 6, 6), date(2011, 7, 6), date(2011, 8, 8), date(2011, 9, 6), date(2011, 10, 6),
date(2011, 11, 7), date(2011, 12, 6), date(2012, 1, 6), date(2012, 2, 6), date(2012, 3, 6));
}
@Test
public void monthlyByDayScheduleSecondDateInMoratoriumShouldShiftSecondAndThirdDatesbyOneMonth() {
validateDates(new ScheduleBuilder().withHolidays(sevenDayMoratoriumStartingJuly4th)
.withDayOfMonthEvent(1, 6)
.withStartDate(date(2011,6,6))
.withNumberOfDates(3)
.build(),
date(2011, 6, 6), date(2011, 8, 8), date(2011, 9, 6));
}
@Test
public void monthlyByDayScheduleSecondDateInNextWorkingDayHolidayShouldShiftSecondDateToNextWorkingDay() {
Holiday twoWeekNextWorkingDayHoliday = new HolidayBuilder()
.from(date(2011,7,4))
.to(date(2011,7,17))
.withNextWorkingDayRule()
.build();
validateDates(new ScheduleBuilder().withHolidays(twoWeekNextWorkingDayHoliday)
.withDayOfMonthEvent(1, 6)
.withStartDate(date(2011,6,6))
.withNumberOfDates(3)
.build(),
date(2011, 6, 6), date(2011, 7, 18), date(2011, 8, 8));
}
@Test
public void monthlyByDayScheduleSecondDateInNextMeetingHolidayShouldShiftSecondDateByOneMonth() {
Holiday twoWeekNextWorkingDayHoliday = new HolidayBuilder()
.from(date(2011,7,4))
.to(date(2011,7,17))
.withNextMeetingRule()
.build();
validateDates(new ScheduleBuilder().withHolidays(twoWeekNextWorkingDayHoliday)
.withDayOfMonthEvent(1, 6)
.withStartDate(date(2011,6,6))
.withNumberOfDates(3)
.build(),
date(2011, 6, 6), date(2011, 8, 8), date(2011, 8, 8));
}
@Test
public void monthlyByDayScheduleSecondDateInSameDayHolidayShouldNotShiftAnyDates() {
Holiday twoWeekNextWorkingDayHoliday = new HolidayBuilder()
.from(date(2011,7,4))
.to(date(2011,7,17))
.withSameDayAsRule()
.build();
validateDates(new ScheduleBuilder().withHolidays(twoWeekNextWorkingDayHoliday)
.withDayOfMonthEvent(1, 6)
.withStartDate(date(2011,6,6))
.withNumberOfDates(3)
.build(),
date(2011, 6, 6), date(2011, 7, 6), date(2011, 8, 8));
}
@Test
public void monthlyByDayScheduleSecondDateMissesMoratoriumShouldNotShiftAnyDates() {
//Moratorium does not include 13th of month -- no shifts except to adjust for next working day in August
validateDates(new ScheduleBuilder().withHolidays(sevenDayMoratoriumStartingJuly4th)
.withDayOfMonthEvent(1, 13)
.withStartDate(date(2011,6,13))
.withNumberOfDates(3)
.build(),
date(2011, 6, 13), date(2011, 7, 13), date(2011, 8, 15));
}
@Test
public void biMonthlyByDayScheduleNoHolidaysShouldNotShiftAnyDates() {
//July & August dates shifted one month past moratorium, then adjusted for working day
validateDates(new ScheduleBuilder().withHolidays()
.withDayOfMonthEvent(2, 6)
.withStartDate(date(2011, 5, 6))
.withNumberOfDates(4)
.build(),
date(2011, 6, 6), date(2011, 8, 8), date(2011, 10, 6), date(2011, 12, 6));
}
@Test
public void biMonthlyByDayScheduleSecondDateInMoratoriumShouldShiftSecondAndThirdDatesbyTwoMonths() {
validateDates(new ScheduleBuilder().withHolidays(sevenDayMoratoriumStartingJuly4th)
.withDayOfMonthEvent(2, 6)
.withStartDate(date(2011,5,6))
.withNumberOfDates(3)
.build(),
date(2011, 6, 6), date(2011, 8, 8), date(2011, 10, 6));
}
@Test
public void biMonthlyByDayScheduleSecondDateInNextWorkingDayHolidayShouldShiftSecondDateToNextWorkingDay() {
Holiday twoWeekNextWorkingDayHoliday = new HolidayBuilder()
.from(date(2011,7,4))
.to(date(2011,7,17))
.withNextWorkingDayRule()
.build();
validateDates(new ScheduleBuilder().withHolidays(twoWeekNextWorkingDayHoliday)
.withDayOfMonthEvent(2, 6)
.withStartDate(date(2011,5,6))
.withNumberOfDates(3)
.build(),
date(2011, 6, 6), date(2011, 8, 8), date(2011, 10, 6));
}
@Test
public void biMonthlyByDayScheduleSecondDateInNextMeetingHolidayShouldShiftSecondDateByOneMonth() {
Holiday twoWeekNextWorkingDayHoliday = new HolidayBuilder()
.from(date(2011,7,4))
.to(date(2011,7,17))
.withNextMeetingRule()
.build();
validateDates(new ScheduleBuilder().withHolidays(twoWeekNextWorkingDayHoliday)
.withDayOfMonthEvent(2, 6)
.withStartDate(date(2011,5,6))
.withNumberOfDates(3)
.build(),
date(2011, 6, 6), date(2011, 8, 8), date(2011, 10, 6));
}
@Test
public void biMonthlyByDayScheduleSecondDateInSameDayHolidayShouldNotShiftAnyDates() {
Holiday twoWeekNextWorkingDayHoliday = new HolidayBuilder()
.from(date(2011,7,4))
.to(date(2011,7,17))
.withSameDayAsRule()
.build();
validateDates(new ScheduleBuilder().withHolidays(twoWeekNextWorkingDayHoliday)
.withDayOfMonthEvent(2, 6)
.withStartDate(date(2011,5,6))
.withNumberOfDates(3)
.build(),
date(2011, 6, 6), date(2011, 8, 8), date(2011, 10, 6));
}
@Test
public void biMonthlyByDayScheduleSecondDateMissesMoratoriumShouldNotShiftAnyDates() {
//Moratorium does not include 13th of month -- no shifts except to adjust for next working day in August
validateDates(new ScheduleBuilder().withHolidays(sevenDayMoratoriumStartingJuly4th)
.withDayOfMonthEvent(1, 13)
.withStartDate(date(2011,6,13))
.withNumberOfDates(3)
.build(),
date(2011, 6, 13), date(2011, 7, 13), date(2011, 8, 15));
}
@Test
public void shouldNotGoIntoRecursiveLoopWhenThroughDateOccursMoreThanTenPeriodsFromStartDate() {
MeetingBO meeting = new MeetingBuilder().customerMeeting().weekly().every(1).build();
ScheduledEvent scheduledEvent = ScheduledEventFactory.createScheduledEventFrom(meeting);
DateTime startDate = new DateTime().withYear(2010).withMonthOfYear(4).withDayOfMonth(1).toDateMidnight()
.toDateTime();
DateTime throughDate = new DateTime().withYear(2010).withMonthOfYear(6).withDayOfMonth(21).toDateMidnight()
.toDateTime();
scheduleGeneration.generateScheduledDatesThrough(startDate, throughDate, scheduledEvent, false);
}
@Test
public void shouldNotGoIntoRecursiveLoopWhenThroughDateOccursExactlyTenPeriodsFromStartDate() {
MeetingBO meeting = new MeetingBuilder().customerMeeting().weekly().every(1).build();
ScheduledEvent scheduledEvent = ScheduledEventFactory.createScheduledEventFrom(meeting);
DateTime startDate = new DateTime().withYear(2010).withMonthOfYear(4).withDayOfMonth(1).toDateMidnight()
.toDateTime();
DateTime throughDate = new DateTime().withYear(2010).withMonthOfYear(6).withDayOfMonth(7).toDateMidnight()
.toDateTime();
scheduleGeneration.generateScheduledDatesThrough(startDate, throughDate, scheduledEvent, false);
}
/*******************************
* Helper methods
*******************************/
/** Asserts that the generated dates equal the expected dates, element by element and in order. */
private void validateDates (List<DateTime> actualDates, DateTime... expectedDates) {
    assertThat(actualDates.size(), is(expectedDates.length));
    int index = 0;
    for (DateTime actual : actualDates) {
        assertThat(actual, is(expectedDates[index++]));
    }
}
/**
 * Builds a Joda-Time DateTime for the given calendar date at midnight in the
 * default time zone; the current time-of-day picked up by {@code new DateTime()}
 * is discarded by {@code toDateMidnight()}.
 */
private DateTime date (int year, int month, int day) {
    return new DateTime().withDate(year, month, day).toDateMidnight().toDateTime();
}
/**
 * Test-local fluent builder: accumulates a scheduled event plus holiday and
 * working-day configuration, then generates the resulting schedule dates.
 * Working days default to Monday through Friday.
 */
private class ScheduleBuilder {
    private List<Holiday> holidays = new ArrayList<Holiday>();
    private List<Days> workingDays = Arrays.asList(DayOfWeek.mondayAsDay(), DayOfWeek.tuesdayAsDay(),
            DayOfWeek.wednesdayAsDay(), DayOfWeek.thursdayAsDay(), DayOfWeek.fridayAsDay());
    private ScheduledEvent event;
    private int dateCount;
    private DateTime scheduleStart;

    /** Runs the date generator with the accumulated configuration. */
    public List<DateTime> build() {
        HolidayAndWorkingDaysAndMoratoriaScheduledDateGeneration generator =
                new HolidayAndWorkingDaysAndMoratoriaScheduledDateGeneration(workingDays, holidays);
        return generator.generateScheduledDates(dateCount, scheduleStart, event, false);
    }

    /** Event recurring every {@code recurrence} weeks on the given day of week. */
    public ScheduleBuilder withWeeklyEvent (int recurrence, int dayOfWeek) {
        event = new ScheduledEventBuilder().every(recurrence).weeks().on(dayOfWeek).build();
        return this;
    }

    /** Event recurring every {@code recurrence} months on the given day of month. */
    public ScheduleBuilder withDayOfMonthEvent (int recurrence, int dayOfMonth) {
        event = new ScheduledEventBuilder().every(recurrence).months().onDayOfMonth(dayOfMonth).build();
        return this;
    }

    public ScheduleBuilder withHolidays(Holiday...upComingHolidays) {
        holidays = Arrays.asList(upComingHolidays);
        return this;
    }

    public ScheduleBuilder withWorkingDays (List<Days> days) {
        workingDays = days;
        return this;
    }

    public ScheduleBuilder withStartDate (DateTime date) {
        scheduleStart = date;
        return this;
    }

    public ScheduleBuilder withNumberOfDates (int numberOfDates) {
        dateCount = numberOfDates;
        return this;
    }
}
}
| apache-2.0 |
ederign/kie-wb-common | kie-wb-common-stunner/kie-wb-common-stunner-extensions/kie-wb-common-stunner-lienzo-extensions/src/main/java/org/kie/workbench/common/stunner/lienzo/toolbox/items/AbstractPrimitiveItem.java | 1056 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.lienzo.toolbox.items;
import com.ait.lienzo.client.core.shape.IPrimitive;
import org.kie.workbench.common.stunner.lienzo.toolbox.AbstractItem;
import org.kie.workbench.common.stunner.lienzo.toolbox.Item;
/**
 * Base class for toolbox items that are rendered by a single Lienzo primitive.
 *
 * @param <T> the concrete item type, used for fluent-style returns in the
 *            {@code Item} hierarchy
 */
public abstract class AbstractPrimitiveItem<T extends Item>
        extends AbstractItem<T, IPrimitive<?>>
        implements Item<T> {

    /** Returns the Lienzo primitive that visually represents this item. */
    public abstract IPrimitive<?> asPrimitive();
}
| apache-2.0 |
baslr/ArangoDB | 3rdParty/V8/V8-5.0.71.39/test/test262/data/test/language/expressions/instanceof/S11.8.6_A6_T2.js | 540 | // Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
info: >
Only Function objects implement [[HasInstance]] and can be proper
ShiftExpression for the "instanceof" operator consequently
es5id: 11.8.6_A6_T2
description: Checking Math case
---*/
//CHECK#1
// Math is an ordinary (non-callable) object, so it does not implement
// [[HasInstance]]; using it as the right operand of `instanceof` must
// throw a TypeError. $ERROR is provided by the test262 harness.
try {
  1 instanceof Math;
  $ERROR('#1: 1 instanceof Math throw TypeError');
} catch (e) {
  if (!(e instanceof TypeError)) {
    $ERROR('#1: 1 instanceof Math throw TypeError');
  }
}
| apache-2.0 |
ddd332/presto | presto-main/src/main/java/com/facebook/presto/execution/QueryStats.java | 10007 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.execution;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import org.joda.time.DateTime;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Immutable snapshot of execution statistics for a single query: lifecycle
 * timestamps, task and driver counts, memory reservation, scheduled/CPU/user/
 * blocked time, and raw/processed/output data volumes. Instances are
 * (de)serialized as JSON via the Jackson annotations on the constructor and
 * getters.
 */
public class QueryStats
{
    // Lifecycle timestamps. executionStartTime and endTime may be null while
    // the query has not yet started executing / finished.
    private final DateTime createTime;
    private final DateTime executionStartTime;
    private final DateTime lastHeartbeat;
    private final DateTime endTime;

    // Elapsed/queued/planning durations.
    private final Duration elapsedTime;
    private final Duration queuedTime;
    private final Duration analysisTime;
    private final Duration distributedPlanningTime;

    // Task and driver counts.
    private final int totalTasks;
    private final int runningTasks;
    private final int completedTasks;

    private final int totalDrivers;
    private final int queuedDrivers;
    private final int runningDrivers;
    private final int completedDrivers;

    private final DataSize totalMemoryReservation;

    // Aggregate time spent across all tasks of the query.
    private final Duration totalScheduledTime;
    private final Duration totalCpuTime;
    private final Duration totalUserTime;
    private final Duration totalBlockedTime;

    // Data volumes at the raw-input, processed-input and output stages.
    private final DataSize rawInputDataSize;
    private final long rawInputPositions;

    private final DataSize processedInputDataSize;
    private final long processedInputPositions;

    private final DataSize outputDataSize;
    private final long outputPositions;

    /**
     * Creates an all-null/zero instance. For tests only; the JSON constructor
     * below is the one used in production and it rejects most nulls.
     */
    @VisibleForTesting
    public QueryStats()
    {
        this.createTime = null;
        this.executionStartTime = null;
        this.lastHeartbeat = null;
        this.endTime = null;
        this.elapsedTime = null;
        this.queuedTime = null;
        this.analysisTime = null;
        this.distributedPlanningTime = null;
        this.totalTasks = 0;
        this.runningTasks = 0;
        this.completedTasks = 0;
        this.totalDrivers = 0;
        this.queuedDrivers = 0;
        this.runningDrivers = 0;
        this.completedDrivers = 0;
        this.totalMemoryReservation = null;
        this.totalScheduledTime = null;
        this.totalCpuTime = null;
        this.totalUserTime = null;
        this.totalBlockedTime = null;
        this.rawInputDataSize = null;
        this.rawInputPositions = 0;
        this.processedInputDataSize = null;
        this.processedInputPositions = 0;
        this.outputDataSize = null;
        this.outputPositions = 0;
    }

    /**
     * JSON constructor. Counts and positions must be non-negative; most object
     * references must be non-null. executionStartTime, endTime and the
     * elapsed/queued/analysis/planning durations are deliberately unchecked,
     * since they are absent until the corresponding query phase is reached.
     */
    @JsonCreator
    public QueryStats(
            @JsonProperty("createTime") DateTime createTime,
            @JsonProperty("executionStartTime") DateTime executionStartTime,
            @JsonProperty("lastHeartbeat") DateTime lastHeartbeat,
            @JsonProperty("endTime") DateTime endTime,

            @JsonProperty("elapsedTime") Duration elapsedTime,
            @JsonProperty("queuedTime") Duration queuedTime,
            @JsonProperty("analysisTime") Duration analysisTime,
            @JsonProperty("distributedPlanningTime") Duration distributedPlanningTime,

            @JsonProperty("totalTasks") int totalTasks,
            @JsonProperty("runningTasks") int runningTasks,
            @JsonProperty("completedTasks") int completedTasks,

            @JsonProperty("totalDrivers") int totalDrivers,
            @JsonProperty("queuedDrivers") int queuedDrivers,
            @JsonProperty("runningDrivers") int runningDrivers,
            @JsonProperty("completedDrivers") int completedDrivers,

            @JsonProperty("totalMemoryReservation") DataSize totalMemoryReservation,

            @JsonProperty("totalScheduledTime") Duration totalScheduledTime,
            @JsonProperty("totalCpuTime") Duration totalCpuTime,
            @JsonProperty("totalUserTime") Duration totalUserTime,
            @JsonProperty("totalBlockedTime") Duration totalBlockedTime,

            @JsonProperty("rawInputDataSize") DataSize rawInputDataSize,
            @JsonProperty("rawInputPositions") long rawInputPositions,

            @JsonProperty("processedInputDataSize") DataSize processedInputDataSize,
            @JsonProperty("processedInputPositions") long processedInputPositions,

            @JsonProperty("outputDataSize") DataSize outputDataSize,
            @JsonProperty("outputPositions") long outputPositions)
    {
        this.createTime = checkNotNull(createTime, "createTime is null");
        this.executionStartTime = executionStartTime;
        this.lastHeartbeat = checkNotNull(lastHeartbeat, "lastHeartbeat is null");
        this.endTime = endTime;

        this.elapsedTime = elapsedTime;
        this.queuedTime = queuedTime;
        this.analysisTime = analysisTime;
        this.distributedPlanningTime = distributedPlanningTime;

        checkArgument(totalTasks >= 0, "totalTasks is negative");
        this.totalTasks = totalTasks;
        checkArgument(runningTasks >= 0, "runningTasks is negative");
        this.runningTasks = runningTasks;
        checkArgument(completedTasks >= 0, "completedTasks is negative");
        this.completedTasks = completedTasks;

        checkArgument(totalDrivers >= 0, "totalDrivers is negative");
        this.totalDrivers = totalDrivers;
        checkArgument(queuedDrivers >= 0, "queuedDrivers is negative");
        this.queuedDrivers = queuedDrivers;
        checkArgument(runningDrivers >= 0, "runningDrivers is negative");
        this.runningDrivers = runningDrivers;
        checkArgument(completedDrivers >= 0, "completedDrivers is negative");
        this.completedDrivers = completedDrivers;

        this.totalMemoryReservation = checkNotNull(totalMemoryReservation, "totalMemoryReservation is null");
        this.totalScheduledTime = checkNotNull(totalScheduledTime, "totalScheduledTime is null");
        this.totalCpuTime = checkNotNull(totalCpuTime, "totalCpuTime is null");
        this.totalUserTime = checkNotNull(totalUserTime, "totalUserTime is null");
        this.totalBlockedTime = checkNotNull(totalBlockedTime, "totalBlockedTime is null");

        this.rawInputDataSize = checkNotNull(rawInputDataSize, "rawInputDataSize is null");
        checkArgument(rawInputPositions >= 0, "rawInputPositions is negative");
        this.rawInputPositions = rawInputPositions;

        this.processedInputDataSize = checkNotNull(processedInputDataSize, "processedInputDataSize is null");
        checkArgument(processedInputPositions >= 0, "processedInputPositions is negative");
        this.processedInputPositions = processedInputPositions;

        this.outputDataSize = checkNotNull(outputDataSize, "outputDataSize is null");
        checkArgument(outputPositions >= 0, "outputPositions is negative");
        this.outputPositions = outputPositions;
    }

    // Simple JSON-exposed accessors; one per field, in field-declaration order.

    @JsonProperty
    public DateTime getCreateTime()
    {
        return createTime;
    }

    @JsonProperty
    public DateTime getExecutionStartTime()
    {
        return executionStartTime;
    }

    @JsonProperty
    public DateTime getLastHeartbeat()
    {
        return lastHeartbeat;
    }

    @JsonProperty
    public DateTime getEndTime()
    {
        return endTime;
    }

    @JsonProperty
    public Duration getElapsedTime()
    {
        return elapsedTime;
    }

    @JsonProperty
    public Duration getQueuedTime()
    {
        return queuedTime;
    }

    @JsonProperty
    public Duration getAnalysisTime()
    {
        return analysisTime;
    }

    @JsonProperty
    public Duration getDistributedPlanningTime()
    {
        return distributedPlanningTime;
    }

    @JsonProperty
    public int getTotalTasks()
    {
        return totalTasks;
    }

    @JsonProperty
    public int getRunningTasks()
    {
        return runningTasks;
    }

    @JsonProperty
    public int getCompletedTasks()
    {
        return completedTasks;
    }

    @JsonProperty
    public int getTotalDrivers()
    {
        return totalDrivers;
    }

    @JsonProperty
    public int getQueuedDrivers()
    {
        return queuedDrivers;
    }

    @JsonProperty
    public int getRunningDrivers()
    {
        return runningDrivers;
    }

    @JsonProperty
    public int getCompletedDrivers()
    {
        return completedDrivers;
    }

    @JsonProperty
    public DataSize getTotalMemoryReservation()
    {
        return totalMemoryReservation;
    }

    @JsonProperty
    public Duration getTotalScheduledTime()
    {
        return totalScheduledTime;
    }

    @JsonProperty
    public Duration getTotalCpuTime()
    {
        return totalCpuTime;
    }

    @JsonProperty
    public Duration getTotalUserTime()
    {
        return totalUserTime;
    }

    @JsonProperty
    public Duration getTotalBlockedTime()
    {
        return totalBlockedTime;
    }

    @JsonProperty
    public DataSize getRawInputDataSize()
    {
        return rawInputDataSize;
    }

    @JsonProperty
    public long getRawInputPositions()
    {
        return rawInputPositions;
    }

    @JsonProperty
    public DataSize getProcessedInputDataSize()
    {
        return processedInputDataSize;
    }

    @JsonProperty
    public long getProcessedInputPositions()
    {
        return processedInputPositions;
    }

    @JsonProperty
    public DataSize getOutputDataSize()
    {
        return outputDataSize;
    }

    @JsonProperty
    public long getOutputPositions()
    {
        return outputPositions;
    }
}
| apache-2.0 |
dslab-epfl/asap | lib/Transforms/IPO/DeadArgumentElimination.cpp | 44780 | //===-- DeadArgumentElimination.cpp - Eliminate dead arguments ------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass deletes dead arguments from internal functions. Dead argument
// elimination removes arguments which are directly dead, as well as arguments
// only passed into function calls as dead arguments of other functions. This
// pass also deletes dead return values in a similar way.
//
// This pass is often useful as a cleanup pass to run after aggressive
// interprocedural passes, which add possibly-dead arguments or return values.
//
//===----------------------------------------------------------------------===//
#include "llvm/Transforms/IPO.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/DIBuilder.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <map>
#include <set>
#include <tuple>
using namespace llvm;
// Debug type used by DEBUG() / -debug-only filtering for this pass.
#define DEBUG_TYPE "deadargelim"

// Pass statistics, reported with -stats.
STATISTIC(NumArgumentsEliminated, "Number of unread args removed");
STATISTIC(NumRetValsEliminated  , "Number of unused return values removed");
STATISTIC(NumArgumentsReplacedWithUndef,
          "Number of unread args replaced with undef");
namespace {
  /// DAE - The dead argument elimination pass.
  ///
  class DAE : public ModulePass {
  public:
    /// Struct that represents (part of) either a return value or a function
    /// argument.  Used so that arguments and return values can be used
    /// interchangeably.
    struct RetOrArg {
      RetOrArg(const Function *F, unsigned Idx, bool IsArg) : F(F), Idx(Idx),
               IsArg(IsArg) {}
      const Function *F;  // Function this value belongs to.
      unsigned Idx;       // Argument number or return-value index within F.
      bool IsArg;         // True for an argument, false for a return value.

      /// Make RetOrArg comparable, so we can put it into a map.
      bool operator<(const RetOrArg &O) const {
        return std::tie(F, Idx, IsArg) < std::tie(O.F, O.Idx, O.IsArg);
      }

      /// Make RetOrArg comparable, so we can easily iterate the multimap.
      bool operator==(const RetOrArg &O) const {
        return F == O.F && Idx == O.Idx && IsArg == O.IsArg;
      }

      /// Human-readable description for debug output.
      std::string getDescription() const {
        return (Twine(IsArg ? "Argument #" : "Return value #") + utostr(Idx) +
                " of function " + F->getName()).str();
      }
    };

    /// Liveness enum - During our initial pass over the program, we determine
    /// that things are either alive or maybe alive. We don't mark anything
    /// explicitly dead (even if we know they are), since anything not alive
    /// with no registered uses (in Uses) will never be marked alive and will
    /// thus become dead in the end.
    enum Liveness { Live, MaybeLive };

    /// Convenience wrapper
    RetOrArg CreateRet(const Function *F, unsigned Idx) {
      return RetOrArg(F, Idx, false);
    }
    /// Convenience wrapper
    RetOrArg CreateArg(const Function *F, unsigned Idx) {
      return RetOrArg(F, Idx, true);
    }

    typedef std::multimap<RetOrArg, RetOrArg> UseMap;
    /// This maps a return value or argument to any MaybeLive return values or
    /// arguments it uses. This allows the MaybeLive values to be marked live
    /// when any of its users is marked live.
    /// For example (indices are left out for clarity):
    ///  - Uses[ret F] = ret G
    ///    This means that F calls G, and F returns the value returned by G.
    ///  - Uses[arg F] = ret G
    ///    This means that some function calls G and passes its result as an
    ///    argument to F.
    ///  - Uses[ret F] = arg F
    ///    This means that F returns one of its own arguments.
    ///  - Uses[arg F] = arg G
    ///    This means that G calls F and passes one of its own (G's) arguments
    ///    directly to F.
    UseMap Uses;

    typedef std::set<RetOrArg> LiveSet;
    typedef std::set<const Function*> LiveFuncSet;

    /// This set contains all values that have been determined to be live.
    LiveSet LiveValues;
    /// This set contains all values that are cannot be changed in any way.
    LiveFuncSet LiveFunctions;

    typedef SmallVector<RetOrArg, 5> UseVector;

    // Map each LLVM function to corresponding metadata with debug info. If
    // the function is replaced with another one, we should patch the pointer
    // to LLVM function in metadata.
    // As the code generation for module is finished (and DIBuilder is
    // finalized) we assume that subprogram descriptors won't be changed, and
    // they are stored in map for short duration anyway.
    DenseMap<const Function *, DISubprogram *> FunctionDIs;

  protected:
    // DAH uses this to specify a different ID.
    explicit DAE(char &ID) : ModulePass(ID) {}

  public:
    static char ID; // Pass identification, replacement for typeid
    DAE() : ModulePass(ID) {
      initializeDAEPass(*PassRegistry::getPassRegistry());
    }

    bool runOnModule(Module &M) override;

    // Overridden by the bugpoint-only DAH pass to also hack arguments of
    // non-internal functions.
    virtual bool ShouldHackArguments() const { return false; }

  private:
    Liveness MarkIfNotLive(RetOrArg Use, UseVector &MaybeLiveUses);
    Liveness SurveyUse(const Use *U, UseVector &MaybeLiveUses,
                       unsigned RetValNum = -1U);
    Liveness SurveyUses(const Value *V, UseVector &MaybeLiveUses);

    void SurveyFunction(const Function &F);
    void MarkValue(const RetOrArg &RA, Liveness L,
                   const UseVector &MaybeLiveUses);
    void MarkLive(const RetOrArg &RA);
    void MarkLive(const Function &F);
    void PropagateLiveness(const RetOrArg &RA);
    bool RemoveDeadStuffFromFunction(Function *F);
    bool DeleteDeadVarargs(Function &Fn);
    bool RemoveDeadArgumentsFromCallers(Function &Fn);
  };
}
// Unique pass identifier; the address (not the value) is what identifies the
// pass to the pass manager.
char DAE::ID = 0;
INITIALIZE_PASS(DAE, "deadargelim", "Dead Argument Elimination", false, false)
namespace {
  /// DAH - DeadArgumentHacking pass - Same as dead argument elimination, but
  /// deletes arguments to functions which are external. This is only for use
  /// by bugpoint.
  struct DAH : public DAE {
    static char ID;
    DAH() : DAE(ID) {}  // Register with our own ID, distinct from DAE's.

    // Unlike the base pass, hack arguments of external functions too.
    bool ShouldHackArguments() const override { return true; }
  };
}
char DAH::ID = 0;
INITIALIZE_PASS(DAH, "deadarghaX0r",
                "Dead Argument Hacking (BUGPOINT USE ONLY; DO NOT USE)",
                false, false)

/// createDeadArgEliminationPass - This pass removes arguments from functions
/// which are not used by the body of the function.
///
ModulePass *llvm::createDeadArgEliminationPass() { return new DAE(); }
// Bugpoint-only variant; see DAH above.
ModulePass *llvm::createDeadArgHackingPass() { return new DAH(); }
/// DeleteDeadVarargs - If this is an function that takes a ... list, and if
/// llvm.vastart is never called, the varargs list is dead for the function.
/// Returns true if the function (and its call sites) were rewritten.
bool DAE::DeleteDeadVarargs(Function &Fn) {
  assert(Fn.getFunctionType()->isVarArg() && "Function isn't varargs!");
  if (Fn.isDeclaration() || !Fn.hasLocalLinkage()) return false;

  // Ensure that the function is only directly called.
  if (Fn.hasAddressTaken())
    return false;

  // Okay, we know we can transform this function if safe.  Scan its body
  // looking for calls marked musttail or calls to llvm.vastart.
  for (Function::iterator BB = Fn.begin(), E = Fn.end(); BB != E; ++BB) {
    for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
      CallInst *CI = dyn_cast<CallInst>(I);
      if (!CI)
        continue;
      // musttail requires the caller and callee prototypes to match exactly,
      // so the "..." cannot be dropped from this function's type.
      if (CI->isMustTailCall())
        return false;
      if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CI)) {
        if (II->getIntrinsicID() == Intrinsic::vastart)
          return false;
      }
    }
  }

  // If we get here, there are no calls to llvm.vastart in the function body,
  // remove the "..." and adjust all the calls.

  // Start by computing a new prototype for the function, which is the same as
  // the old function, but doesn't have isVarArg set.
  FunctionType *FTy = Fn.getFunctionType();

  std::vector<Type*> Params(FTy->param_begin(), FTy->param_end());
  FunctionType *NFTy = FunctionType::get(FTy->getReturnType(),
                                         Params, false);
  unsigned NumArgs = Params.size();

  // Create the new function body and insert it into the module...
  Function *NF = Function::Create(NFTy, Fn.getLinkage());
  NF->copyAttributesFrom(&Fn);
  Fn.getParent()->getFunctionList().insert(&Fn, NF);
  NF->takeName(&Fn);

  // Loop over all of the callers of the function, transforming the call sites
  // to pass in a smaller number of arguments into the new function.
  //
  std::vector<Value*> Args;
  for (Value::user_iterator I = Fn.user_begin(), E = Fn.user_end(); I != E; ) {
    CallSite CS(*I++);
    if (!CS)
      continue;
    Instruction *Call = CS.getInstruction();

    // Pass all the same arguments (only the fixed ones; any varargs actually
    // passed at this call site are dropped).
    Args.assign(CS.arg_begin(), CS.arg_begin() + NumArgs);

    // Drop any attributes that were on the vararg arguments.
    AttributeSet PAL = CS.getAttributes();
    if (!PAL.isEmpty() && PAL.getSlotIndex(PAL.getNumSlots() - 1) > NumArgs) {
      SmallVector<AttributeSet, 8> AttributesVec;
      for (unsigned i = 0; PAL.getSlotIndex(i) <= NumArgs; ++i)
        AttributesVec.push_back(PAL.getSlotAttributes(i));
      if (PAL.hasAttributes(AttributeSet::FunctionIndex))
        AttributesVec.push_back(AttributeSet::get(Fn.getContext(),
                                                  PAL.getFnAttributes()));
      PAL = AttributeSet::get(Fn.getContext(), AttributesVec);
    }

    // Rebuild the call/invoke against the new function, preserving calling
    // convention, attributes, tail-call flag and debug location.
    Instruction *New;
    if (InvokeInst *II = dyn_cast<InvokeInst>(Call)) {
      New = InvokeInst::Create(NF, II->getNormalDest(), II->getUnwindDest(),
                               Args, "", Call);
      cast<InvokeInst>(New)->setCallingConv(CS.getCallingConv());
      cast<InvokeInst>(New)->setAttributes(PAL);
    } else {
      New = CallInst::Create(NF, Args, "", Call);
      cast<CallInst>(New)->setCallingConv(CS.getCallingConv());
      cast<CallInst>(New)->setAttributes(PAL);
      if (cast<CallInst>(Call)->isTailCall())
        cast<CallInst>(New)->setTailCall();
    }
    New->setDebugLoc(Call->getDebugLoc());
    Args.clear();

    if (!Call->use_empty())
      Call->replaceAllUsesWith(New);

    New->takeName(Call);

    // Finally, remove the old call from the program, reducing the use-count of
    // F.
    Call->eraseFromParent();
  }

  // Since we have now created the new function, splice the body of the old
  // function right into the new function, leaving the old rotting hulk of the
  // function empty.
  NF->getBasicBlockList().splice(NF->begin(), Fn.getBasicBlockList());

  // Loop over the argument list, transferring uses of the old arguments over to
  // the new arguments, also transferring over the names as well.  While we're at
  // it, remove the dead arguments from the DeadArguments list.
  //
  for (Function::arg_iterator I = Fn.arg_begin(), E = Fn.arg_end(),
       I2 = NF->arg_begin(); I != E; ++I, ++I2) {
    // Move the name and users over to the new version.
    I->replaceAllUsesWith(I2);
    I2->takeName(I);
  }

  // Patch the pointer to LLVM function in debug info descriptor.
  auto DI = FunctionDIs.find(&Fn);
  if (DI != FunctionDIs.end()) {
    DISubprogram *SP = DI->second;
    SP->replaceFunction(NF);
    // Ensure the map is updated so it can be reused on non-varargs argument
    // eliminations of the same function.
    FunctionDIs.erase(DI);
    FunctionDIs[NF] = SP;
  }

  // Fix up any BlockAddresses that refer to the function.
  Fn.replaceAllUsesWith(ConstantExpr::getBitCast(NF, Fn.getType()));

  // Delete the bitcast that we just created, so that NF does not
  // appear to be address-taken.
  NF->removeDeadConstantUsers();

  // Finally, nuke the old function.
  Fn.eraseFromParent();
  return true;
}
/// RemoveDeadArgumentsFromCallers - Checks if the given function has any
/// arguments that are unused, and changes the caller parameters to be undefined
/// instead. Returns true if any call site was modified.
bool DAE::RemoveDeadArgumentsFromCallers(Function &Fn)
{
  // We cannot change the arguments if this TU does not define the function or
  // if the linker may choose a function body from another TU, even if the
  // nominal linkage indicates that other copies of the function have the same
  // semantics. In the below example, the dead load from %p may not have been
  // eliminated from the linker-chosen copy of f, so replacing %p with undef
  // in callers may introduce undefined behavior.
  //
  //   define linkonce_odr void @f(i32* %p) {
  //     %v = load i32 %p
  //     ret void
  //   }
  if (!Fn.isStrongDefinitionForLinker())
    return false;

  // Functions with local linkage should already have been handled, except the
  // fragile (variadic) ones which we can improve here.
  if (Fn.hasLocalLinkage() && !Fn.getFunctionType()->isVarArg())
    return false;

  // Nothing to do without callers.
  if (Fn.use_empty())
    return false;

  // Collect the indices of arguments with no uses in the function body.
  // byval/inalloca arguments are excluded: passing undef for them would
  // change the memory the caller sets up.
  SmallVector<unsigned, 8> UnusedArgs;
  for (Function::arg_iterator I = Fn.arg_begin(), E = Fn.arg_end();
       I != E; ++I) {
    Argument *Arg = I;
    if (Arg->use_empty() && !Arg->hasByValOrInAllocaAttr())
      UnusedArgs.push_back(Arg->getArgNo());
  }

  if (UnusedArgs.empty())
    return false;

  bool Changed = false;

  for (Use &U : Fn.uses()) {
    // Only rewrite direct calls where Fn is the callee, not uses of Fn as an
    // argument value.
    CallSite CS(U.getUser());
    if (!CS || !CS.isCallee(&U))
      continue;

    // Now go through all unused args and replace them with "undef".
    for (unsigned I = 0, E = UnusedArgs.size(); I != E; ++I) {
      unsigned ArgNo = UnusedArgs[I];

      Value *Arg = CS.getArgument(ArgNo);
      CS.setArgument(ArgNo, UndefValue::get(Arg->getType()));
      ++NumArgumentsReplacedWithUndef;
      Changed = true;
    }
  }

  return Changed;
}
/// Convenience function that returns the number of return values. It returns 0
/// for void functions and 1 for functions not returning a struct. It returns
/// the number of struct elements for functions returning a struct.
static unsigned NumRetVals(const Function *F) {
  Type *RetTy = F->getReturnType();
  // void: no return values at all.
  if (RetTy->isVoidTy())
    return 0;
  // Aggregate returns count one value per top-level element.
  if (StructType *STy = dyn_cast<StructType>(RetTy))
    return STy->getNumElements();
  if (ArrayType *ATy = dyn_cast<ArrayType>(RetTy))
    return ATy->getNumElements();
  // Any other (scalar) return type counts as a single value.
  return 1;
}
/// Returns the sub-type a function will return at a given Idx. Should
/// correspond to the result type of an ExtractValue instruction executed with
/// just that one Idx (i.e. only top-level structure is considered).
static Type *getRetComponentType(const Function *F, unsigned Idx) {
  Type *RetTy = F->getReturnType();
  assert(!RetTy->isVoidTy() && "void type has no subtype");

  // Struct elements may differ per index; array elements are all the same.
  if (StructType *STy = dyn_cast<StructType>(RetTy))
    return STy->getElementType(Idx);
  if (ArrayType *ATy = dyn_cast<ArrayType>(RetTy))
    return ATy->getElementType();
  // Scalar return: the only component is the return type itself.
  return RetTy;
}
/// MarkIfNotLive - This checks Use for liveness in LiveValues. If Use is not
/// live, it adds Use to the MaybeLiveUses argument. Returns the determined
/// liveness of Use.
DAE::Liveness DAE::MarkIfNotLive(RetOrArg Use, UseVector &MaybeLiveUses) {
  // Already proven live, or belonging to a function whose interface is
  // frozen? Then it stays live.
  if (LiveValues.count(Use) || LiveFunctions.count(Use.F))
    return Live;

  // Only maybe-live for now; record it so it can be promoted to live later
  // should Use itself become live.
  MaybeLiveUses.push_back(Use);
  return MaybeLive;
}
/// SurveyUse - This looks at a single use of an argument or return value
/// and determines if it should be alive or not. Adds this use to MaybeLiveUses
/// if it causes the used value to become MaybeLive.
///
/// RetValNum is the return value number to use when this use is used in a
/// return instruction. This is used in the recursion, you should always leave
/// it at 0.
DAE::Liveness DAE::SurveyUse(const Use *U,
                             UseVector &MaybeLiveUses, unsigned RetValNum) {
    const User *V = U->getUser();
    if (const ReturnInst *RI = dyn_cast<ReturnInst>(V)) {
      // The value is returned from a function. It's only live when the
      // function's return value is live. We use RetValNum here, for the case
      // that U is really a use of an insertvalue instruction that uses the
      // original Use.
      const Function *F = RI->getParent()->getParent();
      if (RetValNum != -1U) {
        // A specific return-value slot is known (we came here through an
        // insertvalue chain): our liveness follows just that slot.
        RetOrArg Use = CreateRet(F, RetValNum);
        // We might be live, depending on the liveness of Use.
        return MarkIfNotLive(Use, MaybeLiveUses);
      } else {
        DAE::Liveness Result = MaybeLive;
        for (unsigned i = 0; i < NumRetVals(F); ++i) {
          RetOrArg Use = CreateRet(F, i);
          // We might be live, depending on the liveness of Use. If any
          // sub-value is live, then the entire value is considered live. This
          // is a conservative choice, and better tracking is possible.
          DAE::Liveness SubResult = MarkIfNotLive(Use, MaybeLiveUses);
          if (Result != Live)
            Result = SubResult;
        }
        return Result;
      }
    }
    if (const InsertValueInst *IV = dyn_cast<InsertValueInst>(V)) {
      if (U->getOperandNo() != InsertValueInst::getAggregateOperandIndex()
          && IV->hasIndices())
        // The use we are examining is inserted into an aggregate. Our liveness
        // depends on all uses of that aggregate, but if it is used as a return
        // value, only index at which we were inserted counts.
        RetValNum = *IV->idx_begin();

      // Note that if we are used as the aggregate operand to the insertvalue,
      // we don't change RetValNum, but do survey all our uses.

      Liveness Result = MaybeLive;
      for (const Use &UU : IV->uses()) {
        Result = SurveyUse(&UU, MaybeLiveUses, RetValNum);
        if (Result == Live)
          break;
      }
      return Result;
    }

    if (auto CS = ImmutableCallSite(V)) {
      const Function *F = CS.getCalledFunction();
      if (F) {
        // Used in a direct call.

        // Find the argument number. We know for sure that this use is an
        // argument, since if it was the function argument this would be an
        // indirect call and the we know can't be looking at a value of the
        // label type (for the invoke instruction).
        unsigned ArgNo = CS.getArgumentNo(U);

        if (ArgNo >= F->getFunctionType()->getNumParams())
          // The value is passed in through a vararg! Must be live.
          return Live;

        assert(CS.getArgument(ArgNo)
               == CS->getOperand(U->getOperandNo())
               && "Argument is not where we expected it");

        // Value passed to a normal call. It's only live when the corresponding
        // argument to the called function turns out live.
        RetOrArg Use = CreateArg(F, ArgNo);
        return MarkIfNotLive(Use, MaybeLiveUses);
      }
    }
    // Used in any other way? Value must be live.
    return Live;
}
/// SurveyUses - This looks at all the uses of the given value
/// Returns the Liveness deduced from the uses of this value.
///
/// Adds all uses that cause the result to be MaybeLive to MaybeLiveRetUses. If
/// the result is Live, MaybeLiveUses might be modified but its content should
/// be ignored (since it might not be complete).
DAE::Liveness DAE::SurveyUses(const Value *V, UseVector &MaybeLiveUses) {
  // A single live use makes the whole value live. With no uses at all (or
  // only maybe-live ones) the value remains MaybeLive.
  for (const Use &U : V->uses())
    if (SurveyUse(&U, MaybeLiveUses) == Live)
      return Live;
  return MaybeLive;
}
// SurveyFunction - This performs the initial survey of the specified function,
// checking out whether or not it uses any of its incoming arguments or whether
// any callers use the return value. This fills in the LiveValues set and Uses
// map.
//
// We consider arguments of non-internal functions to be intrinsically alive as
// well as arguments to functions which have their "address taken". Functions
// that fall into one of the "intrinsically live" categories below are handed
// to MarkLive(F) and surveyed no further.
//
void DAE::SurveyFunction(const Function &F) {
  // Functions with inalloca parameters are expecting args in a particular
  // register and memory layout.
  if (F.getAttributes().hasAttrSomewhere(Attribute::InAlloca)) {
    MarkLive(F);
    return;
  }

  unsigned RetCount = NumRetVals(&F);

  // Assume all return values are dead
  typedef SmallVector<Liveness, 5> RetVals;
  RetVals RetValLiveness(RetCount, MaybeLive);

  typedef SmallVector<UseVector, 5> RetUses;
  // These vectors map each return value to the uses that make it MaybeLive, so
  // we can add those to the Uses map if the return value really turns out to be
  // MaybeLive. Initialized to a list of RetCount empty lists.
  RetUses MaybeLiveRetUses(RetCount);

  for (Function::const_iterator BB = F.begin(), E = F.end(); BB != E; ++BB)
    if (const ReturnInst *RI = dyn_cast<ReturnInst>(BB->getTerminator()))
      if (RI->getNumOperands() != 0 && RI->getOperand(0)->getType()
          != F.getFunctionType()->getReturnType()) {
        // We don't support old style multiple return values.
        MarkLive(F);
        return;
      }

  // Externally visible functions (unless we are deliberately hacking them via
  // ShouldHackArguments) and intrinsics cannot have their signature changed.
  if (!F.hasLocalLinkage() && (!ShouldHackArguments() || F.isIntrinsic())) {
    MarkLive(F);
    return;
  }

  DEBUG(dbgs() << "DAE - Inspecting callers for fn: " << F.getName() << "\n");
  // Keep track of the number of live retvals, so we can skip checks once all
  // of them turn out to be live.
  unsigned NumLiveRetVals = 0;
  // Loop all uses of the function.
  for (const Use &U : F.uses()) {
    // If the function is PASSED IN as an argument, its address has been
    // taken.
    ImmutableCallSite CS(U.getUser());
    if (!CS || !CS.isCallee(&U)) {
      MarkLive(F);
      return;
    }

    // If this use is anything other than a call site, the function is alive.
    const Instruction *TheCall = CS.getInstruction();
    if (!TheCall) {   // Not a direct call site?
      MarkLive(F);
      return;
    }

    // If we end up here, we are looking at a direct call to our function.

    // Now, check how our return value(s) is/are used in this caller. Don't
    // bother checking return values if all of them are live already.
    if (NumLiveRetVals == RetCount)
      continue;

    // Check all uses of the return value. Note that this U shadows the outer
    // loop's U; it refers to a use of the call's result, not of F itself.
    for (const Use &U : TheCall->uses()) {
      if (ExtractValueInst *Ext = dyn_cast<ExtractValueInst>(U.getUser())) {
        // This use uses a part of our return value, survey the uses of
        // that part and store the results for this index only.
        unsigned Idx = *Ext->idx_begin();
        if (RetValLiveness[Idx] != Live) {
          RetValLiveness[Idx] = SurveyUses(Ext, MaybeLiveRetUses[Idx]);
          if (RetValLiveness[Idx] == Live)
            NumLiveRetVals++;
        }
      } else {
        // Used by something else than extractvalue. Survey, but assume that the
        // result applies to all sub-values.
        UseVector MaybeLiveAggregateUses;
        if (SurveyUse(&U, MaybeLiveAggregateUses) == Live) {
          // One aggregate use is live, so every component is live.
          NumLiveRetVals = RetCount;
          RetValLiveness.assign(RetCount, Live);
          break;
        } else {
          // Record the MaybeLive uses against every still-undecided component.
          for (unsigned i = 0; i != RetCount; ++i) {
            if (RetValLiveness[i] != Live)
              MaybeLiveRetUses[i].append(MaybeLiveAggregateUses.begin(),
                                         MaybeLiveAggregateUses.end());
          }
        }
      }
    }
  }

  // Now we've inspected all callers, record the liveness of our return values.
  for (unsigned i = 0; i != RetCount; ++i)
    MarkValue(CreateRet(&F, i), RetValLiveness[i], MaybeLiveRetUses[i]);

  DEBUG(dbgs() << "DAE - Inspecting args for fn: " << F.getName() << "\n");

  // Now, check all of our arguments.
  unsigned i = 0;
  UseVector MaybeLiveArgUses;
  for (Function::const_arg_iterator AI = F.arg_begin(),
       E = F.arg_end(); AI != E; ++AI, ++i) {
    Liveness Result;
    if (F.getFunctionType()->isVarArg()) {
      // Variadic functions will already have a va_arg function expanded inside
      // them, making them potentially very sensitive to ABI changes resulting
      // from removing arguments entirely, so don't. For example AArch64 handles
      // register and stack HFAs very differently, and this is reflected in the
      // IR which has already been generated.
      Result = Live;
    } else {
      // See what the effect of this use is (recording any uses that cause
      // MaybeLive in MaybeLiveArgUses).
      Result = SurveyUses(AI, MaybeLiveArgUses);
    }

    // Mark the result.
    MarkValue(CreateArg(&F, i), Result, MaybeLiveArgUses);
    // Clear the vector again for the next iteration.
    MaybeLiveArgUses.clear();
  }
}
/// MarkValue - This function marks the liveness of RA depending on L. If L is
/// MaybeLive, it also takes all uses in MaybeLiveUses and records them in Uses,
/// such that RA will be marked live if any use in MaybeLiveUses gets marked
/// live later on.
void DAE::MarkValue(const RetOrArg &RA, Liveness L,
const UseVector &MaybeLiveUses) {
switch (L) {
case Live: MarkLive(RA); break;
case MaybeLive:
{
// Note any uses of this value, so this return value can be
// marked live whenever one of the uses becomes live.
for (UseVector::const_iterator UI = MaybeLiveUses.begin(),
UE = MaybeLiveUses.end(); UI != UE; ++UI)
Uses.insert(std::make_pair(*UI, RA));
break;
}
}
}
/// MarkLive - Mark the given Function as alive, meaning that it cannot be
/// changed in any way. Additionally,
/// mark any values that are used as this function's parameters or by its return
/// values (according to Uses) live as well.
void DAE::MarkLive(const Function &F) {
DEBUG(dbgs() << "DAE - Intrinsically live fn: " << F.getName() << "\n");
// Mark the function as live.
LiveFunctions.insert(&F);
// Mark all arguments as live.
for (unsigned i = 0, e = F.arg_size(); i != e; ++i)
PropagateLiveness(CreateArg(&F, i));
// Mark all return values as live.
for (unsigned i = 0, e = NumRetVals(&F); i != e; ++i)
PropagateLiveness(CreateRet(&F, i));
}
/// MarkLive - Record that the given return value or argument is live, and
/// transitively mark everything it keeps alive (according to Uses).
void DAE::MarkLive(const RetOrArg &RA) {
  // Nothing to do when the whole function is already live, or when RA itself
  // was marked before. Short-circuit order matters: a fully-live function's
  // values are never entered into LiveValues.
  if (LiveFunctions.count(RA.F) || !LiveValues.insert(RA).second)
    return;
  DEBUG(dbgs() << "DAE - Marking " << RA.getDescription() << " live\n");
  PropagateLiveness(RA);
}
/// PropagateLiveness - Given that RA is a live value, propagate its liveness
/// to any other values it uses (according to Uses).
void DAE::PropagateLiveness(const RetOrArg &RA) {
  // We don't use upper_bound (or equal_range) here, because our recursive call
  // to ourselves is likely to cause the upper_bound (which is the first value
  // not belonging to RA) to become erased and the iterator invalidated.
  UseMap::iterator Begin = Uses.lower_bound(RA);
  UseMap::iterator E = Uses.end();
  UseMap::iterator I;
  // NOTE(review): MarkLive below may recurse back into PropagateLiveness and
  // erase Uses entries for other keys; multimap erase only invalidates
  // iterators to the erased elements, so I/E look safe provided the recursion
  // never erases RA's own range — confirm before restructuring this loop.
  for (I = Begin; I != E && I->first == RA; ++I)
    MarkLive(I->second);
  // Erase RA from the Uses map (from the lower bound to wherever we ended up
  // after the loop).
  Uses.erase(Begin, I);
}
// RemoveDeadStuffFromFunction - Remove any arguments and return values from F
// that are not in LiveValues. Transform the function and all of the callees of
// the function to not have these arguments and return values.
//
// Returns true if F was replaced by a slimmed-down clone, false if nothing
// needed to change (fully live function, or the recomputed type was
// identical).
//
bool DAE::RemoveDeadStuffFromFunction(Function *F) {
  // Don't modify fully live functions
  if (LiveFunctions.count(F))
    return false;

  // Start by computing a new prototype for the function, which is the same as
  // the old function, but has fewer arguments and a different return type.
  FunctionType *FTy = F->getFunctionType();
  std::vector<Type*> Params;

  // Keep track of if we have a live 'returned' argument
  bool HasLiveReturnedArg = false;

  // Set up to build a new list of parameter attributes.
  SmallVector<AttributeSet, 8> AttributesVec;
  const AttributeSet &PAL = F->getAttributes();

  // Remember which arguments are still alive.
  SmallVector<bool, 10> ArgAlive(FTy->getNumParams(), false);
  // Construct the new parameter list from non-dead arguments. Also construct
  // a new set of parameter attributes to correspond. Skip the first parameter
  // attribute, since that belongs to the return value.
  // Note: LiveValues.erase doubles as the liveness test; live entries are
  // consumed from the set as we go.
  unsigned i = 0;
  for (Function::arg_iterator I = F->arg_begin(), E = F->arg_end();
       I != E; ++I, ++i) {
    RetOrArg Arg = CreateArg(F, i);
    if (LiveValues.erase(Arg)) {
      Params.push_back(I->getType());
      ArgAlive[i] = true;

      // Get the original parameter attributes (skipping the first one, that is
      // for the return value).
      if (PAL.hasAttributes(i + 1)) {
        AttrBuilder B(PAL, i + 1);
        if (B.contains(Attribute::Returned))
          HasLiveReturnedArg = true;
        AttributesVec.
          push_back(AttributeSet::get(F->getContext(), Params.size(), B));
      }
    } else {
      ++NumArgumentsEliminated;
      DEBUG(dbgs() << "DAE - Removing argument " << i << " (" << I->getName()
            << ") from " << F->getName() << "\n");
    }
  }

  // Find out the new return value.
  Type *RetTy = FTy->getReturnType();
  Type *NRetTy = nullptr;
  unsigned RetCount = NumRetVals(F);

  // -1 means unused, other numbers are the new index
  SmallVector<int, 5> NewRetIdxs(RetCount, -1);
  std::vector<Type*> RetTypes;

  // If there is a function with a live 'returned' argument but a dead return
  // value, then there are two possible actions:
  // 1) Eliminate the return value and take off the 'returned' attribute on the
  //    argument.
  // 2) Retain the 'returned' attribute and treat the return value (but not the
  //    entire function) as live so that it is not eliminated.
  //
  // It's not clear in the general case which option is more profitable because,
  // even in the absence of explicit uses of the return value, code generation
  // is free to use the 'returned' attribute to do things like eliding
  // save/restores of registers across calls. Whether or not this happens is
  // target and ABI-specific as well as depending on the amount of register
  // pressure, so there's no good way for an IR-level pass to figure this out.
  //
  // Fortunately, the only places where 'returned' is currently generated by
  // the FE are places where 'returned' is basically free and almost always a
  // performance win, so the second option can just be used always for now.
  //
  // This should be revisited if 'returned' is ever applied more liberally.
  if (RetTy->isVoidTy() || HasLiveReturnedArg) {
    NRetTy = RetTy;
  } else {
    // Look at each of the original return values individually.
    for (unsigned i = 0; i != RetCount; ++i) {
      RetOrArg Ret = CreateRet(F, i);
      if (LiveValues.erase(Ret)) {
        RetTypes.push_back(getRetComponentType(F, i));
        NewRetIdxs[i] = RetTypes.size() - 1;
      } else {
        ++NumRetValsEliminated;
        DEBUG(dbgs() << "DAE - Removing return value " << i << " from "
              << F->getName() << "\n");
      }
    }
    if (RetTypes.size() > 1) {
      // More than one return type? Reduce it down to size.
      if (StructType *STy = dyn_cast<StructType>(RetTy)) {
        // Make the new struct packed if we used to return a packed struct
        // already.
        NRetTy = StructType::get(STy->getContext(), RetTypes, STy->isPacked());
      } else {
        assert(isa<ArrayType>(RetTy) && "unexpected multi-value return");
        NRetTy = ArrayType::get(RetTypes[0], RetTypes.size());
      }
    } else if (RetTypes.size() == 1)
      // One return type? Just a simple value then, but only if we didn't use to
      // return a struct with that simple value before.
      NRetTy = RetTypes.front();
    else if (RetTypes.size() == 0)
      // No return types? Make it void, but only if we didn't use to return {}.
      NRetTy = Type::getVoidTy(F->getContext());
  }

  assert(NRetTy && "No new return type found?");

  // The existing function return attributes.
  AttributeSet RAttrs = PAL.getRetAttributes();

  // Remove any incompatible attributes, but only if we removed all return
  // values. Otherwise, ensure that we don't have any conflicting attributes
  // here. Currently, this should not be possible, but special handling might be
  // required when new return value attributes are added.
  if (NRetTy->isVoidTy())
    RAttrs = RAttrs.removeAttributes(NRetTy->getContext(),
                                     AttributeSet::ReturnIndex,
                                     AttributeFuncs::typeIncompatible(NRetTy));
  else
    assert(!AttrBuilder(RAttrs, AttributeSet::ReturnIndex).
             overlaps(AttributeFuncs::typeIncompatible(NRetTy)) &&
           "Return attributes no longer compatible?");

  if (RAttrs.hasAttributes(AttributeSet::ReturnIndex))
    AttributesVec.push_back(AttributeSet::get(NRetTy->getContext(), RAttrs));

  if (PAL.hasAttributes(AttributeSet::FunctionIndex))
    AttributesVec.push_back(AttributeSet::get(F->getContext(),
                                              PAL.getFnAttributes()));

  // Reconstruct the AttributesList based on the vector we constructed.
  AttributeSet NewPAL = AttributeSet::get(F->getContext(), AttributesVec);

  // Create the new function type based on the recomputed parameters.
  FunctionType *NFTy = FunctionType::get(NRetTy, Params, FTy->isVarArg());

  // No change?
  if (NFTy == FTy)
    return false;

  // Create the new function body and insert it into the module...
  Function *NF = Function::Create(NFTy, F->getLinkage());
  NF->copyAttributesFrom(F);
  NF->setAttributes(NewPAL);
  // Insert the new function before the old function, so we won't be processing
  // it again.
  F->getParent()->getFunctionList().insert(F, NF);
  NF->takeName(F);

  // Loop over all of the callers of the function, transforming the call sites
  // to pass in a smaller number of arguments into the new function. Each
  // rewritten call site is erased, which shrinks F's use list until it drains.
  //
  std::vector<Value*> Args;
  while (!F->use_empty()) {
    CallSite CS(F->user_back());
    Instruction *Call = CS.getInstruction();

    AttributesVec.clear();
    const AttributeSet &CallPAL = CS.getAttributes();

    // The call return attributes.
    AttributeSet RAttrs = CallPAL.getRetAttributes();

    // Adjust in case the function was changed to return void.
    RAttrs = RAttrs.removeAttributes(NRetTy->getContext(),
                                     AttributeSet::ReturnIndex,
                        AttributeFuncs::typeIncompatible(NF->getReturnType()));
    if (RAttrs.hasAttributes(AttributeSet::ReturnIndex))
      AttributesVec.push_back(AttributeSet::get(NF->getContext(), RAttrs));

    // Declare these outside of the loops, so we can reuse them for the second
    // loop, which loops the varargs.
    CallSite::arg_iterator I = CS.arg_begin();
    unsigned i = 0;
    // Loop over those operands, corresponding to the normal arguments to the
    // original function, and add those that are still alive.
    for (unsigned e = FTy->getNumParams(); i != e; ++I, ++i)
      if (ArgAlive[i]) {
        Args.push_back(*I);
        // Get original parameter attributes, but skip return attributes.
        if (CallPAL.hasAttributes(i + 1)) {
          AttrBuilder B(CallPAL, i + 1);
          // If the return type has changed, then get rid of 'returned' on the
          // call site. The alternative is to make all 'returned' attributes on
          // call sites keep the return value alive just like 'returned'
          // attributes on function declaration but it's less clearly a win
          // and this is not an expected case anyway
          if (NRetTy != RetTy && B.contains(Attribute::Returned))
            B.removeAttribute(Attribute::Returned);
          AttributesVec.
            push_back(AttributeSet::get(F->getContext(), Args.size(), B));
        }
      }

    // Push any varargs arguments on the list. Don't forget their attributes.
    for (CallSite::arg_iterator E = CS.arg_end(); I != E; ++I, ++i) {
      Args.push_back(*I);
      if (CallPAL.hasAttributes(i + 1)) {
        AttrBuilder B(CallPAL, i + 1);
        AttributesVec.
          push_back(AttributeSet::get(F->getContext(), Args.size(), B));
      }
    }

    if (CallPAL.hasAttributes(AttributeSet::FunctionIndex))
      AttributesVec.push_back(AttributeSet::get(Call->getContext(),
                                                CallPAL.getFnAttributes()));

    // Reconstruct the AttributesList based on the vector we constructed.
    AttributeSet NewCallPAL = AttributeSet::get(F->getContext(), AttributesVec);

    Instruction *New;
    if (InvokeInst *II = dyn_cast<InvokeInst>(Call)) {
      New = InvokeInst::Create(NF, II->getNormalDest(), II->getUnwindDest(),
                               Args, "", Call);
      cast<InvokeInst>(New)->setCallingConv(CS.getCallingConv());
      cast<InvokeInst>(New)->setAttributes(NewCallPAL);
    } else {
      New = CallInst::Create(NF, Args, "", Call);
      cast<CallInst>(New)->setCallingConv(CS.getCallingConv());
      cast<CallInst>(New)->setAttributes(NewCallPAL);
      if (cast<CallInst>(Call)->isTailCall())
        cast<CallInst>(New)->setTailCall();
    }
    New->setDebugLoc(Call->getDebugLoc());

    Args.clear();

    if (!Call->use_empty()) {
      if (New->getType() == Call->getType()) {
        // Return type not changed? Just replace users then.
        Call->replaceAllUsesWith(New);
        New->takeName(Call);
      } else if (New->getType()->isVoidTy()) {
        // Our return value has uses, but they will get removed later on.
        // Replace by null for now.
        if (!Call->getType()->isX86_MMXTy())
          Call->replaceAllUsesWith(Constant::getNullValue(Call->getType()));
      } else {
        assert((RetTy->isStructTy() || RetTy->isArrayTy()) &&
               "Return type changed, but not into a void. The old return type"
               " must have been a struct or an array!");
        Instruction *InsertPt = Call;
        if (InvokeInst *II = dyn_cast<InvokeInst>(Call)) {
          // For invokes, new instructions must go in the normal destination
          // block, after any PHI nodes.
          BasicBlock::iterator IP = II->getNormalDest()->begin();
          while (isa<PHINode>(IP)) ++IP;
          InsertPt = IP;
        }

        // We used to return a struct or array. Instead of doing smart stuff
        // with all the uses, we will just rebuild it using extract/insertvalue
        // chaining and let instcombine clean that up.
        //
        // Start out building up our return value from undef
        Value *RetVal = UndefValue::get(RetTy);
        for (unsigned i = 0; i != RetCount; ++i)
          if (NewRetIdxs[i] != -1) {
            Value *V;
            if (RetTypes.size() > 1)
              // We are still returning a struct, so extract the value from our
              // return value
              V = ExtractValueInst::Create(New, NewRetIdxs[i], "newret",
                                           InsertPt);
            else
              // We are now returning a single element, so just insert that
              V = New;
            // Insert the value at the old position
            RetVal = InsertValueInst::Create(RetVal, V, i, "oldret", InsertPt);
          }
        // Now, replace all uses of the old call instruction with the return
        // struct we built
        Call->replaceAllUsesWith(RetVal);
        New->takeName(Call);
      }
    }

    // Finally, remove the old call from the program, reducing the use-count of
    // F.
    Call->eraseFromParent();
  }

  // Since we have now created the new function, splice the body of the old
  // function right into the new function, leaving the old rotting hulk of the
  // function empty.
  NF->getBasicBlockList().splice(NF->begin(), F->getBasicBlockList());

  // Loop over the argument list, transferring uses of the old arguments over to
  // the new arguments, also transferring over the names as well.
  i = 0;
  for (Function::arg_iterator I = F->arg_begin(), E = F->arg_end(),
       I2 = NF->arg_begin(); I != E; ++I, ++i)
    if (ArgAlive[i]) {
      // If this is a live argument, move the name and users over to the new
      // version. Note I2 only advances for live arguments.
      I->replaceAllUsesWith(I2);
      I2->takeName(I);
      ++I2;
    } else {
      // If this argument is dead, replace any uses of it with null constants
      // (these are guaranteed to become unused later on).
      if (!I->getType()->isX86_MMXTy())
        I->replaceAllUsesWith(Constant::getNullValue(I->getType()));
    }

  // If we change the return value of the function we must rewrite any return
  // instructions. Check this now.
  if (F->getReturnType() != NF->getReturnType())
    for (Function::iterator BB = NF->begin(), E = NF->end(); BB != E; ++BB)
      if (ReturnInst *RI = dyn_cast<ReturnInst>(BB->getTerminator())) {
        Value *RetVal;

        if (NFTy->getReturnType()->isVoidTy()) {
          RetVal = nullptr;
        } else {
          assert(RetTy->isStructTy() || RetTy->isArrayTy());
          // The original return value was a struct or array, insert
          // extractvalue/insertvalue chains to extract only the values we need
          // to return and insert them into our new result.
          // This does generate messy code, but we'll let it to instcombine to
          // clean that up.
          Value *OldRet = RI->getOperand(0);
          // Start out building up our return value from undef
          RetVal = UndefValue::get(NRetTy);
          for (unsigned i = 0; i != RetCount; ++i)
            if (NewRetIdxs[i] != -1) {
              ExtractValueInst *EV = ExtractValueInst::Create(OldRet, i,
                                                              "oldret", RI);
              if (RetTypes.size() > 1) {
                // We're still returning a struct, so reinsert the value into
                // our new return value at the new index
                RetVal = InsertValueInst::Create(RetVal, EV, NewRetIdxs[i],
                                                 "newret", RI);
              } else {
                // We are now only returning a simple value, so just return the
                // extracted value.
                RetVal = EV;
              }
            }
        }
        // Replace the return instruction with one returning the new return
        // value (possibly 0 if we became void).
        ReturnInst::Create(F->getContext(), RetVal, RI);
        BB->getInstList().erase(RI);
      }

  // Patch the pointer to LLVM function in debug info descriptor.
  auto DI = FunctionDIs.find(F);
  if (DI != FunctionDIs.end())
    DI->second->replaceFunction(NF);

  // Now that the old function is dead, delete it.
  F->eraseFromParent();

  return true;
}
/// runOnModule - Drive the whole pass over a module in four phases.
bool DAE::runOnModule(Module &M) {
  bool Changed = false;

  // Collect debug info descriptors up front so RemoveDeadStuffFromFunction
  // can patch them when it replaces a function.
  FunctionDIs = makeSubprogramMap(M);

  // Phase 1: do a simple check to see if any functions can have their "..."
  // removed (possible when they never call va_start). This cannot be fused
  // with the survey loop below, because deleting a function invalidates
  // information computed while surveying other functions.
  DEBUG(dbgs() << "DAE - Deleting dead varargs\n");
  for (Module::iterator FI = M.begin(), FE = M.end(); FI != FE; ) {
    Function &Fn = *FI++; // Advance first: DeleteDeadVarargs may erase Fn.
    if (Fn.getFunctionType()->isVarArg())
      Changed |= DeleteDeadVarargs(Fn);
  }

  // Phase 2: determine which arguments are live. All arguments are assumed
  // dead unless proven otherwise, which lets dead arguments passed into
  // recursive functions be recognized as dead.
  DEBUG(dbgs() << "DAE - Determining liveness\n");
  for (auto &Fn : M)
    SurveyFunction(Fn);

  // Phase 3: remove all dead arguments and return values from each function.
  for (Module::iterator FI = M.begin(), FE = M.end(); FI != FE; ) {
    // Increment before the call, because the function will probably get
    // removed (i.e. replaced by a new one).
    Function *Fn = FI++;
    Changed |= RemoveDeadStuffFromFunction(Fn);
  }

  // Phase 4: look for any unused parameters in functions with non-local
  // linkage and replace the passed in parameters with undef.
  for (auto &Fn : M)
    Changed |= RemoveDeadArgumentsFromCallers(Fn);

  return Changed;
}
| bsd-2-clause |
mikem/homebrew-cask | Casks/macspice.rb | 415 | cask 'macspice' do
  version '3.1.15'
  # Integrity checksum for the downloaded disk image.
  sha256 'c12699e694d415ef7711e45d3f348e84202188014e679d505a2d5f126c84f77c'

  # NOTE(review): the download is served over plain http and the image name
  # (MacSpice3f5.dmg) does not obviously track the version string — confirm
  # the mirror path when bumping versions.
  url "http://www.macspice.com/mirror/binaries/v#{version}/MacSpice3f5.dmg"
  # appcast checkpoint pins the feed contents for update detection.
  appcast 'http://www.macspice.com/AppCast-v2.xml',
          checkpoint: '0d57f4a640d1d4991aab10d13a744be9d2afcec1b9ee1621ea5668f4964eb6a9'
  name 'MacSpice'
  homepage 'https://www.macspice.com/'

  app 'MacSpice.app'
end
| bsd-2-clause |
seebcioo/slick | slick/src/main/scala/slick/ast/Comprehension.scala | 4794 | package slick.ast
import TypeUtil.typeToTypeUtil
import Util._
import slick.util.ConstArray
/** A SQL comprehension */
/** A SQL comprehension: a single FROM generator (`sym` is bound to the rows of
  * `from` inside all other child expressions) plus the optional SQL clauses.
  * A clause that is `None` (or an empty `orderBy`) is simply absent. */
final case class Comprehension(sym: TermSymbol, from: Node, select: Node, where: Option[Node] = None,
                               groupBy: Option[Node] = None, orderBy: ConstArray[(Node, Ordering)] = ConstArray.empty,
                               having: Option[Node] = None,
                               distinct: Option[Node] = None,
                               fetch: Option[Node] = None, offset: Option[Node] = None) extends DefNode {
  type Self = Comprehension

  // Children are flattened in this fixed order:
  //   from, select, where?, groupBy?, orderBy*, having?, distinct?, fetch?, offset?
  // The offset arithmetic in `rebuild` depends on exactly this layout.
  lazy val children = (ConstArray.newBuilder() + from + select ++ where ++ groupBy ++ orderBy.map(_._1) ++ having ++ distinct ++ fetch ++ offset).result

  override def childNames =
    Seq("from "+sym, "select") ++
    where.map(_ => "where") ++
    groupBy.map(_ => "groupBy") ++
    orderBy.map("orderBy " + _._2).toSeq ++
    having.map(_ => "having") ++
    distinct.map(_ => "distinct") ++
    fetch.map(_ => "fetch") ++
    offset.map(_ => "offset")

  /** Reassemble a copy of this node from the flattened child array. Each
    * Option contributes `productArity` children (0 for None, 1 for Some),
    * which is how the running per-clause offsets below are computed. */
  protected[this] def rebuild(ch: ConstArray[Node]) = {
    val newFrom = ch(0)
    val newSelect = ch(1)
    val whereOffset = 2
    val newWhere = ch.slice(whereOffset, whereOffset + where.productArity)
    val groupByOffset = whereOffset + newWhere.length
    val newGroupBy = ch.slice(groupByOffset, groupByOffset + groupBy.productArity)
    val orderByOffset = groupByOffset + newGroupBy.length
    val newOrderBy = ch.slice(orderByOffset, orderByOffset + orderBy.length)
    val havingOffset = orderByOffset + newOrderBy.length
    val newHaving = ch.slice(havingOffset, havingOffset + having.productArity)
    val distinctOffset = havingOffset + newHaving.length
    val newDistinct = ch.slice(distinctOffset, distinctOffset + distinct.productArity)
    val fetchOffset = distinctOffset + newDistinct.length
    val newFetch = ch.slice(fetchOffset, fetchOffset + fetch.productArity)
    val offsetOffset = fetchOffset + newFetch.length
    val newOffset = ch.slice(offsetOffset, offsetOffset + offset.productArity)
    copy(
      from = newFrom,
      select = newSelect,
      where = newWhere.headOption,
      groupBy = newGroupBy.headOption,
      // Only the ordering expressions are replaced; directions are kept.
      orderBy = orderBy.zip(newOrderBy).map { case ((_, o), n) => (n, o) },
      having = newHaving.headOption,
      distinct = newDistinct.headOption,
      fetch = newFetch.headOption,
      offset = newOffset.headOption
    )
  }

  def generators = ConstArray((sym, from))

  protected[this] def rebuildWithSymbols(gen: ConstArray[TermSymbol]) = copy(sym = gen.head)

  def withInferredType(scope: Type.Scope, typeChildren: Boolean): Self = {
    // Assign type to "from" Node and compute the resulting scope
    val f2 = from.infer(scope, typeChildren)
    val genScope = scope + (sym -> f2.nodeType.asCollectionType.elementType)
    // Assign types to "select", "where", "groupBy", "orderBy", "having",
    // "distinct", "fetch" and "offset" Nodes
    val s2 = select.infer(genScope, typeChildren)
    val w2 = mapOrNone(where)(_.infer(genScope, typeChildren))
    val g2 = mapOrNone(groupBy)(_.infer(genScope, typeChildren))
    val o = orderBy.map(_._1)
    val o2 = o.endoMap(_.infer(genScope, typeChildren))
    val h2 = mapOrNone(having)(_.infer(genScope, typeChildren))
    val distinct2 = mapOrNone(distinct)(_.infer(genScope, typeChildren))
    val fetch2 = mapOrNone(fetch)(_.infer(genScope, typeChildren))
    val offset2 = mapOrNone(offset)(_.infer(genScope, typeChildren))
    // Check if the nodes changed
    val same = (f2 eq from) && (s2 eq select) && w2.isEmpty && g2.isEmpty && (o2 eq o) && h2.isEmpty &&
      distinct2.isEmpty && fetch2.isEmpty && offset2.isEmpty
    val newType =
      if(!hasType) CollectionType(f2.nodeType.asCollectionType.cons, s2.nodeType.asCollectionType.elementType)
      else nodeType
    // Avoid allocation when nothing (children or type) changed.
    if(same && newType == nodeType) this else {
      copy(
        from = f2,
        select = s2,
        where = w2.orElse(where),
        groupBy = g2.orElse(groupBy),
        orderBy = if(o2 eq o) orderBy else orderBy.zip(o2).map { case ((_, o), n) => (n, o) },
        having = h2.orElse(having),
        distinct = distinct2.orElse(distinct),
        fetch = fetch2.orElse(fetch),
        offset = offset2.orElse(offset)
      ) :@ newType
    }
  }
}
/** The row_number window function, ordered by the given expressions. */
final case class RowNumber(by: ConstArray[(Node, Ordering)] = ConstArray.empty) extends SimplyTypedNode {
  type Self = RowNumber

  /** row_number always produces a Long. */
  def buildType = ScalaBaseType.longType

  /** Only the ordering expressions are child nodes; the directions are data. */
  lazy val children = by.map(_._1)

  protected[this] def rebuild(ch: ConstArray[Node]) =
    copy(by = by.zip(ch).map { case ((_, direction), newNode) => (newNode, direction) })

  override def childNames = by.zipWithIndex.map("by" + _._2).toSeq

  override def getDumpInfo = super.getDumpInfo.copy(mainInfo = "")
}
| bsd-2-clause |
bcg62/homebrew-core | Formula/meson-internal.rb | 1778 | class MesonInternal < Formula
  include Language::Python::Virtualenv

  desc "Fast and user friendly build system"
  homepage "https://mesonbuild.com/"
  url "https://github.com/mesonbuild/meson/releases/download/0.46.1/meson-0.46.1.tar.gz"
  sha256 "19497a03e7e5b303d8d11f98789a79aba59b5ad4a81bd00f4d099be0212cee78"

  bottle do
    cellar :any_skip_relocation
    sha256 "4f65a25c147b6e21ce47cfd2d2f744ee3c0a55e9e1b07c9119dfeb52b13946fe" => :mojave
    sha256 "cadf29ef1454acee4573d184a01e86e9c05d636b445c21255314dcca80cd9585" => :high_sierra
    sha256 "ac82416f8f8f99bfd0c19ba2196028d541b945f6bf401a018f59a0d81775988a" => :sierra
    sha256 "a2434e205cbab983230a6019fa9520adb2a5c2c31eb8d430ac80b74ddec790b6" => :el_capitan
  end

  # keg_only: never linked into the prefix; other formulae reference it by path.
  keg_only <<~EOS
    this formula contains a heavily patched version of the meson build system and
    is exclusively used internally by other formulae.
    Users are advised to run `brew install meson` to install
    the official meson build
  EOS

  depends_on "ninja"
  depends_on "python"

  # see https://github.com/mesonbuild/meson/pull/2577
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/a20d7df94112f93ea81f72ff3eacaa2d7e681053/meson-internal/meson-osx.patch?full_index=1"
    sha256 "d8545f5ffbb4dcc58131f35a9a97188ecb522c6951574c616d0ad07495d68895"
  end

  def install
    virtualenv_install_with_resources
  end

  test do
    # NOTE(review): the C snippet relies on implicit `int` and an implicit
    # declaration of puts(); strict/modern compilers may reject it — confirm
    # before bumping toolchains.
    (testpath/"helloworld.c").write <<~EOS
      main() {
          puts("hi");
          return 0;
      }
    EOS
    (testpath/"meson.build").write <<~EOS
      project('hello', 'c')
      executable('hello', 'helloworld.c')
    EOS
    mkdir testpath/"build" do
      system "#{bin}/meson", ".."
      assert_predicate testpath/"build/build.ninja", :exist?
    end
  end
end
| bsd-2-clause |
janeen666/mi-instrument | mi/dataset/driver/ctdmo_ghqr/imodem/test/test_ctdmo_ghqr_imodem_recovered_driver.py | 917 | #!/usr/bin/env python
import os
import unittest
from mi.core.log import get_logger
from mi.dataset.dataset_driver import ParticleDataHandler
from mi.dataset.driver.ctdmo_ghqr.imodem.ctdmo_ghqr_imodem_recovered_driver import parse
from mi.dataset.driver.ctdmo_ghqr.imodem.resource import RESOURCE_PATH
__author__ = 'mworden'
log = get_logger()
class DriverTest(unittest.TestCase):
    """Smoke test for the ctdmo_ghqr imodem recovered dataset driver."""

    def test_one(self):
        """Parse a sample .DAT file and verify parsing reports no failure."""
        source_file_path = os.path.join(RESOURCE_PATH, 'ctdmo01_20140712_120719.DAT')

        particle_data_handler = ParticleDataHandler()
        particle_data_handler = parse(None, source_file_path, particle_data_handler)

        log.debug("SAMPLES: %s", particle_data_handler._samples)
        log.debug("FAILURE: %s", particle_data_handler._failure)

        # assertEquals is a deprecated alias of assertEqual; use the
        # canonical name.
        self.assertEqual(particle_data_handler._failure, False)
if __name__ == '__main__':
    # Allow running this single test directly, without a test runner.
    DriverTest('test_one').test_one()
| bsd-2-clause |
ulule/pybbm | pybb/contrib/ban/compat.py | 225 | import django

__all__ = ['User']

# Django 1.5+ compatibility: custom user models (AUTH_USER_MODEL) were added
# in 1.5, so resolve the active user model there; earlier versions only have
# the concrete auth.User model.
if django.VERSION >= (1, 5):
    from django.contrib.auth import get_user_model

    # NOTE(review): get_user_model() runs at import time; on newer Django this
    # requires the app registry to be ready — confirm import ordering.
    User = get_user_model()
else:
    from django.contrib.auth.models import User
| bsd-2-clause |
leoj3n/homebrew-cask | Casks/kismac.rb | 212 | class Kismac < Cask
  # NOTE(review): this uses the legacy `class ... < Cask` DSL with a sha1
  # checksum and `link`; current homebrew-cask expects the `cask '...' do`
  # block DSL, sha256, and `app` — confirm against the tap's supported syntax.
  url 'http://update.kismacmirror.com/binaries/KisMAC-0.3.3.dmg'
  homepage 'http://kismac-ng.org/'
  version '0.3.3'
  sha1 '798056ecc6b604ffd7d3df23e220d3efa4b6c775'
  link 'KisMAC.app'
end
| bsd-2-clause |
tidepool-org/platform | vendor/github.com/cloudevents/sdk-go/v2/client/client_observed.go | 2476 | package client
import (
"context"
"github.com/cloudevents/sdk-go/v2/event"
"github.com/cloudevents/sdk-go/v2/extensions"
"github.com/cloudevents/sdk-go/v2/observability"
"github.com/cloudevents/sdk-go/v2/protocol"
"go.opencensus.io/trace"
)
// NewObserved produces a new client, wrapped with observability reporting and
// tracing, from the provided transport object and applied client options.
func NewObserved(protocol interface{}, opts ...Option) (Client, error) {
	inner, err := New(protocol, opts...)
	if err != nil {
		return nil, err
	}

	obs := &obsClient{client: inner}
	if err := obs.applyOptions(opts...); err != nil {
		return nil, err
	}
	return obs, nil
}
// obsClient decorates a Client, recording an observability report and an
// OpenCensus span around each Send/Request/StartReceiver operation.
type obsClient struct {
	client Client // wrapped client that performs the real work

	addTracing bool // when set, Send stamps tracing extension attributes onto outgoing events
}
// applyOptions runs every client Option against the observed client,
// stopping at the first failure.
func (c *obsClient) applyOptions(opts ...Option) error {
	for _, opt := range opts {
		if err := opt(c); err != nil {
			return err
		}
	}
	return nil
}
// Send transmits the provided event on a preconfigured Protocol. Send returns
// an error if there was an issue validating the outbound event or the
// transport returns an error.
func (c *obsClient) Send(ctx context.Context, e event.Event) protocol.Result {
	ctx, r := observability.NewReporter(ctx, reportSend)
	ctx, span := trace.StartSpan(ctx, observability.ClientSpanName, trace.WithSpanKind(trace.SpanKindClient))
	defer span.End()
	if span.IsRecordingEvents() {
		span.AddAttributes(EventTraceAttributes(&e)...)
	}

	if c.addTracing {
		// Clone before mutating so the tracing extension attributes added
		// below do not leak into event context shared with the caller.
		e.Context = e.Context.Clone()
		extensions.FromSpanContext(span.SpanContext()).AddTracingAttributes(&e)
	}

	result := c.client.Send(ctx, e)

	// Record the outcome on the observability reporter.
	if protocol.IsACK(result) {
		r.OK()
	} else {
		r.Error()
	}
	return result
}
// Request transmits the provided event and waits for a response event,
// recording the exchange in an OpenCensus span and observability report.
func (c *obsClient) Request(ctx context.Context, e event.Event) (*event.Event, protocol.Result) {
	ctx, reporter := observability.NewReporter(ctx, reportRequest)
	ctx, span := trace.StartSpan(ctx, observability.ClientSpanName, trace.WithSpanKind(trace.SpanKindClient))
	defer span.End()

	if span.IsRecordingEvents() {
		span.AddAttributes(EventTraceAttributes(&e)...)
	}

	resp, result := c.client.Request(ctx, e)
	if !protocol.IsACK(result) {
		reporter.Error()
	} else {
		reporter.OK()
	}
	return resp, result
}
// StartReceiver sets up the given fn to handle Receive.
// See Client.StartReceiver for details. This is a blocking call.
func (c *obsClient) StartReceiver(ctx context.Context, fn interface{}) error {
	ctx, reporter := observability.NewReporter(ctx, reportStartReceiver)

	if err := c.client.StartReceiver(ctx, fn); err != nil {
		reporter.Error()
		return err
	}
	reporter.OK()
	return nil
}
| bsd-2-clause |
miataru/miataru-client-csharp | Json.NET/Source/Newtonsoft.Json/Serialization/ExpressionValueProvider.cs | 4236 | #region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
#if !(NET20 || NET35 || PORTABLE40)
using System;
using System.Collections.Generic;
#if NET20
using Newtonsoft.Json.Utilities.LinqBridge;
#endif
using System.Text;
using System.Reflection;
using Newtonsoft.Json.Utilities;
using System.Globalization;
namespace Newtonsoft.Json.Serialization
{
/// <summary>
/// Get and set values for a <see cref="MemberInfo"/> using delegates built by
/// <c>ExpressionReflectionDelegateFactory</c> (compiled expressions rather than
/// plain reflection calls).
/// </summary>
public class ExpressionValueProvider : IValueProvider
{
    // The field or property this provider reads from and writes to.
    private readonly MemberInfo _memberInfo;
    // Lazily created accessor delegates.
    // NOTE(review): creation is not synchronized; concurrent first use may
    // build a delegate twice. Last write wins, which looks benign — confirm.
    private Func<object, object> _getter;
    private Action<object, object> _setter;
    /// <summary>
    /// Initializes a new instance of the <see cref="ExpressionValueProvider"/> class.
    /// </summary>
    /// <param name="memberInfo">The member info.</param>
    public ExpressionValueProvider(MemberInfo memberInfo)
    {
      ValidationUtils.ArgumentNotNull(memberInfo, "memberInfo");
      _memberInfo = memberInfo;
    }
    /// <summary>
    /// Sets the value. Any failure (including the DEBUG-only validation below)
    /// is rethrown as a <c>JsonSerializationException</c> with the member name
    /// and target type, preserving the original exception as the cause.
    /// </summary>
    /// <param name="target">The target to set the value on.</param>
    /// <param name="value">The value to set on the target.</param>
    public void SetValue(object target, object value)
    {
      try
      {
        if (_setter == null)
          _setter = ExpressionReflectionDelegateFactory.Instance.CreateSet<object>(_memberInfo);
#if DEBUG
        // dynamic method doesn't check whether the type is 'legal' to set
        // add this check for unit tests
        if (value == null)
        {
          if (!ReflectionUtils.IsNullable(ReflectionUtils.GetMemberUnderlyingType(_memberInfo)))
            throw new JsonSerializationException("Incompatible value. Cannot set {0} to null.".FormatWith(CultureInfo.InvariantCulture, _memberInfo));
        }
        else if (!ReflectionUtils.GetMemberUnderlyingType(_memberInfo).IsAssignableFrom(value.GetType()))
        {
          throw new JsonSerializationException("Incompatible value. Cannot set {0} to type {1}.".FormatWith(CultureInfo.InvariantCulture, _memberInfo, value.GetType()));
        }
#endif
        _setter(target, value);
      }
      catch (Exception ex)
      {
        throw new JsonSerializationException("Error setting value to '{0}' on '{1}'.".FormatWith(CultureInfo.InvariantCulture, _memberInfo.Name, target.GetType()), ex);
      }
    }
    /// <summary>
    /// Gets the value. Failures are rethrown as
    /// <c>JsonSerializationException</c> with context, preserving the cause.
    /// </summary>
    /// <param name="target">The target to get the value from.</param>
    /// <returns>The value.</returns>
    public object GetValue(object target)
    {
      try
      {
        if (_getter == null)
          _getter = ExpressionReflectionDelegateFactory.Instance.CreateGet<object>(_memberInfo);
        return _getter(target);
      }
      catch (Exception ex)
      {
        throw new JsonSerializationException("Error getting value from '{0}' on '{1}'.".FormatWith(CultureInfo.InvariantCulture, _memberInfo.Name, target.GetType()), ex);
      }
    }
}
}
#endif | bsd-2-clause |
slivas/hoz | vendor/codeception/codeception/bin/build.php | 233 | <?php
chdir(__DIR__.'/..');
system('php bin/update_docs.php');
system('php bin/build_site.php');
system('php bin/build_git.php');
// system('php bin/build_pear.php');
system('php bin/compile.php');
system('php bin/release_phar.php'); | bsd-3-clause |
michaelrice/yavijava | src/main/java/com/vmware/vim25/InvalidCAMServer.java | 2025 | /*================================================================================
Copyright (c) 2013 Steve Jin. All Rights Reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of VMware, Inc. nor the names of its contributors may be used
to endorse or promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL VMWARE, INC. OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
================================================================================*/
package com.vmware.vim25;
/**
* @author Steve Jin (http://www.doublecloud.org)
* @version 5.1
*/
@SuppressWarnings("all")
public class InvalidCAMServer extends ActiveDirectoryFault {
public String camServer;
public String getCamServer() {
return this.camServer;
}
public void setCamServer(String camServer) {
this.camServer = camServer;
}
} | bsd-3-clause |
fregaham/KiWi | src/model/kiwi/model/activity/DeleteActivity.java | 2473 | /*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright (c) 2008-2009, The KiWi Project (http://www.kiwi-project.eu)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of the KiWi Project nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* Contributor(s):
*
*
*/
package kiwi.model.activity;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
/**
 * Activity representing the deletion of a ContentItem by a user.
 * Persisted via single-table inheritance with discriminator value "DELETE".
 *
 * @author Sebastian Schaffert
 */
@Entity
@DiscriminatorValue("DELETE")
public class DeleteActivity extends ContentItemActivity {

    /** Default constructor; delegates field initialisation to the superclass. */
    public DeleteActivity() {
        super();
    }

    /**
     * Returns the message identifier used to render this activity.
     *
     * @return "activity.deleteContentItem"
     * @see kiwi.model.activity.Activity#getMessageIdentifier()
     */
    @Override
    public String getMessageIdentifier() {
        return "activity.deleteContentItem";
    }
}
| bsd-3-clause |
chromium/chromium | chrome/browser/resources/history/history_clusters/page_favicon.ts | 2131 | // Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import './shared_vars.js';
import {getFaviconForPageURL} from 'chrome://resources/js/icon.js';
import {Url} from 'chrome://resources/mojo/url/mojom/url.mojom-webui.js';
import {PolymerElement} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js';
import {getTemplate} from './page_favicon.html.js';
/**
* @fileoverview This file provides a custom element displaying a page favicon.
*/
declare global {
interface HTMLElementTagNameMap {
'page-favicon': PageFavicon,
}
}
// Custom element that renders the favicon for a page URL as a CSS
// background-image on itself.
class PageFavicon extends PolymerElement {
  static get is() {
    return 'page-favicon';
  }
  static get template() {
    return getTemplate();
  }
  static get properties() {
    return {
      /**
       * Whether the favicon belongs to a top visit. Reflected to an attribute
       * and used below to pick the larger icon size.
       */
      isTopVisitFavicon: {
        type: Boolean,
        reflectToAttribute: true,
        value: false,
      },
      /**
       * The element's style attribute. Recomputed whenever `url` or
       * `isTopVisitFavicon` changes and reflected so the background-image
       * actually applies to the element.
       */
      style: {
        type: String,
        computed: `computeStyle_(url, isTopVisitFavicon)`,
        reflectToAttribute: true,
      },
      /**
       * The URL for which the favicon is shown.
       */
      url: Object,
    };
  }
  //============================================================================
  // Properties
  //============================================================================
  isTopVisitFavicon: boolean;
  url: Url;
  //============================================================================
  // Helper methods
  //============================================================================
  // Builds the inline style string. Polymer invokes this on dependency
  // changes; the dependencies are read off `this`, not passed as arguments.
  private computeStyle_(): string {
    // No URL yet (property unset) — render no icon.
    if (!this.url) {
      return '';
    }
    return `background-image:${
        getFaviconForPageURL(
            this.url.url, false, '',
            this.isTopVisitFavicon ? /** --top-visit-favicon-size */ 24 :
                                     /** --favicon-size */ 16)}`;
  }
}
customElements.define(PageFavicon.is, PageFavicon);
| bsd-3-clause |
whitefire/zf2 | library/Zend/Service/WindowsAzure/RetryPolicy/RetryN.php | 2750 | <?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Service_WindowsAzure
* @subpackage RetryPolicy
* @copyright Copyright (c) 2005-2011 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
/**
 * Retry policy that retries a failed operation a fixed number of times,
 * sleeping a fixed interval between attempts.
 *
 * @uses Zend_Service_WindowsAzure_RetryPolicy_AbstractRetryPolicy
 * @uses Zend_Service_WindowsAzure_RetryPolicy_Exception
 * @category Zend
 * @package Zend_Service_WindowsAzure
 * @subpackage RetryPolicy
 * @copyright Copyright (c) 2005-2011 Zend Technologies USA Inc. (http://www.zend.com)
 * @license http://framework.zend.com/license/new-bsd New BSD License
 */
class Zend_Service_WindowsAzure_RetryPolicy_RetryN extends Zend_Service_WindowsAzure_RetryPolicy_AbstractRetryPolicy
{
    /**
     * Number of retries (additional attempts after the initial one)
     *
     * @var int
     */
    protected $_retryCount = 1;

    /**
     * Interval between retries (in milliseconds)
     *
     * @var int
     */
    protected $_retryInterval = 0;

    /**
     * Constructor
     *
     * @param int $count Number of retries
     * @param int $intervalBetweenRetries Interval between retries (in milliseconds)
     */
    public function __construct($count = 1, $intervalBetweenRetries = 0)
    {
        $this->_retryCount = $count;
        $this->_retryInterval = $intervalBetweenRetries;
    }

    /**
     * Execute function under retry policy.
     *
     * Makes up to ($_retryCount + 1) attempts: the initial call plus
     * $_retryCount retries. When the final attempt fails, a
     * Zend_Service_WindowsAzure_RetryPolicy_Exception wrapping the last
     * failure message is thrown.
     *
     * BUG FIX: this previously threw when $retriesLeft == 1, so the last
     * allotted attempt never ran (with $count = 1 no retry happened at all),
     * and with $count = 0 the exception was silently swallowed and null
     * returned. The guard now fires only when no retries remain.
     *
     * @param string|array $function Function to execute
     * @param array $parameters Parameters for function call
     * @return mixed
     * @throws Zend_Service_WindowsAzure_RetryPolicy_Exception when all attempts fail
     */
    public function execute($function, $parameters = array())
    {
        for ($retriesLeft = $this->_retryCount; $retriesLeft >= 0; --$retriesLeft) {
            try {
                return call_user_func_array($function, $parameters);
            } catch (Exception $ex) {
                if ($retriesLeft == 0) {
                    // No retries remain: surface the failure instead of
                    // falling out of the loop.
                    throw new Zend_Service_WindowsAzure_RetryPolicy_Exception("Exceeded retry count of " . $this->_retryCount . ". " . $ex->getMessage());
                }
                usleep($this->_retryInterval * 1000);
            }
        }
    }
}
| bsd-3-clause |
chromium/chromium | device/vr/openxr/openxr_input_helper.cc | 10199 | // Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "device/vr/openxr/openxr_input_helper.h"
#include "device/gamepad/public/cpp/gamepad.h"
#include "device/vr/openxr/openxr_util.h"
#include "device/vr/util/xr_standard_gamepad_builder.h"
namespace device {
namespace {
// Combines an axis-bearing button's state with its 2D axis values into a
// single Gamepad ButtonData. Returns nullopt when the button is absent or the
// axis vector does not contain exactly an x and a y component.
absl::optional<GamepadBuilder::ButtonData> GetAxisButtonData(
    OpenXrAxisType openxr_button_type,
    absl::optional<GamepadButton> button_data,
    std::vector<double> axis) {
  if (!button_data || axis.size() != 2) {
    return absl::nullopt;
  }

  GamepadBuilder::ButtonData button;
  button.type = (openxr_button_type == OpenXrAxisType::kThumbstick)
                    ? GamepadBuilder::ButtonData::Type::kThumbstick
                    : GamepadBuilder::ButtonData::Type::kTouchpad;

  button.touched = button_data->touched;
  button.pressed = button_data->pressed;
  button.value = button_data->value;

  // Invert the y axis because -1 is up in the Gamepad API, but down in
  // OpenXR.
  button.x_axis = axis[0];
  button.y_axis = -axis[1];
  return button;
}
// Builds an xr-standard Gamepad for a controller. The trigger is mandatory;
// everything else (squeeze, trackpad, thumbstick, extra buttons) is added
// only when the controller reports it.
absl::optional<Gamepad> GetXrStandardGamepad(
    const OpenXrController& controller) {
  XRStandardGamepadBuilder builder(controller.GetHandness());

  absl::optional<GamepadButton> trigger =
      controller.GetButton(OpenXrButtonType::kTrigger);
  if (!trigger)
    return absl::nullopt;
  builder.SetPrimaryButton(*trigger);

  absl::optional<GamepadButton> squeeze =
      controller.GetButton(OpenXrButtonType::kSqueeze);
  if (squeeze)
    builder.SetSecondaryButton(*squeeze);

  // Trackpad: button state plus its 2D axis.
  absl::optional<GamepadButton> trackpad_button =
      controller.GetButton(OpenXrButtonType::kTrackpad);
  std::vector<double> trackpad_axis =
      controller.GetAxis(OpenXrAxisType::kTrackpad);
  absl::optional<GamepadBuilder::ButtonData> trackpad_data = GetAxisButtonData(
      OpenXrAxisType::kTrackpad, trackpad_button, trackpad_axis);
  if (trackpad_data)
    builder.SetTouchpadData(*trackpad_data);

  // Thumbstick: button state plus its 2D axis.
  absl::optional<GamepadButton> thumbstick_button =
      controller.GetButton(OpenXrButtonType::kThumbstick);
  std::vector<double> thumbstick_axis =
      controller.GetAxis(OpenXrAxisType::kThumbstick);
  absl::optional<GamepadBuilder::ButtonData> thumbstick_data =
      GetAxisButtonData(OpenXrAxisType::kThumbstick, thumbstick_button,
                        thumbstick_axis);
  if (thumbstick_data)
    builder.SetThumbstickData(*thumbstick_data);

  // Optional buttons, appended in the same order the original code used so
  // the resulting Gamepad button indices are unchanged.
  const OpenXrButtonType kOptionalButtons[] = {
      OpenXrButtonType::kButton1,   OpenXrButtonType::kButton2,
      OpenXrButtonType::kThumbrest, OpenXrButtonType::kGrasp,
      OpenXrButtonType::kShoulder};
  for (OpenXrButtonType button_type : kOptionalButtons) {
    absl::optional<GamepadButton> button = controller.GetButton(button_type);
    if (button)
      builder.AddOptionalButtonData(*button);
  }

  return builder.GetGamepad();
}
} // namespace
// Factory: constructs and initializes an input helper for the given session.
// On failure the XrResult from Initialize() is returned and *helper is left
// untouched.
XrResult OpenXRInputHelper::CreateOpenXRInputHelper(
    XrInstance instance,
    XrSystemId system,
    const OpenXrExtensionHelper& extension_helper,
    XrSession session,
    XrSpace local_space,
    std::unique_ptr<OpenXRInputHelper>* helper) {
  auto input_helper = std::make_unique<OpenXRInputHelper>(session, local_space);
  RETURN_IF_XR_FAILED(
      input_helper->Initialize(instance, system, extension_helper));
  *helper = std::move(input_helper);
  return XR_SUCCESS;
}
// Stores the session and its local reference space; the path helper is
// created here but not initialized until Initialize() runs.
OpenXRInputHelper::OpenXRInputHelper(XrSession session, XrSpace local_space)
    : session_(session),
      local_space_(local_space),
      path_helper_(std::make_unique<OpenXRPathHelper>()) {}

OpenXRInputHelper::~OpenXRInputHelper() = default;
// Sets up the path helper, initializes one controller per handedness,
// suggests the accumulated action bindings for every interaction profile,
// and finally attaches all action sets to the session.
XrResult OpenXRInputHelper::Initialize(
    XrInstance instance,
    XrSystemId system,
    const OpenXrExtensionHelper& extension_helper) {
  RETURN_IF_XR_FAILED(path_helper_->Initialize(instance, system));

  // This map is used to store bindings for different kinds of interaction
  // profiles. This allows the runtime to choose a different input sources
  // based on availability.
  std::map<XrPath, std::vector<XrActionSuggestedBinding>> bindings;

  // One controller per handedness value; each contributes its suggested
  // bindings to the per-profile map and starts with no buttons held.
  for (size_t i = 0; i < controller_states_.size(); i++) {
    RETURN_IF_XR_FAILED(controller_states_[i].controller.Initialize(
        static_cast<OpenXrHandednessType>(i), instance, session_,
        path_helper_.get(), extension_helper, &bindings));
    controller_states_[i].primary_button_pressed = false;
    controller_states_[i].squeeze_button_pressed = false;
  }

  // Hand the accumulated bindings to the runtime, one call per profile.
  for (auto it = bindings.begin(); it != bindings.end(); it++) {
    XrInteractionProfileSuggestedBinding profile_suggested_bindings = {
        XR_TYPE_INTERACTION_PROFILE_SUGGESTED_BINDING};
    profile_suggested_bindings.interactionProfile = it->first;
    profile_suggested_bindings.suggestedBindings = it->second.data();
    profile_suggested_bindings.countSuggestedBindings = it->second.size();
    RETURN_IF_XR_FAILED(xrSuggestInteractionProfileBindings(
        instance, &profile_suggested_bindings));
  }

  // Collect every controller's action set and attach them all at once.
  std::vector<XrActionSet> action_sets(controller_states_.size());
  for (size_t i = 0; i < controller_states_.size(); i++) {
    action_sets[i] = controller_states_[i].controller.action_set();
  }
  XrSessionActionSetsAttachInfo attach_info = {
      XR_TYPE_SESSION_ACTION_SETS_ATTACH_INFO};
  attach_info.countActionSets = action_sets.size();
  attach_info.actionSets = action_sets.data();
  RETURN_IF_XR_FAILED(xrAttachSessionActionSets(session_, &attach_info));
  return XR_SUCCESS;
}
// Syncs the action sets and converts each connected controller into a WebXR
// input source state. Returns an empty vector (and clears the remembered
// per-controller button state) when syncing fails.
std::vector<mojom::XRInputSourceStatePtr> OpenXRInputHelper::GetInputState(
    bool hand_input_enabled,
    XrTime predicted_display_time) {
  std::vector<mojom::XRInputSourceStatePtr> input_states;
  if (XR_FAILED(SyncActions(predicted_display_time))) {
    // Without fresh action data nothing can be reported; also reset the
    // remembered pressed flags so no spurious "click" is emitted later.
    for (OpenXrControllerState& state : controller_states_) {
      state.primary_button_pressed = false;
      state.squeeze_button_pressed = false;
    }
    return input_states;
  }
  for (uint32_t i = 0; i < controller_states_.size(); i++) {
    device::OpenXrController* controller = &controller_states_[i].controller;
    absl::optional<GamepadButton> primary_button =
        controller->GetButton(OpenXrButtonType::kTrigger);
    absl::optional<GamepadButton> squeeze_button =
        controller->GetButton(OpenXrButtonType::kSqueeze);
    // Having a trigger button is the minimum for a WebXR input.
    // No trigger button indicates input is not connected.
    if (!primary_button) {
      continue;
    }
    device::mojom::XRInputSourceStatePtr state =
        device::mojom::XRInputSourceState::New();
    // ID 0 will cause a DCHECK in the hash table used on the blink side.
    // To ensure that we don't have any collisions with other ids, increment
    // all of the ids by one.
    state->source_id = i + 1;
    state->description = controller->GetDescription(predicted_display_time);
    if (!state->description) {
      continue;
    }
    state->mojo_from_input = controller->GetMojoFromGripTransform(
        predicted_display_time, local_space_, &state->emulated_position);
    state->primary_input_pressed = primary_button.value().pressed;
    // "Clicked" = pressed on the previous poll and released now.
    state->primary_input_clicked =
        controller_states_[i].primary_button_pressed &&
        !state->primary_input_pressed;
    controller_states_[i].primary_button_pressed = state->primary_input_pressed;
    if (squeeze_button) {
      state->primary_squeeze_pressed = squeeze_button.value().pressed;
      state->primary_squeeze_clicked =
          controller_states_[i].squeeze_button_pressed &&
          !state->primary_squeeze_pressed;
      controller_states_[i].squeeze_button_pressed =
          state->primary_squeeze_pressed;
    }
    state->gamepad = GetWebXRGamepad(*controller);
    // Return hand state if controller is a hand and the hand tracking feature
    // was requested for the session.
    if (hand_input_enabled) {
      state->hand_tracking_data =
          controller->GetHandTrackingData(local_space_, predicted_display_time);
    }
    input_states.push_back(std::move(state));
  }
  return input_states;
}
// Re-queries the active interaction profile on every controller; bails on
// the first failure.
XrResult OpenXRInputHelper::OnInteractionProfileChanged() {
  for (OpenXrControllerState& state : controller_states_) {
    RETURN_IF_XR_FAILED(state.controller.UpdateInteractionProfile());
  }
  return XR_SUCCESS;
}
// Maps the controller's current interaction profile to a Gamepad. Profiles
// with the xr-standard mapping get a full gamepad; profiles with mapping
// kNone — and unknown profiles — expose no gamepad at all.
absl::optional<Gamepad> OpenXRInputHelper::GetWebXRGamepad(
    const OpenXrController& controller) {
  const OpenXrInteractionProfileType current_profile =
      controller.interaction_profile();
  for (const auto& profile : GetOpenXrControllerInteractionProfiles()) {
    if (profile.type != current_profile)
      continue;
    if (profile.mapping == GamepadMapping::kXrStandard)
      return GetXrStandardGamepad(controller);
    return absl::nullopt;  // Mapping is kNone.
  }
  return absl::nullopt;
}
// Marks every controller's action set active and asks the runtime to sync
// them all in a single xrSyncActions call.
XrResult OpenXRInputHelper::SyncActions(XrTime predicted_display_time) {
  std::vector<XrActiveActionSet> active_action_sets;
  active_action_sets.reserve(controller_states_.size());
  for (OpenXrControllerState& state : controller_states_) {
    XrActiveActionSet active = {};
    active.actionSet = state.controller.action_set();
    active.subactionPath = XR_NULL_PATH;
    active_action_sets.push_back(active);
  }

  XrActionsSyncInfo sync_info = {XR_TYPE_ACTIONS_SYNC_INFO};
  sync_info.countActiveActionSets = active_action_sets.size();
  sync_info.activeActionSets = active_action_sets.data();
  return xrSyncActions(session_, &sync_info);
}
} // namespace device
| bsd-3-clause |
Serebriakov/micromod | micromod/compiler/Row.java | 1533 | package micromod.compiler;
/**
 * Parses a "Row" element: an optional leading row index followed by one note
 * token per channel, and stores the notes into the parent Pattern.
 */
public class Row implements Element {
	// Index of the next row to fill; advances by one after each begin().
	private int rowIdx;
	private Pattern parent;

	public Row( Pattern parent ) {
		this.parent = parent;
	}

	public String getToken() {
		return "Row";
	}

	public Element getParent() {
		return parent;
	}

	public Element getSibling() {
		return null;
	}

	public Element getChild() {
		return null;
	}

	/**
	 * Parse one row of text. A leading token shorter than 4 characters is
	 * taken as an explicit row index (note tokens are longer); it may not go
	 * backwards. Each remaining token is parsed as a note for successive
	 * channels; parse failures are rethrown with pattern/row/channel context.
	 */
	public void begin( String row ) {
		String[] notes = Parser.split( row, ' ' );
		micromod.Note output = new micromod.Note();
		int noteIdx = 0;
		if( notes[ noteIdx ].length() < 4 ) {
			int idx = Parser.parseInteger( notes[ noteIdx++ ] );
			if( idx < rowIdx ) {
				// Message fixed: previously read "is less less than current".
				throw new IllegalArgumentException( "Row index is less than current (" + rowIdx + "): " + idx );
			}
			rowIdx = idx;
		}
		int chanIdx = 0;
		while( noteIdx < notes.length ) {
			try {
				output.fromString( notes[ noteIdx++ ] );
				parent.setNote( rowIdx, chanIdx, output );
				chanIdx++;
			} catch( IllegalArgumentException e ) {
				String msg = "At Pattern " + parent.getPatternIdx() + " Row " + rowIdx + " Channel " + chanIdx;
				throw new IllegalArgumentException( msg + ": " + e.getMessage() );
			}
		}
		rowIdx++;
	}

	public void end() {
	}

	public String description() {
		return "\"00 C-2-1--- --------\" (Specify a single row within a pattern.)\n" +
			"(Row index, from 0 to 63, followed by notes for each channel.)\n" +
			"(See the '-notes' command line option for more information.)";
	}

	/** Reset the next-row index (used when a new pattern begins). */
	public void setRowIdx( int rowIdx ) {
		this.rowIdx = rowIdx;
	}
}
| bsd-3-clause |
imella/spree | core/app/models/spree/promotion/rules/user.rb | 828 | module Spree
class Promotion
module Rules
class User < PromotionRule
belongs_to :user, class_name: "::#{Spree.user_class}"
has_many :promotion_rule_users, class_name: 'Spree::PromotionRuleUser',
foreign_key: :promotion_rule_id,
dependent: :destroy
has_many :users, through: :promotion_rule_users, class_name: "::#{Spree.user_class}"
def applicable?(promotable)
promotable.is_a?(Spree::Order)
end
def eligible?(order, _options = {})
users.include?(order.user)
end
def user_ids_string
user_ids.join(',')
end
def user_ids_string=(s)
# check this
self.user_ids = s
end
end
end
end
end
| bsd-3-clause |
shimib/scala | test/files/run/reflection-companiontype.scala | 912 | import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
// Minimal class/companion pair used as the subject of the reflection queries.
class C
object C

// Prints the raw tree (with kind markers) of Type.companion for TypeRefs,
// ClassInfoTypes, a type alias, and package types, so the output can be
// compared against the expected check file.
object Test extends App {
  type T = C

  println("TypeRefs")
  println(showRaw(typeOf[C].companion, printKinds = true))
  println(showRaw(typeOf[C].companion.companion, printKinds = true))
  println(showRaw(typeOf[C.type].companion, printKinds = true))
  println("ClassInfoTypes")
  println(showRaw(typeOf[C].typeSymbol.info.companion, printKinds = true))
  println(showRaw(typeOf[C].typeSymbol.info.companion.typeSymbol.info.companion, printKinds = true))
  println(showRaw(typeOf[C.type].typeSymbol.info.companion, printKinds = true))
  println("Unrelated")
  // A type alias and package objects have no companion in the usual sense.
  println(showRaw(typeOf[T].companion, printKinds = true))
  println(showRaw(cm.staticPackage("scala").moduleClass.asType.toType.companion, printKinds = true))
  println(showRaw(cm.staticPackage("scala").info.companion, printKinds = true))
}
mdiggory/dryad-repo | dspace/modules/xmlui/src/main/java/org/dspace/app/xmlui/aspect/administrative/FlowItemUtils.java | 27061 | /**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.aspect.administrative;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Enumeration;
import org.apache.cocoon.environment.Request;
import org.apache.cocoon.servlet.multipart.Part;
import org.dspace.app.xmlui.utils.UIException;
import org.dspace.app.xmlui.wing.Message;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.*;
import org.dspace.content.authority.Choices;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.curate.Curator;
import org.dspace.embargo.EmbargoManager;
import org.dspace.handle.HandleManager;
/**
* Utility methods to processes actions on Groups. These methods are used
* exclusivly from the administrative flow scripts.
*
* @author Jay Paz
* @author Scott Phillips
*/
public class FlowItemUtils
{
/** Language Strings */
private static final Message T_metadata_updated = new Message("default","The Item's metadata was successfully updated.");
private static final Message T_metadata_added = new Message("default","New metadata was added.");
private static final Message T_item_withdrawn = new Message("default","The item has been withdrawn.");
private static final Message T_item_reinstated = new Message("default","The item has been reinstated.");
private static final Message T_item_moved = new Message("default","The item has been moved.");
private static final Message T_item_move_destination_not_found = new Message("default","The selected destination collection could not be found.");
private static final Message T_bitstream_added = new Message("default","The new bitstream was successfully uploaded.");
private static final Message T_bitstream_failed = new Message("default","Error while uploading file.");
private static final Message T_bitstream_updated = new Message("default","The bitstream has been updated.");
private static final Message T_bitstream_delete = new Message("default","The selected bitstreams have been deleted.");
private static final Message T_embargo_set = new Message("default","New embargo was set.");
private static final Message T_embargo_removed = new Message("default","Embargo was disabled.");
private static final Message T_embargo_not_set = new Message("default","The embargo could not be configured. Please make sure you have a valid future date.");
    /**
     * Resolve the given identifier to an item. The identifier may be either an
     * internal ID or a handle. If an item is found then the internal
     * ID of the item will be placed in the result "itemID" parameter.
     *
     * If the identifier was unable to be resolved to an item then the "identifier"
     * field is placed in error.
     *
     * @param context The current DSpace context.
     * @param identifier An Internal ID or a handle
     * @return A flow result
     */
    public static FlowResult resolveItemIdentifier(Context context, String identifier) throws SQLException
    {
        FlowResult result = new FlowResult();
        result.setContinue(false);
        // Check whether it's a handle or internal id (by checking if it has a slash in the string)
        if (identifier.contains("/"))
        {
            DSpaceObject dso = HandleManager.resolveToObject(context, identifier);
            if (dso != null && dso.getType() == Constants.ITEM)
            {
                result.setParameter("itemID", dso.getID());
                result.setParameter("type", Constants.ITEM);
                result.setContinue(true);
                return result;
            }
        }
        else
        {
            Item item = null;
            try {
                item = Item.find(context, Integer.valueOf(identifier));
            } catch (NumberFormatException e) {
                // Not a numeric internal id; fall through and report an error.
            }
            if (item != null)
            {
                result.setParameter("itemID", item.getID());
                result.setParameter("type", Constants.ITEM);
                result.setContinue(true);
                return result;
            }
        }
        // Neither resolution path succeeded: flag the identifier field.
        result.addError("identifier");
        return result;
    }
/**
* Process the request parameters to update the item's metadata and remove any selected bitstreams.
*
* Each metadata entry will have three fields "name_X", "value_X", and "language_X" where X is an
* integer that relates all three of the fields together. The name parameter stores the metadata name
* that is used by the entry (i.e schema_element_qualifier). The value and language paramaters are user
* inputed fields. If the optional parameter "remove_X" is given then the metadata value is removed.
*
* To support AJAX operations on this page an aditional parameter is considered, the "scope". The scope
* is the set of metadata entries that are being updated during this request. It the metadata name,
* schema_element_qualifier, only fields that have this name are considered! If all fields are to be
* considered then scope should be set to "*".
*
* When creating an AJAX query include all the name_X, value_X, language_X, and remove_X for the fields
* in the set, and then set the scope parameter to be the metadata field.
*
* @param context The current DSpace context
* @param itemID internal item id
* @param request the Cocoon request
* @return A flow result
*/
public static FlowResult processEditItem(Context context, int itemID, Request request) throws SQLException, AuthorizeException, UIException, IOException
{
FlowResult result = new FlowResult();
result.setContinue(false);
Item item = Item.find(context, itemID);
// STEP 1:
// Clear all metadata within the scope
// Only metadata values within this scope will be considered. This
// is so ajax request can operate on only a subset of the values.
String scope = request.getParameter("scope");
if ("*".equals(scope))
{
item.clearMetadata(Item.ANY, Item.ANY, Item.ANY, Item.ANY);
}
else
{
String[] parts = parseName(scope);
item.clearMetadata(parts[0],parts[1],parts[2],Item.ANY);
}
// STEP 2:
// First determine all the metadata fields that are within
// the scope parameter
ArrayList<Integer> indexes = new ArrayList<Integer>();
Enumeration parameters = request.getParameterNames();
while(parameters.hasMoreElements())
{
// Only consider the name_ fields
String parameterName = (String) parameters.nextElement();
if (parameterName.startsWith("name_"))
{
// Check if the name is within the scope
String parameterValue = request.getParameter(parameterName);
if ("*".equals(scope) || scope.equals(parameterValue))
{
// Extract the index from the name.
String indexString = parameterName.substring("name_".length());
Integer index = Integer.valueOf(indexString);
indexes.add(index);
}
}
}
// STEP 3:
// Iterate over all the indexes within the scope and add them back in.
for (Integer index=1; index <= indexes.size(); ++index)
{
String name = request.getParameter("name_"+index);
String value = request.getParameter("value_"+index);
String authority = request.getParameter("value_"+index+"_authority");
String confidence = request.getParameter("value_"+index+"_confidence");
String lang = request.getParameter("language_"+index);
String remove = request.getParameter("remove_"+index);
// the user selected the remove checkbox.
if (remove != null)
{
continue;
}
// get the field's name broken up
String[] parts = parseName(name);
// probe for a confidence value
int iconf = Choices.CF_UNSET;
if (confidence != null && confidence.length() > 0)
{
iconf = Choices.getConfidenceValue(confidence);
}
// upgrade to a minimum of NOVALUE if there IS an authority key
if (authority != null && authority.length() > 0 && iconf == Choices.CF_UNSET)
{
iconf = Choices.CF_NOVALUE;
}
item.addMetadata(parts[0], parts[1], parts[2], lang,
value, authority, iconf);
}
item.update();
context.commit();
result.setContinue(true);
result.setOutcome(true);
result.setMessage(T_metadata_updated);
return result;
}
/**
* Process the request paramaters to add a new metadata entry for the item.
*
* @param context The current DSpace context
* @param itemID internal item id
* @param request the Cocoon request
* @return A flow result
*/
public static FlowResult processAddMetadata(Context context, int itemID, Request request) throws SQLException, AuthorizeException, UIException, IOException
{
FlowResult result = new FlowResult();
result.setContinue(false);
Item item = Item.find(context, itemID);
String fieldID = request.getParameter("field");
String value = request.getParameter("value");
String language = request.getParameter("language");
MetadataField field = MetadataField.find(context,Integer.valueOf(fieldID));
MetadataSchema schema = MetadataSchema.find(context,field.getSchemaID());
item.addMetadata(schema.getName(), field.getElement(), field.getQualifier(), language, value);
item.update();
context.commit();
result.setContinue(true);
result.setOutcome(true);
result.setMessage(T_metadata_added);
return result;
}
/**
* Withdraw the specified item, this method assumes that the action has been confirmed.
*
* @param context The DSpace context
* @param itemID The id of the to-be-withdrawn item.
* @return A result object
*/
public static FlowResult processWithdrawItem(Context context, int itemID) throws SQLException, AuthorizeException, IOException
{
FlowResult result = new FlowResult();
result.setContinue(false);
Item item = Item.find(context, itemID);
item.withdraw();
context.commit();
result.setContinue(true);
result.setOutcome(true);
result.setMessage(T_item_withdrawn);
return result;
}
/**
* Reinstate the specified item, this method assumes that the action has been confirmed.
*
* @param context The DSpace context
* @param itemID The id of the to-be-reinstated item.
* @return A result object
*/
public static FlowResult processReinstateItem(Context context, int itemID) throws SQLException, AuthorizeException, IOException
{
FlowResult result = new FlowResult();
result.setContinue(false);
Item item = Item.find(context, itemID);
item.reinstate();
context.commit();
result.setContinue(true);
result.setOutcome(true);
result.setMessage(T_item_reinstated);
return result;
}
    /**
     * Move the specified item to another collection.
     *
     * @param context The DSpace context
     * @param itemID The id of the to-be-moved item.
     * @param collectionID The id of the destination collection.
     * @param inherit Whether to inherit the policies of the destination collection
     * @return A result object
     */
    public static FlowResult processMoveItem(Context context, int itemID, int collectionID, boolean inherit) throws SQLException, AuthorizeException, IOException
    {
        FlowResult result = new FlowResult();
        result.setContinue(false);
        Item item = Item.find(context, itemID);
        // Only administrators of the item may move it; for everyone else the
        // default "no outcome, do not continue" result created above is returned.
        if(AuthorizeManager.isAdmin(context, item))
        {
            //Add an action giving this user *explicit* admin permissions on the item itself.
            //This ensures that the user will be able to call item.update() even if he/she
            // moves it to a Collection that he/she doesn't administer.
            if (item.canEdit())
            {
                AuthorizeManager.authorizeAction(context, item, Constants.WRITE);
            }
            Collection destination = Collection.find(context, collectionID);
            if (destination == null)
            {
                // Unknown destination: fail without touching the item.
                result.setOutcome(false);
                result.setContinue(false);
                result.setMessage(T_item_move_destination_not_found);
                return result;
            }
            Collection owningCollection = item.getOwningCollection();
            if (destination.equals(owningCollection))
            {
                // nothing to do
                result.setOutcome(false);
                result.setContinue(false);
                return result;
            }
            // note: an item.move() method exists, but does not handle several cases:
            // - no preexisting owning collection (first arg is null)
            // - item already in collection, but not an owning collection
            // (works, but puts item in collection twice)
            // Don't re-add the item to a collection it's already in.
            boolean alreadyInCollection = false;
            for (Collection collection : item.getCollections())
            {
                if (collection.equals(destination))
                {
                    alreadyInCollection = true;
                    break;
                }
            }
            // Remove item from its owning collection and add to the destination
            if (!alreadyInCollection)
            {
                destination.addItem(item);
            }
            if (owningCollection != null)
            {
                owningCollection.removeItem(item);
            }
            item.setOwningCollection(destination);
            // Inherit policies of destination collection if required
            if (inherit)
            {
                item.inheritCollectionDefaultPolicies(destination);
            }
            item.update();
            context.commit();
            result.setOutcome(true);
            result.setContinue(true);
            result.setMessage(T_item_moved);
        }
        return result;
    }
/**
* Permanently delete the specified item, this method assumes that
* the action has been confirmed.
*
* @param context The DSpace context
* @param itemID The id of the to-be-deleted item.
* @return A result object
*/
public static FlowResult processDeleteItem(Context context, int itemID) throws SQLException, AuthorizeException, IOException
{
FlowResult result = new FlowResult();
result.setContinue(false);
Item item = Item.find(context, itemID);
Collection[] collections = item.getCollections();
// Remove item from all the collections it's in
for (Collection collection : collections)
{
collection.removeItem(item);
}
// Note: when removing an item from the last collection it will
// be removed from the system. So there is no need to also call
// an item.delete() method.
context.commit();
result.setContinue(true);
return result;
}
    /**
     * Add a new bitstream to the item. The bundle, bitstream (aka file), and description
     * will be used to create a new bitstream. If the format needs to be adjusted then they
     * will need to access the edit bitstream form after it has been uploaded.
     *
     * @param context The DSpace content
     * @param itemID The item to add a new bitstream too
     * @param request The request.
     * @return A flow result
     */
    public static FlowResult processAddBitstream(Context context, int itemID, Request request) throws SQLException, AuthorizeException, IOException
    {
        FlowResult result = new FlowResult();
        result.setContinue(false);
        // Upload a new file
        Item item = Item.find(context, itemID);
        // The uploaded file arrives as a Cocoon Part object under the "file" key.
        Object object = request.get("file");
        Part filePart = null;
        if (object instanceof Part)
        {
            filePart = (Part) object;
        }
        if (filePart != null && filePart.getSize() > 0)
        {
            InputStream is = filePart.getInputStream();
            String bundleName = request.getParameter("bundle");
            Bitstream bitstream;
            Bundle[] bundles = item.getBundles(bundleName);
            if (bundles.length < 1)
            {
                // No bundle with the requested name yet: create it together with the bitstream.
                // set bundle's name to ORIGINAL
                bitstream = item.createSingleBitstream(is, bundleName);
                // set the permission as defined in the owning collection
                Collection owningCollection = item.getOwningCollection();
                if (owningCollection != null)
                {
                    Bundle bnd = bitstream.getBundles()[0];
                    bnd.inheritCollectionDefaultPolicies(owningCollection);
                }
            }
            else
            {
                // we have a bundle already, just add bitstream
                bitstream = bundles[0].createBitstream(is);
            }
            // Strip all but the last filename. It would be nice
            // to know which OS the file came from.
            String name = filePart.getUploadName();
            while (name.indexOf('/') > -1)
            {
                name = name.substring(name.indexOf('/') + 1);
            }
            while (name.indexOf('\\') > -1)
            {
                name = name.substring(name.indexOf('\\') + 1);
            }
            bitstream.setName(name);
            bitstream.setSource(filePart.getUploadName());
            bitstream.setDescription(request.getParameter("description"));
            // Identify the format
            BitstreamFormat format = FormatIdentifier.guessFormat(context, bitstream);
            bitstream.setFormat(format);
            // Update to DB
            bitstream.update();
            item.update();
            context.commit();
            result.setContinue(true);
            result.setOutcome(true);
            result.setMessage(T_bitstream_added);
        }
        else
        {
            // Missing or empty upload: report failure to the user.
            result.setContinue(false);
            result.setOutcome(false);
            result.setMessage(T_bitstream_failed);
        }
        return result;
    }
    /**
     * Update a bitstream's metadata.
     *
     * @param context The DSpace content
     * @param itemID The item to which the bitstream belongs
     * @param bitstreamID The bitstream being updated.
     * @param primary "yes"/"no" flag marking this bitstream as primary in its bundle
     * @param description The new description of the bitstream
     * @param formatID The new format ID of the bitstream
     * @param userFormat Any user supplied formats.
     * @return A flow result object.
     */
    public static FlowResult processEditBitstream(Context context, int itemID, int bitstreamID, String primary, String description, int formatID, String userFormat) throws SQLException, AuthorizeException
    {
        FlowResult result = new FlowResult();
        result.setContinue(false);
        Bitstream bitstream = Bitstream.find(context, bitstreamID);
        BitstreamFormat currentFormat = bitstream.getFormat();
        //Step 1:
        // Update the bitstream's description
        if (description != null)
        {
            bitstream.setDescription(description);
        }
        //Step 2:
        // Check if the primary bitstream status has changed
        Bundle[] bundles = bitstream.getBundles();
        if (bundles != null && bundles.length > 0)
        {
            if (bitstreamID == bundles[0].getPrimaryBitstreamID())
            {
                // currently the bitstream is primary
                if ("no".equals(primary))
                {
                    // However the user has removed this bitstream as a primary bitstream.
                    bundles[0].unsetPrimaryBitstreamID();
                    bundles[0].update();
                }
            }
            else
            {
                // currently the bitstream is non-primary
                if ("yes".equals(primary))
                {
                    // However the user has set this bitstream as primary.
                    bundles[0].setPrimaryBitstreamID(bitstreamID);
                    bundles[0].update();
                }
            }
        }
        //Step 3 (was mislabeled "Step 2" in an earlier revision):
        // Update the bitstream's format. A positive formatID selects a known
        // format; otherwise a free-text user-supplied format may be recorded.
        if (formatID > 0)
        {
            if (currentFormat == null || currentFormat.getID() != formatID)
            {
                BitstreamFormat newFormat = BitstreamFormat.find(context, formatID);
                if (newFormat != null)
                {
                    bitstream.setFormat(newFormat);
                }
            }
        }
        else
        {
            if (userFormat != null && userFormat.length() > 0)
            {
                bitstream.setUserFormatDescription(userFormat);
            }
        }
        //Step 4:
        // Save our changes
        bitstream.update();
        context.commit();
        result.setContinue(true);
        result.setOutcome(true);
        result.setMessage(T_bitstream_updated);
        return result;
    }
/**
* Delete the given bitstreams from the bundle and item. If there are no more bitstreams
* left in a bundle then also remove it.
*
* @param context Current dspace content
* @param itemID The item id from which to remove bitstreams
* @param bitstreamIDs A bundle slash bitstream id pair of bitstreams to be removed.
* @return A flow result
*/
public static FlowResult processDeleteBitstreams(Context context, int itemID, String[] bitstreamIDs) throws SQLException, AuthorizeException, IOException, UIException
{
FlowResult result = new FlowResult();
result.setContinue(false);
Item item = Item.find(context, itemID);
for (String id : bitstreamIDs)
{
String[] parts = id.split("/");
if (parts.length != 2)
{
throw new UIException("Unable to parse id into bundle and bitstream id: "+id);
}
int bundleID = Integer.valueOf(parts[0]);
int bitstreamID = Integer.valueOf(parts[1]);
Bundle bundle = Bundle.find(context, bundleID);
Bitstream bitstream = Bitstream.find(context,bitstreamID);
bundle.removeBitstream(bitstream);
if (bundle.getBitstreams().length == 0)
{
item.removeBundle(bundle);
}
}
item.update();
context.commit();
result.setContinue(true);
result.setOutcome(true);
result.setMessage(T_bitstream_delete);
return result;
}
/**
* processCurateDSO
*
* Utility method to process curation tasks
* submitted via the DSpace GUI
*
* @param context
* @param itemID
* @param request
*
*/
public static FlowResult processCurateItem(Context context, int itemID, Request request)
throws AuthorizeException, IOException, SQLException, Exception
{
String task = request.getParameter("curate_task");
Curator curator = FlowCurationUtils.getCurator(task);
Item item = Item.find(context, itemID);
if (item != null)
{
curator.curate(item);
}
return FlowCurationUtils.getRunFlowResult(task, curator);
}
    /**
     * Queues the configured curation task against the given item instead of
     * running it immediately. The task name comes from the "curate_task"
     * request parameter and the target queue from the "curate"/"ui.queuename"
     * configuration property.
     *
     * @param context the current DSpace context
     * @param itemID  internal item id
     * @param request the Cocoon request
     * @return a flow result describing whether queueing succeeded
     */
    public static FlowResult processQueueItem(Context context, int itemID, Request request)
        throws AuthorizeException, IOException, SQLException, Exception
    {
        String task = request.getParameter("curate_task");
        Curator curator = FlowCurationUtils.getCurator(task);
        // Fall back to the numeric id for reporting when the item cannot be found.
        String objId = String.valueOf(itemID);
        String taskQueueName = ConfigurationManager.getProperty("curate", "ui.queuename");
        boolean status = false;
        Item item = Item.find(context, itemID);
        if (item != null)
        {
            objId = item.getHandle();
            try
            {
                curator.queue(context, objId, taskQueueName);
                status = true;
            }
            catch (IOException ioe)
            {
                // no-op
                // Deliberate best-effort swallow: a queueing failure is surfaced
                // via status=false in the flow result rather than as an exception.
            }
        }
        return FlowCurationUtils.getQueueFlowResult(task, status, objId, taskQueueName);
    }
/**
* Parse the given name into three parts, divided by an _. Each part should represent the
* schema, element, and qualifier. You are guaranteed that if no qualifier was supplied the
* third entry is null.
*
* @param name The name to be parsed.
* @return An array of name parts.
*/
private static String[] parseName(String name) throws UIException
{
String[] parts = new String[3];
String[] split = name.split("_");
if (split.length == 2) {
parts[0] = split[0];
parts[1] = split[1];
parts[2] = null;
} else if (split.length == 3) {
parts[0] = split[0];
parts[1] = split[1];
parts[2] = split[2];
} else {
throw new UIException("Unable to parse metedata field name: "+name);
}
return parts;
}
/**
* Process the request parameters for the embargo of the item
* @param context the current dspace context
* @param itemID internal item id
* @param request the Cocoon request
* @return A flow result
*/
public static FlowResult processEditEmbargo(Context context, int itemID, Request request) throws SQLException, AuthorizeException, IOException {
FlowResult result = new FlowResult();
result.setContinue(false);
Item item = Item.find(context, itemID);
Date embargoDate = null;
String date = request.getParameter("embargoed_until");
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
dateFormat.setLenient(false);
try {
embargoDate = dateFormat.parse(date);
} catch (ParseException e) {
result.setContinue(true);
result.setOutcome(false);
result.setMessage(new Message("error", "Date not Valid."));
return result;
}
//We don't want a day before today
Calendar today = Calendar.getInstance();
if(embargoDate.before(today.getTime())){
result.setContinue(true);
result.setOutcome(false);
result.setMessage(new Message("error", "Date not Valid."));
return result;
}
DCDate embargoDcDate = new DCDate(date); //want to reparse the original date; otherwise the java Date class always adds time fields
EmbargoManager.setEmbargo(context, item, embargoDcDate);
item.update();
context.commit();
result.setMessage(new Message("default", "Embargo configured."));
result.setContinue(true);
result.setOutcome(true);
return result;
}
public static FlowResult processLiftEmbargo(Context context, int itemID, Request request) throws SQLException, AuthorizeException, IOException {
FlowResult result = new FlowResult();
result.setContinue(false);
Item item = Item.find(context, itemID);
EmbargoManager.liftEmbargo(context, item);
item.update();
context.commit();
result.setMessage(new Message("default", "Embargo lifted."));
result.setContinue(true);
result.setOutcome(true);
return result;
}
/**
* Flesh out a number to two digits
*
* @param n
* the number
* @return the number as a two-digit string
*/
private static String fleshOut(int n)
{
if (n < 10)
{
return "0" + n;
}
else
{
return String.valueOf(n);
}
}
}
| bsd-3-clause |
rwatson/chromium-capsicum | base/waitable_event_unittest.cc | 2625 | // Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/time.h"
#include "base/waitable_event.h"
#include "base/platform_thread.h"
#include "testing/gtest/include/gtest/gtest.h"
using base::TimeDelta;
using base::WaitableEvent;
namespace {
typedef testing::Test WaitableEventTest;
}
// A manual-reset event stays signaled across repeated queries until Reset()
// is called explicitly; waits on a signaled event must succeed immediately.
TEST(WaitableEventTest, ManualBasics) {
  WaitableEvent event(true, false);
  EXPECT_FALSE(event.IsSignaled());
  event.Signal();
  EXPECT_TRUE(event.IsSignaled());
  EXPECT_TRUE(event.IsSignaled());  // still signaled: manual-reset does not auto-clear
  event.Reset();
  EXPECT_FALSE(event.IsSignaled());
  EXPECT_FALSE(event.TimedWait(TimeDelta::FromMilliseconds(10)));
  event.Signal();
  EXPECT_TRUE(event.Wait());
  EXPECT_TRUE(event.TimedWait(TimeDelta::FromMilliseconds(10)));
}
// An auto-reset event is consumed by the first successful query/wait; each
// Signal() therefore releases at most one observer.
TEST(WaitableEventTest, AutoBasics) {
  WaitableEvent event(false, false);
  EXPECT_FALSE(event.IsSignaled());
  event.Signal();
  EXPECT_TRUE(event.IsSignaled());
  EXPECT_FALSE(event.IsSignaled());  // the first query consumed the signal
  event.Reset();
  EXPECT_FALSE(event.IsSignaled());
  EXPECT_FALSE(event.TimedWait(TimeDelta::FromMilliseconds(10)));
  event.Signal();
  EXPECT_TRUE(event.Wait());
  EXPECT_FALSE(event.TimedWait(TimeDelta::FromMilliseconds(10)));  // Wait() consumed it
  event.Signal();
  EXPECT_TRUE(event.TimedWait(TimeDelta::FromMilliseconds(10)));
}
// WaitMany must return the index of an already-signaled event without
// blocking when one of the supplied events is signaled up front.
TEST(WaitableEventTest, WaitManyShortcut) {
  WaitableEvent* ev[5];
  for (unsigned i = 0; i < 5; ++i)
    ev[i] = new WaitableEvent(false, false);
  ev[3]->Signal();
  EXPECT_EQ(WaitableEvent::WaitMany(ev, 5), 3u);
  ev[3]->Signal();
  EXPECT_EQ(WaitableEvent::WaitMany(ev, 5), 3u);
  ev[4]->Signal();
  EXPECT_EQ(WaitableEvent::WaitMany(ev, 5), 4u);
  ev[0]->Signal();
  EXPECT_EQ(WaitableEvent::WaitMany(ev, 5), 0u);
  for (unsigned i = 0; i < 5; ++i)
    delete ev[i];
}
// Thread delegate that sleeps for the given number of seconds and then
// signals the supplied event, letting tests exercise a blocking WaitMany.
class WaitableEventSignaler : public PlatformThread::Delegate {
 public:
  WaitableEventSignaler(double seconds, WaitableEvent* ev)
      : seconds_(seconds),
        ev_(ev) {
  }
  void ThreadMain() {
    PlatformThread::Sleep(static_cast<int>(seconds_ * 1000));
    ev_->Signal();
  }
 private:
  const double seconds_;       // delay before signaling, in seconds
  WaitableEvent *const ev_;    // event to signal; not owned
};
// WaitMany must block until some event becomes signaled (here by a helper
// thread after ~100ms) and report that event's index.
TEST(WaitableEventTest, WaitMany) {
  WaitableEvent* ev[5];
  for (unsigned i = 0; i < 5; ++i)
    ev[i] = new WaitableEvent(false, false);
  WaitableEventSignaler signaler(0.1, ev[2]);
  PlatformThreadHandle thread;
  PlatformThread::Create(0, &signaler, &thread);
  EXPECT_EQ(WaitableEvent::WaitMany(ev, 5), 2u);
  PlatformThread::Join(thread);
  for (unsigned i = 0; i < 5; ++i)
    delete ev[i];
}
| bsd-3-clause |
nvie/GitPython | git/test/test_db.py | 939 | # test_repo.py
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
from git.db import GitCmdObjectDB
from git.exc import BadObject
from git.test.lib import TestBase
from git.util import bin_to_hex
import os.path as osp
class TestDB(TestBase):
    """Exercises GitCmdObjectDB partial-to-complete SHA resolution against
    the read-only fixture repository provided by TestBase (self.rorepo)."""

    def test_base(self):
        gdb = GitCmdObjectDB(osp.join(self.rorepo.git_dir, 'objects'), self.rorepo.git)
        # partial to complete - works with everything
        # "0.1.6" is a revision spec (tag) resolved to a full 40-char hex sha.
        hexsha = bin_to_hex(gdb.partial_to_complete_sha_hex("0.1.6"))
        assert len(hexsha) == 40
        # A 20-character hex prefix must round-trip to the same full sha.
        assert bin_to_hex(gdb.partial_to_complete_sha_hex(hexsha[:20])) == hexsha
        # fails with BadObject
        for invalid_rev in ("0000", "bad/ref", "super bad"):
            self.failUnlessRaises(BadObject, gdb.partial_to_complete_sha_hex, invalid_rev)
| bsd-3-clause |
ArturGaspar/scrapy | tests/test_downloadermiddleware_httpproxy.py | 6474 | import os
import sys
from functools import partial
from twisted.trial.unittest import TestCase, SkipTest
from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware
from scrapy.exceptions import NotConfigured
from scrapy.http import Response, Request
from scrapy.spiders import Spider
from scrapy.crawler import Crawler
from scrapy.settings import Settings
spider = Spider('foo')
class TestHttpProxyMiddleware(TestCase):
    """Tests for HttpProxyMiddleware: environment-derived proxies, per-request
    meta overrides, Proxy-Authorization header generation (including encoding
    handling) and no_proxy exclusion rules.

    Every test mutates os.environ; setUp/tearDown snapshot and restore it so
    the tests stay independent of each other."""
    # Use plain AssertionError so assertion failures look like stdlib unittest.
    failureException = AssertionError
    def setUp(self):
        # Snapshot the environment so tests can set *_proxy variables freely.
        self._oldenv = os.environ.copy()
    def tearDown(self):
        os.environ = self._oldenv
    def test_not_enabled(self):
        # HTTPPROXY_ENABLED=False must make from_crawler raise NotConfigured.
        settings = Settings({'HTTPPROXY_ENABLED': False})
        crawler = Crawler(spider, settings)
        self.assertRaises(NotConfigured, partial(HttpProxyMiddleware.from_crawler, crawler))
    def test_no_environment_proxies(self):
        # With no *_proxy variables set, requests must pass through untouched.
        os.environ = {'dummy_proxy': 'reset_env_and_do_not_raise'}
        mw = HttpProxyMiddleware()
        for url in ('http://e.com', 'https://e.com', 'file:///tmp/a'):
            req = Request(url)
            assert mw.process_request(req, spider) is None
            self.assertEqual(req.url, url)
            self.assertEqual(req.meta, {})
    def test_environment_proxies(self):
        # Each scheme picks its proxy from the matching <scheme>_proxy variable;
        # schemes without one (file://) get no proxy assigned.
        os.environ['http_proxy'] = http_proxy = 'https://proxy.for.http:3128'
        os.environ['https_proxy'] = https_proxy = 'http://proxy.for.https:8080'
        os.environ.pop('file_proxy', None)
        mw = HttpProxyMiddleware()
        for url, proxy in [('http://e.com', http_proxy),
                ('https://e.com', https_proxy), ('file://tmp/a', None)]:
            req = Request(url)
            assert mw.process_request(req, spider) is None
            self.assertEqual(req.url, url)
            self.assertEqual(req.meta.get('proxy'), proxy)
    def test_proxy_precedence_meta(self):
        # meta['proxy'] on the request overrides any environment proxy.
        os.environ['http_proxy'] = 'https://proxy.com'
        mw = HttpProxyMiddleware()
        req = Request('http://scrapytest.org', meta={'proxy': 'https://new.proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://new.proxy:3128'})
    def test_proxy_auth(self):
        # Credentials embedded in the proxy URL are stripped from meta['proxy']
        # and emitted as a basic-auth Proxy-Authorization header.
        os.environ['http_proxy'] = 'https://user:pass@proxy:3128'
        mw = HttpProxyMiddleware()
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcjpwYXNz')
        # proxy from request.meta
        req = Request('http://scrapytest.org', meta={'proxy': 'https://username:password@proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcm5hbWU6cGFzc3dvcmQ=')
    def test_proxy_auth_empty_passwd(self):
        # An empty password still yields a header ("user:" base64-encoded).
        os.environ['http_proxy'] = 'https://user:@proxy:3128'
        mw = HttpProxyMiddleware()
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcjo=')
        # proxy from request.meta
        req = Request('http://scrapytest.org', meta={'proxy': 'https://username:@proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcm5hbWU6')
    def test_proxy_auth_encoding(self):
        # Non-ASCII credentials are encoded with the configured auth_encoding
        # before being base64'd into the header.
        # utf-8 encoding
        os.environ['http_proxy'] = u'https://m\u00E1n:pass@proxy:3128'
        mw = HttpProxyMiddleware(auth_encoding='utf-8')
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic bcOhbjpwYXNz')
        # proxy from request.meta
        req = Request('http://scrapytest.org', meta={'proxy': u'https://\u00FCser:pass@proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic w7xzZXI6cGFzcw==')
        # default latin-1 encoding
        mw = HttpProxyMiddleware(auth_encoding='latin-1')
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic beFuOnBhc3M=')
        # proxy from request.meta, latin-1 encoding
        req = Request('http://scrapytest.org', meta={'proxy': u'https://\u00FCser:pass@proxy:3128'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic /HNlcjpwYXNz')
    def test_proxy_already_seted(self):
        # An explicit meta['proxy'] = None disables proxying for that request
        # and must be preserved as-is (not replaced by the environment proxy).
        os.environ['http_proxy'] = 'https://proxy.for.http:3128'
        mw = HttpProxyMiddleware()
        req = Request('http://noproxy.com', meta={'proxy': None})
        assert mw.process_request(req, spider) is None
        assert 'proxy' in req.meta and req.meta['proxy'] is None
    def test_no_proxy(self):
        # no_proxy entries exclude matching hosts; '*' excludes everything,
        # but an explicit meta['proxy'] still takes precedence.
        os.environ['http_proxy'] = 'https://proxy.for.http:3128'
        mw = HttpProxyMiddleware()
        os.environ['no_proxy'] = '*'
        req = Request('http://noproxy.com')
        assert mw.process_request(req, spider) is None
        assert 'proxy' not in req.meta
        os.environ['no_proxy'] = 'other.com'
        req = Request('http://noproxy.com')
        assert mw.process_request(req, spider) is None
        assert 'proxy' in req.meta
        os.environ['no_proxy'] = 'other.com,noproxy.com'
        req = Request('http://noproxy.com')
        assert mw.process_request(req, spider) is None
        assert 'proxy' not in req.meta
        # proxy from meta['proxy'] takes precedence
        os.environ['no_proxy'] = '*'
        req = Request('http://noproxy.com', meta={'proxy': 'http://proxy.com'})
        assert mw.process_request(req, spider) is None
        self.assertEqual(req.meta, {'proxy': 'http://proxy.com'})
| bsd-3-clause |
SitePen/mayhem-bower | _debug/ui/dom/events/PointerManager.ts | 15288 | import domUtil = require('../util');
import has = require('../../../has');
import lang = require('dojo/_base/lang');
import ui = require('../../interfaces');
import util = require('../../../util');
// keyCode values for the modifier keys mirrored into the shared `keyboard`
// state. COMMAND_LEFT/COMMAND_RIGHT and META all map to the meta modifier;
// presumably they cover the per-browser codes for the Command/meta key —
// TODO confirm against the supported browser matrix.
enum Keys {
	ALT = 18,
	COMMAND_LEFT = 91,
	COMMAND_RIGHT = 93,
	CONTROL = 17,
	META = 224,
	SHIFT = 16
}
// Minimal local typings for the Touch Events API used below; presumably
// declared here because the ambient DOM library in use did not provide
// them — TODO confirm before removing.
interface TouchEvent extends UIEvent {
	changedTouches:TouchEvent.TouchList;
	altKey:boolean;
	ctrlKey:boolean;
	metaKey:boolean;
	shiftKey:boolean;
	targetTouches:TouchEvent.TouchList;
	touches:TouchEvent.TouchList;
}
module TouchEvent {
	// A single contact point; `identifier` is stable for the touch's lifetime.
	export interface Touch {
		clientX:number;
		clientY:number;
		identifier:number;
		pageX:number;
		pageY:number;
		screenX:number;
		screenY:number;
		target:EventTarget;
	}
	// Array-like collection of Touch objects as exposed by the browser.
	export interface TouchList {
		[index:number]:TouchEvent.Touch;
		item(index:number):TouchEvent.Touch;
		length:number;
	}
}
/**
 * Creates a keydown/keyup listener that records the pressed/released state
 * of a modifier key into the shared `keyboard` snapshot.
 *
 * @param value The state to record: true for keydown listeners, false for keyup.
 * @returns An EventListener suitable for window-level key events.
 */
function createModifierSetter(value:boolean):EventListener {
	return function (event:KeyboardEvent):void {
		var isMac:boolean = navigator.platform.indexOf('Mac') === 0;
		var code:number = event.keyCode;
		if (code === Keys.ALT) {
			keyboard.alt = value;
		}
		else if (code === Keys.COMMAND_LEFT || code === Keys.COMMAND_RIGHT || code === Keys.META) {
			keyboard.meta = value;
			// `shortcut` mirrors the platform shortcut key: Command on Mac.
			keyboard.shortcut = value && isMac;
		}
		else if (code === Keys.CONTROL) {
			keyboard.control = value;
			// ...and Control everywhere else.
			keyboard.shortcut = value && !isMac;
		}
		else if (code === Keys.SHIFT) {
			keyboard.shift = value;
		}
	};
}
/**
 * Compares two pointer snapshots and reports, per tracked property, whether
 * the value changed between them.
 *
 * @param oldObject The previous pointer state.
 * @param newObject The current pointer state.
 * @returns A Changes map with a boolean per tracked property.
 */
function keyDiff(oldObject:PointerManager.Pointer, newObject:PointerManager.Pointer):PointerManager.Changes {
	var trackedKeys:string[] = [ 'buttons', 'clientX', 'clientY', 'height', 'pressure', 'tiltX', 'tiltY', 'width' ];
	var changes:any = {};
	for (var i:number = 0; i < trackedKeys.length; ++i) {
		var key:string = trackedKeys[i];
		changes[key] = (<any> oldObject)[key] !== (<any> newObject)[key];
	}
	return <PointerManager.Changes> changes;
}
/**
 * Copies pointer state from `source` onto `target`, skipping the
 * state-tracking bookkeeping properties and deep-copying `modifiers`.
 *
 * @param target The pointer object receiving the properties.
 * @param source The pointer object supplying the properties.
 * @returns The mutated target.
 */
function mixin(target:PointerManager.Pointer, source:PointerManager.Pointer):PointerManager.Pointer {
	// TS7017
	var _target:any = target;
	var _source:any = source;
	for (var key in _source) {
		if (key === 'lastState' || key === 'lastChanged') {
			// Never copy snapshot bookkeeping between pointer states.
			continue;
		}
		// `modifiers` holds a nested object; clone it so later keyboard
		// changes do not retroactively alter this copy.
		_target[key] = (key === 'modifiers') ? lang.mixin({}, _source[key]) : _source[key];
	}
	return _target;
}
// Shared snapshot of the current keyboard modifier state, kept up to date by
// the window-level keydown/keyup listeners installed via createModifierSetter.
// `shortcut` tracks the platform shortcut key (Command on Mac, Control elsewhere).
var keyboard:ui.PointerEvent.Modifiers = {
	alt: false,
	control: false,
	meta: false,
	shift: false,
	shortcut: false
};
// Maps native DOM event names (mouse, MSPointer, Pointer Events, and Touch
// Events variants) to the normalized pointer lifecycle phases used
// internally: 'add' (entered), 'change' (moved/pressed/released),
// 'remove' (left the surface) and 'cancel' (aborted by the system).
var nativeEventMap:HashMap<string> = {
	mousedown: 'change',
	mouseenter: 'add',
	mouseleave: 'remove',
	mousemove: 'change',
	mouseup: 'change',
	MSPointerCancel: 'cancel',
	MSPointerDown: 'change',
	MSPointerEnter: 'add',
	MSPointerHover: 'change',
	MSPointerLeave: 'remove',
	MSPointerMove: 'change',
	MSPointerUp: 'change',
	pointercancel: 'cancel',
	pointerdown: 'change',
	pointerenter: 'add',
	pointerleave: 'remove',
	pointermove: 'change',
	pointerup: 'change',
	touchcancel: 'cancel',
	touchend: 'remove',
	touchmove: 'change',
	touchstart: 'add'
};
class PointerManager {
private _handles:IHandle[] = [];
private _listeners:{ [type:string]:PointerManager.Listener[]; };
pointers:{
[pointerId:number]:PointerManager.Pointer;
numActive:number;
};
private static _keyboardActive:boolean = false;
constructor(root:EventTarget) {
var handles:IHandle[] = this._handles = [];
this._listeners = {};
var pointers:{
[pointerId:number]:PointerManager.Pointer;
numActive:number;
} = this.pointers = { numActive: 0 };
var self = this;
// TODO: Maybe not all information should be cleared from the pointer? Due to issues with cancel/touchend where
// necessary properties disappeared before emitting the event
function clearPointer(pointerId:number):PointerManager.Pointer {
var pointer:PointerManager.Pointer = pointers[pointerId];
mixin(pointer.lastState, pointer);
for (var key in pointer) {
if (key === 'lastState' || key === 'pointerId' || key === 'pointerType' || key === 'timestamp') {
continue;
}
// TS7017
(<any> pointer)[key] = null;
}
pointer.isActive = false;
--pointers.numActive;
return pointer;
}
if (!PointerManager._keyboardActive) {
domUtil.on(window, 'keydown', createModifierSetter(true));
domUtil.on(window, 'keyup', createModifierSetter(false));
PointerManager._keyboardActive = true;
}
if (has('dom-pointerevents') || has('dom-mspointerevents')) {
var pointerChanged = function (event:PointerEvent):void {
// Since we are listening on capture phase we need to discard child events that do not belong to us
if (event.type === (has('dom-pointerevents') ? 'pointerenter' : 'MSPointerEnter') && event.target !== root) {
return;
}
var pointer:PointerManager.Pointer = pointers[event.pointerId];
if (!pointer) {
pointer = pointers[event.pointerId] = <any> { lastChanged: [], lastState: {} };
}
if (!pointer.isActive) {
if (has('dom-pointerevents')) {
(<Element> root).setPointerCapture(event.pointerId);
}
else /* has('dom-mspointerevents') */ {
(<Element> root).msSetPointerCapture(event.pointerId);
}
pointer.isActive = true;
++pointers.numActive;
}
pointer.lastState = mixin(pointer.lastState, pointer);
pointer.buttons = event.buttons;
pointer.clientX = event.clientX;
pointer.clientY = event.clientY;
pointer.height = event.height;
pointer.isPrimary = event.isPrimary;
pointer.modifiers = lang.mixin(<ui.PointerEvent.Modifiers> {}, keyboard),
pointer.pointerId = event.pointerId;
pointer.pointerType = event.pointerType;
pointer.pressure = event.pressure;
pointer.tiltX = event.tiltX;
pointer.tiltY = event.tiltY;
pointer.timestamp = event.timeStamp;
pointer.width = event.width;
pointer.lastChanged = keyDiff(pointer.lastState, pointer);
self._emit(event, pointer);
};
var pointerRemoved = function (event:PointerEvent):void {
// Since we are listening on capture phase we need to discard child events that do not belong to us
if (event.type === (has('dom-pointerevents') ? 'pointerleave' : 'MSPointerLeave') && event.target !== root) {
return;
}
var pointer:PointerManager.Pointer = clearPointer(event.pointerId);
self._emit(event, pointer);
};
if (has('dom-pointerevents')) {
handles.push(
domUtil.on(root, 'pointercancel', pointerRemoved),
domUtil.on(root, 'pointerdown', pointerChanged),
domUtil.on(root, 'pointerenter', pointerChanged),
domUtil.on(root, 'pointerleave', pointerRemoved),
domUtil.on(root, 'pointermove', pointerChanged),
domUtil.on(root, 'pointerup', pointerChanged)
);
}
else /* has('dom-mspointerevents') */ {
handles.push(
domUtil.on(root, 'MSPointerCancel', pointerRemoved),
domUtil.on(root, 'MSPointerDown', pointerChanged),
domUtil.on(root, 'MSPointerEnter', pointerChanged),
domUtil.on(root, 'MSPointerHover', pointerChanged),
domUtil.on(root, 'MSPointerLeave', pointerRemoved),
domUtil.on(root, 'MSPointerMove', pointerChanged),
domUtil.on(root, 'MSPointerUp', pointerChanged)
);
}
}
else {
if (has('dom-touch')) {
var primaryId:number;
var FINGER_SIZE:number = 22;
// Capture-phase handler for touchstart/touchmove: creates or updates a
// tracked Pointer record for every entry in event.changedTouches and
// emits one change per touch.
var touchChanged = function (event:TouchEvent):void {
    // Mouse is currently controlling, stop and ignore touch events
    // (the mouse pointer is tracked under the sentinel id NaN).
    if (pointers[NaN] && pointers[NaN].isActive) {
        event.preventDefault();
        return;
    }
    for (var i:number = 0, touch:TouchEvent.Touch; (touch = event.changedTouches[i]); ++i) {
        var pointer:PointerManager.Pointer = pointers[touch.identifier];
        if (!pointer) {
            pointer = pointers[touch.identifier] = <any> { lastChanged: [], lastState: {} };
        }
        if (!pointer.isActive) {
            pointer.isActive = true;
            ++pointers.numActive;
        }
        // First touch to arrive becomes the primary pointer.
        // NOTE(review): a touch identifier of 0 is falsy and would let a
        // later touch steal primary status — confirm against target browsers.
        if (!primaryId) {
            primaryId = touch.identifier;
        }
        // Snapshot the previous state so keyDiff below can report what changed.
        pointer.lastState = mixin(pointer.lastState, pointer);
        pointer.buttons = 1;
        pointer.clientX = touch.clientX;
        pointer.clientY = touch.clientY;
        pointer.height = FINGER_SIZE;
        pointer.isPrimary = touch.identifier === primaryId;
        // NOTE(review): trailing ',' makes the next statement part of a comma
        // expression; it still executes but was probably meant to be ';'.
        pointer.modifiers = lang.mixin(<ui.PointerEvent.Modifiers> {}, keyboard),
        pointer.pointerId = touch.identifier;
        pointer.pointerType = 'touch';
        pointer.pressure = 0.5;
        pointer.tiltX = 0;
        pointer.tiltY = 0;
        pointer.timestamp = event.timeStamp;
        pointer.width = FINGER_SIZE;
        pointer.lastChanged = keyDiff(pointer.lastState, pointer);
        self._emit(event, pointer);
    }
};
// Capture-phase handler for touchend/touchcancel: deactivates each changed
// touch and resets primary tracking once the surface is clear.
var touchRemoved = function (event:TouchEvent):void {
    event.preventDefault();
    for (var i:number = 0, touch:TouchEvent.Touch; (touch = event.changedTouches[i]); ++i) {
        var pointer:PointerManager.Pointer = clearPointer(touch.identifier);
        // Secondary pointers are not promoted to primary once the primary leaves the surface; a
        // new primary can only exist after all touches are gone
        if (!event.touches.length) {
            primaryId = null;
        }
        self._emit(event, pointer);
    }
};
handles.push(
domUtil.on(root, 'touchcancel', touchRemoved),
domUtil.on(root, 'touchend', touchRemoved),
domUtil.on(root, 'touchmove', touchChanged),
domUtil.on(root, 'touchstart', touchChanged)
);
}
// Android can have both touch screen and mouse simultaneously (Chromebook)
if (has('dom-mouse')) {
if (!has('dom-mouse-buttons')) {
var isButtonPressed:boolean = false;
}
// Normalizes native mouse events into the shared Pointer model. The mouse
// is tracked under the sentinel pointer id NaN so it can never collide with
// a real touch/pen identifier.
var mouseChanged = function (event:MouseEvent):void {
    // Touch is currently controlling, stop and ignore mouse events
    if (pointers.numActive > 0 && (!pointers[NaN] || !pointers[NaN].isActive)) {
        event.preventDefault();
        return;
    }
    // Without the `buttons` API the pressed state must be tracked manually
    // from mousedown/mouseup transitions (see isButtonPressed above).
    if (!has('dom-mouse-buttons')) {
        if (event.type === 'mousedown') {
            isButtonPressed = true;
        }
        else if (event.type === 'mouseup') {
            isButtonPressed = false;
        }
    }
    var pointer:PointerManager.Pointer = pointers[NaN];
    if (!pointer) {
        pointer = pointers[NaN] = <any> { lastChanged: [], lastState: {} };
    }
    // If the pointer is already active it should never be activated again
    if (event.type === 'mouseenter' && pointer.isActive) {
        return;
    }
    if (!pointer.isActive) {
        pointer.isActive = true;
        ++pointers.numActive;
    }
    // Snapshot previous state so keyDiff below can report what changed.
    pointer.lastState = mixin(pointer.lastState, pointer);
    if (has('dom-mouse-buttons')) {
        pointer.buttons = event.buttons;
    }
    else {
        // `buttons` API: 1 = LMB, 2 = *R*MB, 4 = *M*MB, ...
        // `button` API: 0 = LMB, 1 = *M*MB, 2 = *R*MB, ...
        // So, we need to reverse MMB and RMB
        var buttonMap: HashMap<number> = {
            1: 2,
            2: 1
        };
        pointer.buttons = isButtonPressed ? Math.pow(2, buttonMap[event.button] || event.button) : 0;
    }
    pointer.clientX = event.clientX;
    pointer.clientY = event.clientY;
    pointer.height = 0;
    pointer.isPrimary = true;
    // NOTE(review): trailing ',' makes the next statement part of a comma
    // expression; it still executes but was probably meant to be ';'.
    pointer.modifiers = lang.mixin(<ui.PointerEvent.Modifiers> {}, keyboard),
    pointer.pointerId = NaN;
    pointer.pointerType = 'mouse';
    pointer.pressure = pointer.buttons > 0 ? 0.5 : 0;
    pointer.tiltX = 0;
    pointer.tiltY = 0;
    // Fallback timestamp for engines whose events lack timeStamp.
    pointer.timestamp = event.timeStamp || /* has('ie') === 8 */ +new Date();
    pointer.width = 0;
    pointer.lastChanged = keyDiff(pointer.lastState, pointer);
    self._emit(event, pointer);
};
// Capture-phase handler for mouseleave: deactivates the mouse pointer
// (tracked under the sentinel id NaN) and emits its final state.
var mouseRemoved = function (event:MouseEvent):void {
    // Since we are listening on capture phase we need to discard child events that do not belong to us
    if (event.type === 'mouseleave' && event.target !== root) {
        return;
    }
    var pointer:PointerManager.Pointer = clearPointer(NaN);
    self._emit(event, pointer);
};
handles.push(
domUtil.on(root, 'mousedown', mouseChanged),
domUtil.on(root, 'mouseenter', mouseChanged),
domUtil.on(root, 'mouseleave', mouseRemoved),
domUtil.on(root, 'mousemove', mouseChanged),
domUtil.on(root, 'mouseup', mouseChanged)
);
if (has('dom-dblclick-bug')) {
handles.push(domUtil.on(root, 'dblclick', function (event:MouseEvent):void {
// since the `type` property is being changed, we must copy to a fake event, since trying to
// modify this property is immutable on the native object
var fakeEvent:MouseEvent = <any> {
button: 0,
clientX: event.clientX,
clientY: event.clientY,
target: event.target,
type: 'mousedown'
};
mouseChanged(fakeEvent);
fakeEvent.type = 'mouseup';
mouseChanged(fakeEvent);
}));
}
// it is impossible to know whether or not the mouse button was released outside the window without
// `buttons` but at least we can know if it was released anywhere in the window
if (!has('dom-mouse-buttons')) {
handles.push(
domUtil.on(window, 'mouseup', function (event:MouseEvent):void {
isButtonPressed = false;
}),
domUtil.on(window, 'mouseleave', function (event:MouseEvent):void {
if (event.target === window) {
isButtonPressed = false;
}
})
);
}
}
}
}
destroy():void {
this.destroy = function ():void {};
var handle:IHandle;
while ((handle = this._handles.pop())) {
handle.remove();
}
this._handles = this._listeners = null;
}
// Dispatches a normalized pointer change to every listener registered for
// the mapped event type. A listener returning true cancels the native event.
private _emit(event:Event, pointer:PointerManager.Pointer):void {
    // nativeEventMap translates native event names to the normalized
    // add/change/remove/cancel types used by on().
    var type:string = nativeEventMap[event.type];
    var listeners:PointerManager.Listener[] = this._listeners[type];
    if (!listeners) {
        return;
    }
    for (var i:number = 0, listener:PointerManager.Listener; (listener = listeners[i]); ++i) {
        if (listener.call(this, pointer)) {
            event.preventDefault();
        }
    }
}
// Registers a listener for one of the normalized pointer event types and
// returns a handle whose remove() unregisters it.
// NOTE(review): the generic string overload is declared as returning void
// while the implementation returns IHandle — confirm which is intended.
on(type:'add', listener:PointerManager.Listener):IHandle;
on(type:'cancel', listener:PointerManager.Listener):IHandle;
on(type:'change', listener:PointerManager.Listener):IHandle;
on(type:'remove', listener:PointerManager.Listener):IHandle;
on(type:string, listener:PointerManager.Listener):void;
on(type:string, listener:PointerManager.Listener):IHandle {
    var listeners:PointerManager.Listener[] = this._listeners[type];
    if (!listeners) {
        listeners = this._listeners[type] = [];
    }
    listeners.push(listener);
    // Clearing the captured references makes a second remove() a no-op.
    return util.createHandle(function () {
        util.spliceMatch(listeners, listener);
        listeners = listener = null;
    });
}
}
module PointerManager {
    // Per-property change flags produced by diffing a Pointer against its
    // previous snapshot (see keyDiff usage above).
    export interface Changes {
        buttons:boolean;
        clientX:boolean;
        clientY:boolean;
        height:boolean;
        pressure:boolean;
        tiltX:boolean;
        tiltY:boolean;
        width:boolean;
    }
    // Listener signature for on(); returning true cancels the native event.
    export interface Listener {
        (pointer:PointerManager.Pointer):boolean;
    }
    // Unified pointer record covering mouse, touch and native pointer
    // events. The mouse is identified by pointerId === NaN.
    export interface Pointer {
        buttons:number;
        clientX:number;
        clientY:number;
        height:number;
        isActive:boolean;
        isPrimary:boolean;
        lastChanged:PointerManager.Changes;
        lastState:Pointer;
        modifiers:ui.PointerEvent.Modifiers;
        pointerId:number;
        pointerType:string;
        pressure:number;
        tiltX:number;
        tiltY:number;
        timestamp:number;
        width:number;
    }
}
export = PointerManager;
| bsd-3-clause |
MakMukhi/grpc | src/boringssl/gen_build_yaml.py | 5374 | #!/usr/bin/env python2.7
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import shutil
import sys
import os
import yaml
sys.dont_write_bytecode = True
boring_ssl_root = os.path.abspath(os.path.join(
os.path.dirname(sys.argv[0]),
'../../third_party/boringssl'))
sys.path.append(os.path.join(boring_ssl_root, 'util'))
try:
import generate_build_files
except ImportError:
print yaml.dump({})
sys.exit()
def map_dir(filename):
    """Map a boringssl-relative path into the grpc source tree.

    Files under the boringssl checkout's 'src/' directory live in
    third_party/boringssl/; everything else (generated files) is placed
    under src/boringssl/.
    """
    # startswith is the idiomatic (and length-safe) form of the original
    # filename[0:4] == 'src/' slice comparison.
    if filename.startswith('src/'):
        return 'third_party/boringssl/' + filename[4:]
    else:
        return 'src/boringssl/' + filename
def map_testarg(arg):
    """Rewrite a test argument that looks like a path so it resolves from
    the grpc tree root; plain flags (no '/') pass through unchanged."""
    return 'third_party/boringssl/' + arg if '/' in arg else arg
class Grpc(object):
    """Consumer object handed to boringssl's generate_build_files.

    generate_build_files.main() calls WriteFiles() on each consumer; this
    implementation records the result as a grpc build.yaml fragment in
    self.yaml instead of writing build files to disk.
    """
    # Populated by WriteFiles(); dumped by the script's top level.
    yaml = None
    def WriteFiles(self, files, asm_outputs):
        # `files` maps category names ('ssl', 'crypto', 'test', ...) to
        # lists of paths relative to the boringssl checkout.
        self.yaml = {
            '#': 'generated with tools/buildgen/gen_boring_ssl_build_yaml.py',
            # Kept verbatim so build problems can be traced back to the
            # raw generate_build_files output.
            'raw_boringssl_build_output_for_debugging': {
                'files': files,
                'asm_outputs': asm_outputs,
            },
            'libs': [
                {
                    'name': 'boringssl',
                    'build': 'private',
                    'language': 'c',
                    'secure': 'no',
                    'src': sorted(
                        map_dir(f)
                        for f in files['ssl'] + files['crypto']
                    ),
                    'headers': sorted(
                        map_dir(f)
                        for f in files['ssl_headers'] + files['ssl_internal_headers'] + files['crypto_headers'] + files['crypto_internal_headers']
                    ),
                    'boringssl': True,
                    'defaults': 'boringssl',
                },
                {
                    'name': 'boringssl_test_util',
                    'build': 'private',
                    'language': 'c++',
                    'secure': 'no',
                    'boringssl': True,
                    'defaults': 'boringssl',
                    'src': [
                        map_dir(f)
                        for f in sorted(files['test_support'])
                    ],
                }
            ] + [
                # One private library per boringssl test source file.
                {
                    'name': 'boringssl_%s_lib' % os.path.splitext(os.path.basename(test))[0],
                    'build': 'private',
                    'secure': 'no',
                    'language': 'c' if os.path.splitext(test)[1] == '.c' else 'c++',
                    'src': [map_dir(test)],
                    'vs_proj_dir': 'test/boringssl',
                    'boringssl': True,
                    'defaults': 'boringssl',
                    'deps': [
                        'boringssl_test_util',
                        'boringssl',
                    ]
                }
                for test in sorted(files['test'])
            ],
            # Test binaries wrapping the per-test libraries above.
            'targets': [
                {
                    'name': 'boringssl_%s' % os.path.splitext(os.path.basename(test))[0],
                    'build': 'test',
                    'run': False,
                    'secure': 'no',
                    'language': 'c++',
                    'src': [],
                    'vs_proj_dir': 'test/boringssl',
                    'boringssl': True,
                    'defaults': 'boringssl',
                    'deps': [
                        'boringssl_%s_lib' % os.path.splitext(os.path.basename(test))[0],
                        'boringssl_test_util',
                        'boringssl',
                    ]
                }
                for test in sorted(files['test'])
            ],
            # Test invocations: each entry of files['tests'] is a command
            # line (binary followed by its arguments).
            'tests': [
                {
                    'name': 'boringssl_%s' % os.path.basename(test[0]),
                    'args': [map_testarg(arg) for arg in test[1:]],
                    'exclude_configs': ['asan', 'ubsan'],
                    'ci_platforms': ['linux', 'mac', 'posix', 'windows'],
                    'platforms': ['linux', 'mac', 'posix', 'windows'],
                    'flaky': False,
                    'language': 'c++',
                    'boringssl': True,
                    'defaults': 'boringssl',
                    'cpu_cost': 1.0
                }
                for test in files['tests']
            ]
        }
# Run from the directory containing this script so relative paths in the
# generated yaml are stable.
os.chdir(os.path.dirname(sys.argv[0]))
# Mirror the boringssl checkout into ./src via symlinks so that
# generate_build_files sees the directory layout it expects.
os.mkdir('src')
try:
    for f in os.listdir(boring_ssl_root):
        os.symlink(os.path.join(boring_ssl_root, f),
                   os.path.join('src', f))
    g = Grpc()
    generate_build_files.main([g])
    # Python 2 print statement: emit the yaml fragment on stdout.
    print yaml.dump(g.yaml)
finally:
    # Always remove the temporary symlink tree, even on failure.
    shutil.rmtree('src')
| bsd-3-clause |
kf6kjg/halcyon | OpenSim/Region/Physics/ConvexDecompositionDotNet/ConvexBuilder.cs | 13638 | /* The MIT License
*
* Copyright (c) 2010 Intel Corporation.
* All rights reserved.
*
* Based on the convexdecomposition library from
* <http://codesuppository.googlecode.com> by John W. Ratcliff and Stan Melax.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
using System;
using System.Collections.Generic;
using System.Diagnostics;
namespace OpenSim.Region.Physics.ConvexDecompositionDotNet
{
/// <summary>
/// Input descriptor for a convex decomposition run: the source mesh, the
/// tuning options, and the callback that receives each generated hull.
/// </summary>
public class DecompDesc
{
    public List<float3> mVertices;
    public List<int> mIndices;
    // options
    public uint mDepth; // depth to split, a maximum of 10, generally not over 7.
    public float mCpercent; // the concavity threshold percentage. 0=20 is reasonable.
    public float mPpercent; // the percentage volume conservation threshold to collapse hulls. 0-30 is reasonable.
    // hull output limits.
    public uint mMaxVertices; // maximum number of vertices in the output hull. Recommended 32 or less.
    public float mSkinWidth; // a skin width to apply to the output hulls.
    public ConvexDecompositionCallback mCallback; // the interface to receive back the results.
    public DecompDesc()
    {
        // Defaults mirror the upstream convexdecomposition library.
        mDepth = 5;
        mCpercent = 5;
        mPpercent = 5;
        mMaxVertices = 32;
    }
}
/// <summary>
/// A convex hull plus cached merge metadata: its volume, its bounding-box
/// diagonal, and a slightly inflated AABB used for cheap overlap rejection.
/// </summary>
public class CHull
{
    public float[] mMin = new float[3];
    public float[] mMax = new float[3];
    public float mVolume;
    public float mDiagonal;
    public ConvexResult mResult;

    public CHull(ConvexResult result)
    {
        mResult = new ConvexResult(result);
        mVolume = Concavity.computeMeshVolume(result.HullVertices, result.HullIndices);
        mDiagonal = computeBounds(result.HullVertices, mMin, mMax);

        // Inflate the AABB by 1/10th per axis so that near-touching hulls
        // still count as merge candidates in overlap().
        for (int axis = 0; axis < 3; axis++)
        {
            float slop = (mMax[axis] - mMin[axis]) * 0.1f;
            mMin[axis] -= slop;
            mMax[axis] += slop;
        }
    }

    public void Dispose()
    {
        mResult = null;
    }

    /// <summary>True when the inflated AABBs of the two hulls intersect.</summary>
    public bool overlap(CHull h)
    {
        return aabbsTouch(mMin, mMax, h.mMin, h.mMax);
    }

    // Fills bmin/bmax with the axis-aligned bounds of the point set and
    // returns the length of the box diagonal.
    private static float computeBounds(List<float3> points, float[] bmin, float[] bmax)
    {
        float3 seed = points[0];
        bmin[0] = bmax[0] = seed.x;
        bmin[1] = bmax[1] = seed.y;
        bmin[2] = bmax[2] = seed.z;
        for (int i = 1; i < points.Count; i++)
        {
            float3 p = points[i];
            for (int axis = 0; axis < 3; axis++)
            {
                if (p[axis] < bmin[axis]) bmin[axis] = p[axis];
                if (p[axis] > bmax[axis]) bmax[axis] = p[axis];
            }
        }
        float dx = bmax[0] - bmin[0];
        float dy = bmax[1] - bmin[1];
        float dz = bmax[2] - bmin[2];
        return (float)Math.Sqrt(dx * dx + dy * dy + dz * dz);
    }

    // Standard separating-axis test for two AABBs: disjoint if separated
    // on any axis, otherwise they overlap.
    private static bool aabbsTouch(float[] minA, float[] maxA, float[] minB, float[] maxB)
    {
        for (int axis = 0; axis < 3; axis++)
        {
            if (maxB[axis] < minA[axis] || minB[axis] > maxA[axis])
                return false;
        }
        return true;
    }
}
/// <summary>
/// Drives a convex decomposition: collects candidate hulls, repeatedly
/// merges pairs whose union stays nearly convex, then re-simplifies each
/// surviving hull to the caller's vertex budget and reports it.
/// </summary>
public class ConvexBuilder
{
    public List<CHull> mChulls = new List<CHull>();
    private ConvexDecompositionCallback mCallback;

    // Tuning knobs; overwritten from the DecompDesc in process().
    private int MAXDEPTH = 8;
    private float CONCAVE_PERCENT = 1f;
    private float MERGE_PERCENT = 2f;

    public ConvexBuilder(ConvexDecompositionCallback callback)
    {
        mCallback = callback;
    }

    public void Dispose()
    {
        int i;
        for (i = 0; i < mChulls.Count; i++)
        {
            CHull cr = mChulls[i];
            cr.Dispose();
        }
    }

    /// <summary>
    /// True when triangle (i1,i2,i3) references exactly the same three
    /// vertex indices as (ci1,ci2,ci3), in any order.
    /// </summary>
    public bool isDuplicate(uint i1, uint i2, uint i3, uint ci1, uint ci2, uint ci3)
    {
        uint dcount = 0;
        Debug.Assert(i1 != i2 && i1 != i3 && i2 != i3);
        Debug.Assert(ci1 != ci2 && ci1 != ci3 && ci2 != ci3);
        if (i1 == ci1 || i1 == ci2 || i1 == ci3)
            dcount++;
        if (i2 == ci1 || i2 == ci2 || i2 == ci3)
            dcount++;
        if (i3 == ci1 || i3 == ci2 || i3 == ci3)
            dcount++;
        return dcount == 3;
    }

    /// <summary>
    /// Appends the triangles of <paramref name="cr"/> to the shared vertex
    /// pool and index list used to build a combined mesh.
    /// </summary>
    public void getMesh(ConvexResult cr, VertexPool vc, List<int> indices)
    {
        List<int> src = cr.HullIndices;
        for (int i = 0; i < src.Count / 3; i++)
        {
            int i1 = src[i * 3 + 0];
            int i2 = src[i * 3 + 1];
            int i3 = src[i * 3 + 2];
            float3 p1 = cr.HullVertices[i1];
            float3 p2 = cr.HullVertices[i2];
            float3 p3 = cr.HullVertices[i3];
            // getIndex de-duplicates vertices shared between hulls.
            i1 = vc.getIndex(p1);
            i2 = vc.getIndex(p2);
            i3 = vc.getIndex(p3);
            // FIX: record the remapped triangle. Previously the indices
            // were computed but never stored, leaving `indices` empty, so
            // canMerge()'s tcount==0 guard always fired and no pair of
            // hulls could ever be merged.
            indices.Add(i1);
            indices.Add(i2);
            indices.Add(i3);
        }
    }

    /// <summary>
    /// Returns the merged hull of a and b when their combined volume is
    /// close enough to the sum of their volumes (controlled by
    /// MERGE_PERCENT); otherwise null.
    /// </summary>
    public CHull canMerge(CHull a, CHull b)
    {
        if (!a.overlap(b)) // if their AABB's (with a little slop) don't overlap, then return.
            return null;
        CHull ret = null;
        // ok..we are going to combine both meshes into a single mesh
        // and then we are going to compute the concavity...
        VertexPool vc = new VertexPool();
        List<int> indices = new List<int>();
        getMesh(a.mResult, vc, indices);
        getMesh(b.mResult, vc, indices);
        List<float3> vertices = vc.GetVertices();
        int tcount = indices.Count / 3;
        // don't do anything if the combined hull is empty
        if (tcount == 0)
        {
            vc.Clear();
            return null;
        }
        HullResult hresult = new HullResult();
        HullDesc desc = new HullDesc();
        desc.SetHullFlag(HullFlag.QF_TRIANGLES);
        desc.Vertices = vertices;
        HullError hret = HullUtils.CreateConvexHull(desc, ref hresult);
        if (hret == HullError.QE_OK)
        {
            float combineVolume = Concavity.computeMeshVolume(hresult.OutputVertices, hresult.Indices);
            float sumVolume = a.mVolume + b.mVolume;
            // Accept the merge only when the union adds little extra volume.
            float percent = (sumVolume * 100) / combineVolume;
            if (percent >= (100.0f - MERGE_PERCENT))
            {
                ConvexResult cr = new ConvexResult(hresult.OutputVertices, hresult.Indices);
                ret = new CHull(cr);
            }
        }
        vc.Clear();
        return ret;
    }

    /// <summary>
    /// Performs one merge pass: finds the first mergeable pair, replaces
    /// it with the merged hull, and returns true if a merge happened.
    /// </summary>
    public bool combineHulls()
    {
        bool combine = false;
        sortChulls(mChulls); // sort the convex hulls, largest volume to least...
        List<CHull> output = new List<CHull>(); // the output hulls...
        int i;
        for (i = 0; i < mChulls.Count && !combine; ++i)
        {
            CHull cr = mChulls[i];
            int j;
            for (j = 0; j < mChulls.Count; j++)
            {
                CHull match = mChulls[j];
                if (cr != match) // don't try to merge a hull with itself, that be stoopid
                {
                    CHull merge = canMerge(cr, match); // if we can merge these two....
                    if (merge != null)
                    {
                        output.Add(merge);
                        // Copy the remaining unmerged hulls (skipping the
                        // partner we just consumed) into the output list.
                        ++i;
                        while (i != mChulls.Count)
                        {
                            CHull cr2 = mChulls[i];
                            if (cr2 != match)
                            {
                                output.Add(cr2);
                            }
                            i++;
                        }
                        cr.Dispose();
                        match.Dispose();
                        combine = true;
                        break;
                    }
                }
            }
            if (combine)
            {
                break;
            }
            else
            {
                output.Add(cr);
            }
        }
        if (combine)
        {
            mChulls.Clear();
            mChulls = output;
            output.Clear();
        }
        return combine;
    }

    /// <summary>
    /// Runs the full decomposition described by <paramref name="desc"/>,
    /// invoking desc.mCallback-registered callback for each final hull.
    /// Returns the number of hulls produced.
    /// </summary>
    public int process(DecompDesc desc)
    {
        int ret = 0;
        MAXDEPTH = (int)desc.mDepth;
        CONCAVE_PERCENT = desc.mCpercent;
        MERGE_PERCENT = desc.mPpercent;
        ConvexDecomposition.calcConvexDecomposition(desc.mVertices, desc.mIndices, ConvexDecompResult, 0f, 0, MAXDEPTH, CONCAVE_PERCENT, MERGE_PERCENT);
        while (combineHulls()) // keep combining hulls until I can't combine any more...
            ;
        int i;
        for (i = 0; i < mChulls.Count; i++)
        {
            CHull cr = mChulls[i];
            // before we hand it back to the application, we need to regenerate the hull based on the
            // limits given by the user.
            ConvexResult c = cr.mResult; // the high resolution hull...
            HullResult result = new HullResult();
            HullDesc hdesc = new HullDesc();
            hdesc.SetHullFlag(HullFlag.QF_TRIANGLES);
            hdesc.Vertices = c.HullVertices;
            hdesc.MaxVertices = desc.mMaxVertices; // maximum number of vertices allowed in the output
            if (desc.mSkinWidth != 0f)
            {
                hdesc.SkinWidth = desc.mSkinWidth;
                hdesc.SetHullFlag(HullFlag.QF_SKIN_WIDTH); // do skin width computation.
            }
            HullError ret2 = HullUtils.CreateConvexHull(hdesc, ref result);
            if (ret2 == HullError.QE_OK)
            {
                ConvexResult r = new ConvexResult(result.OutputVertices, result.Indices);
                r.mHullVolume = Concavity.computeMeshVolume(result.OutputVertices, result.Indices); // the volume of the hull.
                // compute the best fit OBB
                //computeBestFitOBB(result.mNumOutputVertices, result.mOutputVertices, sizeof(float) * 3, r.mOBBSides, r.mOBBTransform);
                //r.mOBBVolume = r.mOBBSides[0] * r.mOBBSides[1] * r.mOBBSides[2]; // compute the OBB volume.
                //fm_getTranslation(r.mOBBTransform, r.mOBBCenter); // get the translation component of the 4x4 matrix.
                //fm_matrixToQuat(r.mOBBTransform, r.mOBBOrientation); // extract the orientation as a quaternion.
                //r.mSphereRadius = computeBoundingSphere(result.mNumOutputVertices, result.mOutputVertices, r.mSphereCenter);
                //r.mSphereVolume = fm_sphereVolume(r.mSphereRadius);
                mCallback(r);
            }
            result = null;
            cr.Dispose();
        }
        ret = mChulls.Count;
        mChulls.Clear();
        return ret;
    }

    /// <summary>Callback fed to calcConvexDecomposition: collects hulls.</summary>
    public void ConvexDecompResult(ConvexResult result)
    {
        CHull ch = new CHull(result);
        mChulls.Add(ch);
    }

    /// <summary>Sorts hulls by volume (ascending, per float.CompareTo).</summary>
    public void sortChulls(List<CHull> hulls)
    {
        hulls.Sort(delegate(CHull a, CHull b) { return a.mVolume.CompareTo(b.mVolume); });
    }
}
}
| bsd-3-clause |
jamestwebber/scipy | scipy/io/matlab/tests/test_streams.py | 7761 | """ Testing
"""
from __future__ import division, print_function, absolute_import
import os
import sys
import zlib
from io import BytesIO
if sys.version_info[0] >= 3:
cStringIO = BytesIO
else:
from cStringIO import StringIO as cStringIO
from tempfile import mkstemp
from contextlib import contextmanager
import numpy as np
from numpy.testing import assert_, assert_equal
from pytest import raises as assert_raises
from scipy.io.matlab.streams import (make_stream,
GenericStream, cStringStream, FileStream, ZlibInputStream,
_read_into, _read_string, BLOCK_SIZE)
IS_PYPY = ('__pypy__' in sys.modules)
@contextmanager
def setup_test_file():
    """Yield (file stream, BytesIO, cStringIO) views of the same bytes.

    Creates a temporary file containing b'a\\x00string', then yields a
    read-mode file object plus in-memory equivalents so each stream
    wrapper can be exercised against identical data.
    """
    val = b'a\x00string'
    fd, fname = mkstemp()
    # FIX: unlink in a finally block — previously an exception raised in
    # the with-block body left the temporary file behind on disk.
    try:
        with os.fdopen(fd, 'wb') as fs:
            fs.write(val)
        with open(fname, 'rb') as fs:
            gs = BytesIO(val)
            cs = cStringIO(val)
            yield fs, gs, cs
    finally:
        os.unlink(fname)
def test_make_stream():
    # make_stream should pick the wrapper matching the underlying object:
    # GenericStream for BytesIO, cStringStream for cStringIO (Python 2,
    # non-PyPy only — the alias is BytesIO on Python 3), FileStream for
    # real file objects.
    with setup_test_file() as (fs, gs, cs):
        # test stream initialization
        assert_(isinstance(make_stream(gs), GenericStream))
        if sys.version_info[0] < 3 and not IS_PYPY:
            assert_(isinstance(make_stream(cs), cStringStream))
        assert_(isinstance(make_stream(fs), FileStream))
def test_tell_seek():
    # Every stream wrapper must support the three whence modes of seek()
    # (0 = absolute, 1 = relative, 2 = from end) and report position via
    # tell(); seek() is expected to return 0 on success.
    with setup_test_file() as (fs, gs, cs):
        for s in (fs, gs, cs):
            st = make_stream(s)
            res = st.seek(0)
            assert_equal(res, 0)
            assert_equal(st.tell(), 0)
            res = st.seek(5)
            assert_equal(res, 0)
            assert_equal(st.tell(), 5)
            # relative seek from position 5
            res = st.seek(2, 1)
            assert_equal(res, 0)
            assert_equal(st.tell(), 7)
            # seek relative to end of the 8-byte payload
            res = st.seek(-2, 2)
            assert_equal(res, 0)
            assert_equal(st.tell(), 6)
def test_read():
    # Exercises read(-1)/read(n) on each wrapper plus the module-private
    # helpers: _read_into and _read_string must raise IOError when asked
    # for more bytes than remain.
    with setup_test_file() as (fs, gs, cs):
        for s in (fs, gs, cs):
            st = make_stream(s)
            st.seek(0)
            res = st.read(-1)
            assert_equal(res, b'a\x00string')
            st.seek(0)
            res = st.read(4)
            assert_equal(res, b'a\x00st')
            # read into
            st.seek(0)
            res = _read_into(st, 4)
            assert_equal(res, b'a\x00st')
            res = _read_into(st, 4)
            assert_equal(res, b'ring')
            assert_raises(IOError, _read_into, st, 2)
            # read alloc
            st.seek(0)
            res = _read_string(st, 4)
            assert_equal(res, b'a\x00st')
            res = _read_string(st, 4)
            assert_equal(res, b'ring')
            assert_raises(IOError, _read_string, st, 2)
class TestZlibInputStream(object):
    """Tests for ZlibInputStream: streaming decompression with seek/tell
    over a bounded region of compressed bytes."""

    def _get_data(self, size):
        # Return (stream of compressed bytes, compressed length, raw bytes)
        # for `size` random bytes.
        data = np.random.randint(0, 256, size).astype(np.uint8).tostring()
        compressed_data = zlib.compress(data)
        stream = BytesIO(compressed_data)
        return stream, len(compressed_data), data

    def test_read(self):
        # Sizes straddle BLOCK_SIZE to exercise the internal block-buffered
        # decompression paths on both sides of the boundary.
        SIZES = [0, 1, 10, BLOCK_SIZE//2, BLOCK_SIZE-1,
                 BLOCK_SIZE, BLOCK_SIZE+1, 2*BLOCK_SIZE-1]
        READ_SIZES = [BLOCK_SIZE//2, BLOCK_SIZE-1,
                      BLOCK_SIZE, BLOCK_SIZE+1]
        def check(size, read_size):
            # Reassemble the payload via repeated bounded reads and compare
            # against the original bytes.
            compressed_stream, compressed_data_len, data = self._get_data(size)
            stream = ZlibInputStream(compressed_stream, compressed_data_len)
            data2 = b''
            so_far = 0
            while True:
                block = stream.read(min(read_size,
                                        size - so_far))
                if not block:
                    break
                so_far += len(block)
                data2 += block
            assert_equal(data, data2)
        for size in SIZES:
            for read_size in READ_SIZES:
                check(size, read_size)

    def test_read_max_length(self):
        # Reading past the declared compressed length must raise IOError,
        # and the underlying stream must stop exactly at that length even
        # when trailing garbage follows.
        size = 1234
        data = np.random.randint(0, 256, size).astype(np.uint8).tostring()
        compressed_data = zlib.compress(data)
        compressed_stream = BytesIO(compressed_data + b"abbacaca")
        stream = ZlibInputStream(compressed_stream, len(compressed_data))
        stream.read(len(data))
        assert_equal(compressed_stream.tell(), len(compressed_data))
        assert_raises(IOError, stream.read, 1)

    def test_read_bad_checksum(self):
        # A corrupted adler32 trailer must surface as zlib.error on read.
        data = np.random.randint(0, 256, 10).astype(np.uint8).tostring()
        compressed_data = zlib.compress(data)
        # break checksum
        compressed_data = compressed_data[:-1] + bytes([(compressed_data[-1] + 1) & 255])
        compressed_stream = BytesIO(compressed_data)
        stream = ZlibInputStream(compressed_stream, len(compressed_data))
        assert_raises(zlib.error, stream.read, len(data))

    def test_seek(self):
        # Absolute (whence=0) and relative (whence=1) seeks are supported;
        # seeking from the end (whence=2), backwards, or with an unknown
        # whence must raise. Seeking past the end only fails on read.
        compressed_stream, compressed_data_len, data = self._get_data(1024)
        stream = ZlibInputStream(compressed_stream, compressed_data_len)
        stream.seek(123)
        p = 123
        assert_equal(stream.tell(), p)
        d1 = stream.read(11)
        assert_equal(d1, data[p:p+11])
        stream.seek(321, 1)
        p = 123+11+321
        assert_equal(stream.tell(), p)
        d2 = stream.read(21)
        assert_equal(d2, data[p:p+21])
        stream.seek(641, 0)
        p = 641
        assert_equal(stream.tell(), p)
        d3 = stream.read(11)
        assert_equal(d3, data[p:p+11])
        assert_raises(IOError, stream.seek, 10, 2)
        assert_raises(IOError, stream.seek, -1, 1)
        assert_raises(ValueError, stream.seek, 1, 123)
        stream.seek(10000, 1)
        assert_raises(IOError, stream.read, 12)

    def test_seek_bad_checksum(self):
        # Seeking decompresses under the hood, so a broken checksum must
        # also raise zlib.error from seek().
        data = np.random.randint(0, 256, 10).astype(np.uint8).tostring()
        compressed_data = zlib.compress(data)
        # break checksum
        compressed_data = compressed_data[:-1] + bytes([(compressed_data[-1] + 1) & 255])
        compressed_stream = BytesIO(compressed_data)
        stream = ZlibInputStream(compressed_stream, len(compressed_data))
        assert_raises(zlib.error, stream.seek, len(data))

    def test_all_data_read(self):
        # all_data_read() flips to True only once the full logical stream
        # has been consumed.
        compressed_stream, compressed_data_len, data = self._get_data(1024)
        stream = ZlibInputStream(compressed_stream, compressed_data_len)
        assert_(not stream.all_data_read())
        stream.seek(512)
        assert_(not stream.all_data_read())
        stream.seek(1024)
        assert_(stream.all_data_read())

    def test_all_data_read_overlap(self):
        # Payload is sized so the compressed stream's checksum trailer
        # straddles the internal block boundary (BLOCK_SIZE + 2 bytes).
        COMPRESSION_LEVEL = 6
        data = np.arange(33707000).astype(np.uint8).tostring()
        compressed_data = zlib.compress(data, COMPRESSION_LEVEL)
        compressed_data_len = len(compressed_data)
        # check that part of the checksum overlaps
        assert_(compressed_data_len == BLOCK_SIZE + 2)
        compressed_stream = BytesIO(compressed_data)
        stream = ZlibInputStream(compressed_stream, compressed_data_len)
        assert_(not stream.all_data_read())
        stream.seek(len(data))
        assert_(stream.all_data_read())

    def test_all_data_read_bad_checksum(self):
        # Same boundary-straddling layout as above, but with a corrupted
        # trailer: all_data_read() must raise instead of reporting True.
        COMPRESSION_LEVEL = 6
        data = np.arange(33707000).astype(np.uint8).tostring()
        compressed_data = zlib.compress(data, COMPRESSION_LEVEL)
        compressed_data_len = len(compressed_data)
        # check that part of the checksum overlaps
        assert_(compressed_data_len == BLOCK_SIZE + 2)
        # break checksum
        compressed_data = compressed_data[:-1] + bytes([(compressed_data[-1] + 1) & 255])
        compressed_stream = BytesIO(compressed_data)
        stream = ZlibInputStream(compressed_stream, compressed_data_len)
        assert_(not stream.all_data_read())
        stream.seek(len(data))
        assert_raises(zlib.error, stream.all_data_read)
| bsd-3-clause |
firebug/firebug-lite | content/firediff/content/firediff-original/diffModule.js | 15727 | /* See license.txt for terms of usage */
FBL.ns(function() { with (FBL) {
var Events = FireDiff.events,
Path = FireDiff.Path;
// Applies the inverse of `curChange` to the live page. CSS changes are
// resolved against the style sheet identified by the change's xpath; all
// other changes are resolved against the document element.
function revertChange(curChange, context) {
    var ownerDoc, rootPath;
    if (curChange.changeType == "CSS") {
        rootPath = Path.getTopPath(curChange.xpath);
        ownerDoc = Path.evaluateStylePath(rootPath, context.window.document);
    } else {
        ownerDoc = context.window.document.documentElement;
        rootPath = Path.getElementPath(ownerDoc);
    }
    if (FBTrace.DBG_FIREDIFF) FBTrace.sysout("Revert change", curChange);
    // Each change object knows how to undo itself relative to its root.
    curChange.revert(ownerDoc, rootPath);
}
Firebug.DiffModule = extend(Firebug.ActivableModule, {
panelName: "firediff",
supportsFirebugEdits: Firebug.Editor.supportsStopEvent,
// Module startup: subscribe to the Firebug subsystems whose edits we
// track. Each subscription is feature-tested so older Firebug builds
// without that subsystem still load this module.
initialize: function() {
    Firebug.ActivableModule.initialize.apply(this, arguments);
    if (Firebug.CSSModule) {
        // Maintain support for older versions of firebug that do not
        // have the CSS change event implementation
        Firebug.CSSModule.addListener(this);
    }
    if (Firebug.HTMLModule) {
        Firebug.HTMLModule.addListener(this);
    }
    if (Firebug.Editor.supportsStopEvent) {
        Firebug.Editor.addListener(this);
    }
},
// Begin monitoring a page as soon as its context loads, if the panel
// is enabled.
loadedContext: function(context) {
    if (this.isAlwaysEnabled()) {
        this.monitorContext(context);
    }
},
// ActivableModule hooks: start/stop monitoring when the user toggles
// the panel on or off.
onEnabled: function(context) {
    this.monitorContext(context);
},
onDisabled: function(context) {
    this.unmonitorContext(context);
},
//////////////////////////////////////////////
// Actions
// Reverts every recorded change from newest back to (and including)
// `change`, removing each from the change list as it is undone.
revertAllChanges: function(change, context) {
    var diffContext = this.getDiffContext(context);
    var changes = diffContext.changes;
    // Revert means everything, not just those that are filtered.
    // Keeping the change model in sync for arbitrary changes is
    // currently out of scope
    //
    // We also rely on filter to be designed such that the model's
    // integrity remains.
    for (var i = changes.length; i > 0; i--) {
        var curChange = changes[i-1];
        revertChange(curChange, context);
        changes.splice(i-1, 1);
        if (change == curChange) {
            break;
        }
    }
},
// Reverts a single change. mergeRevert computes the minimal set of
// recorded changes that must be undone together; if more than one is
// required, the caller must pass `force` or we refuse (return false).
// On success returns the list of reverted changes.
revertChange: function(change, context, force) {
    var diffContext = this.getDiffContext(context);
    var changes = diffContext.changes;
    // Work on a copy so the model is untouched if we bail out.
    var tempChanges = changes.slice();
    var revert = Events.mergeRevert(change, tempChanges);
    if ((revert.length > 1 || changes.length - tempChanges.length > 1) && !force) {
        return false;
    }
    // Perform the revert
    for (var i = revert.length; i > 0; i--) {
        var curChange = revert[i-1];
        revertChange(curChange, context);
    }
    diffContext.changes = tempChanges;
    return revert;
},
//////////////////////////////////////////////
// Editor Listener
// Firebug.Editor listener: bracket an inline-edit session so mutation
// events generated during the edit can be merged when it ends.
onBeginEditing: function(panel, editor, target, value) {
    this.onBeginFirebugChange(target);
    this.onSaveEdit(panel, editor, target, value);
},
onSaveEdit: function(panel, editor, target, value, previousValue) {
    // Update the data store used for the HTML editor monitoring
    diffContext = this.getDiffContext();
    diffContext.htmlEditPath = this.getHtmlEditorPaths(editor);
},
onStopEdit: function(panel, editor, target) {
    this.onEndFirebugChange(target);
},
//////////////////////////////////////////////
// CSSModule Listener
// CSSModule listener: record rule insertions made through Firebug.
// The "dispatch" marker tags the sheet as modified by our own dispatch
// path so later reads can distinguish it.
onCSSInsertRule: function(styleSheet, cssText, ruleIndex) {
    styleSheet.source = "dispatch";
    this.recordChange(
        new Events.css.CSSInsertRuleEvent(
            styleSheet.cssRules[ruleIndex],
            Events.ChangeSource.FIREBUG_CHANGE));
},
// Record rule deletions; note the rule at ruleIndex is read after
// removal, so it is the rule that took the deleted rule's place.
onCSSDeleteRule: function(styleSheet, ruleIndex) {
    styleSheet.source = "dispatch";
    this.recordChange(
        new Events.css.CSSRemoveRuleEvent(
            styleSheet.cssRules[ruleIndex],
            Events.ChangeSource.FIREBUG_CHANGE));
},
// Record a property set. A style with no parentRule is an element's
// inline style attribute, which is recorded as a DOM attribute change
// rather than a CSS change.
onCSSSetProperty: function(style, propName, propValue, propPriority, prevValue, prevPriority, parent, baseText) {
    if (!style.parentRule) {
        // If we are dealing with an older version of firebug, protect ourselves from this failure and
        // just drop the change completely
        if (!parent)
            return;
        // This is a change to the inline style of a particular element, handle this.
        // See: https://bugzilla.mozilla.org/show_bug.cgi?id=338679
        this.recordChange(
            new Events.dom.DOMAttrChangedEvent(
                parent, MutationEvent.MODIFICATION, "style", style.cssText, baseText,
                undefined, undefined, Events.ChangeSource.FIREBUG_CHANGE));
    } else {
        this.recordChange(
            new Events.css.CSSSetPropertyEvent(
                style.parentRule, propName, propValue, propPriority, prevValue, prevPriority, Events.ChangeSource.FIREBUG_CHANGE));
    }
},
// Record a property removal; same inline-style special case as above.
onCSSRemoveProperty: function(style, propName, prevValue, prevPriority, parent, baseText) {
    if (!style.parentRule) {
        // If we are dealing with an older version of firebug, protect ourselves from this failure and
        // just drop the change completely
        if (!parent)
            return;
        // This is a change to the inline style of a particular element, handle this.
        // See: https://bugzilla.mozilla.org/show_bug.cgi?id=338679
        this.recordChange(
            new Events.dom.DOMAttrChangedEvent(
                parent, MutationEvent.MODIFICATION, "style", style.cssText, baseText,
                undefined, undefined, Events.ChangeSource.FIREBUG_CHANGE));
    } else {
        this.recordChange(
            new Events.css.CSSRemovePropertyEvent(
                style.parentRule, propName, prevValue, prevPriority, Events.ChangeSource.FIREBUG_CHANGE));
    }
},
//////////////////////////////////////////////
// HTMLModule Listener
// HTMLModule listener: an inline edit session is starting on `node`.
// Remembers the edit target and its path so buffered mutation events can be
// matched against it later, and starts buffering into editEvents.
onBeginFirebugChange: function(node, context) {
    var diffContext = this.getDiffContext(context);
    diffContext.editTarget = node;

    // Prefer the underlying page object over Firebug's representation node.
    var rep = Firebug.getRepObject(node) || node;
    if (rep instanceof Node) {
        diffContext.editTargetXpath = Path.getElementPath(rep);
    } else if (rep instanceof CSSRule || rep instanceof StyleSheet) {
        diffContext.editTargetXpath = Path.getStylePath(rep);
    } else {
        diffContext.editTargetXpath = undefined;
    }

    if (FBTrace.DBG_FIREDIFF) FBTrace.sysout("DiffModule.onBeginFirebugChange", diffContext.editTarget);
    diffContext.editEvents = [];
},
onEndFirebugChange: function(node, context) {
var diffContext = this.getDiffContext(context);
if (FBTrace.DBG_FIREDIFF) FBTrace.sysout("DiffModile.onEndFirebugChange: " + node, diffContext.editEvents);
var editEvents = diffContext.editEvents;
if (editEvents.length) {
editEvents = Events.merge(editEvents, true);
for (var i = 0; i < editEvents.length; i++) {
var change = editEvents[i];
// Special case for HTML free edit. It's not pretty but it gets the
// job done. In the future we may want to consider executing changes
// in the Firebug editors within ignore blocks, and generating events
// for the final states, but for now we want to keep the coupling
// low
function htmlEditChange() {
return diffContext.htmlEditPath
&& diffContext.htmlEditPath[0] <= change.xpath
&& change.xpath <= diffContext.htmlEditPath[1];
}
function changeApplies() {
return change.appliesTo(Firebug.getRepObject(diffContext.editTarget) || diffContext.editTarget, diffContext.editTargetXpath);
}
if (htmlEditChange() || changeApplies()) {
change.changeSource = Events.ChangeSource.FIREBUG_CHANGE;
}
this.dispatchChange(change);
}
}
delete diffContext.editTarget;
delete diffContext.editTargetXpath;
delete diffContext.editEvents;
delete diffContext.htmlEditPath;
},
//////////////////////////////////////////////
// Self
// Generic DOM mutation logger: records the event as a change, using the
// context's current changeSource, unless the node is part of Firebug's UI.
domEventLogger: function(ev, context) {
    if (!this.ignoreNode(ev.target)) {
        var diffContext = this.getDiffContext(context);
        this.recordChange(
            Events.dom.createDOMChange(ev, diffContext.changeSource),
            context);
    }
},
charDataChangedEventLogger: function(ev, context) {
// Filter out char data events whose parents are a firebug object
var filterNode = ev.target.parentNode;
if (!this.ignoreNode(ev.target.parentNode)) {
this.domEventLogger(ev, context);
}
},
// Logger for DOMAttrModified events. Additions and removals always pass;
// modifications pass only when the value actually changed.
attributeChangedEventLogger: function(ev, context) {
    // We only care about attributes that actually change or are created or deleted
    if (ev.attrChange != MutationEvent.MODIFICATION
        || ev.newValue != ev.prevValue) {
        this.domEventLogger(ev, context);
    }
},
// Starts listening for DOM mutation events on the context's window.
// Idempotent: bails out if loggers are already installed. The bound logger
// functions are stored on the diff context so unmonitorContext can remove
// the exact same references.
monitorContext: function(context) {
    if (FBTrace.DBG_ACTIVATION || FBTrace.DBG_FIREDIFF) { FBTrace.sysout("DiffModule.monitorContext", context); }
    var diffContext = this.getDiffContext(context);
    if (diffContext.eventLogger) return;

    diffContext.eventLogger = bind(this.domEventLogger, this, context);
    diffContext.attrEventLogger = bind(this.attributeChangedEventLogger, this, context);
    diffContext.charDataEventLogger = bind(this.charDataChangedEventLogger, this, context);

    // All listeners are registered in the capture phase (third argument true).
    context.window.addEventListener("DOMNodeInserted", diffContext.eventLogger, true);
    context.window.addEventListener("DOMNodeRemoved", diffContext.eventLogger, true);
    context.window.addEventListener("DOMAttrModified", diffContext.attrEventLogger, true);
    context.window.addEventListener("DOMCharacterDataModified", diffContext.charDataEventLogger, true);
},
// Reverses monitorContext: removes the mutation listeners (same capture flag
// and same bound references) and clears the stored logger functions.
// Idempotent: bails out if no loggers are installed.
unmonitorContext: function(context) {
    if (FBTrace.DBG_ACTIVATION || FBTrace.DBG_FIREDIFF) { FBTrace.sysout("DiffModule.unmonitorContext", context); }
    var diffContext = this.getDiffContext(context);
    if (!diffContext.eventLogger) return;

    context.window.removeEventListener("DOMNodeInserted", diffContext.eventLogger, true);
    context.window.removeEventListener("DOMNodeRemoved", diffContext.eventLogger, true);
    context.window.removeEventListener("DOMAttrModified", diffContext.attrEventLogger, true);
    context.window.removeEventListener("DOMCharacterDataModified", diffContext.charDataEventLogger, true);

    delete diffContext.eventLogger;
    delete diffContext.attrEventLogger;
    delete diffContext.charDataEventLogger;
},
// Returns truthy when `node` belongs to Firebug's own UI and its mutations
// should not be recorded: explicit firebugIgnore flags (on the wrapper or
// the unwrapped object), "firebug" in the class or id, or a firebugIgnore
// attribute.
ignoreNode: function(node) {
    // Ignore firebug elements and any top level elements that are not the doc element
    return node.firebugIgnore
        || unwrapObject(node).firebugIgnore
        || (node.className || "").indexOf("firebug") > -1
        || (node.id || "").indexOf("firebug") > -1
        || (node.hasAttribute && node.hasAttribute("firebugIgnore"));
},
// Returns [startXpath, endXpath] covering the element range the HTML
// free-text editor is working on, or undefined when the editor exposes no
// editing elements.
getHtmlEditorPaths: function(editor) {
    // Select the xpath update range. This is from the first to after the
    // last element in the range (or '}' if there is no sibling after that
    // to simplify the match test)
    //
    // This is not 100%, erroring on the side marking app changes as Firebug changes
    // To fully resolve this, deeper integration with Firebug will be required,
    // most likely in the form of changes to the editors to use diff ignore
    // blocks and generate custom events.
    var elements = editor.editingElements;
    if (elements) {
        var nextEl = getNextElement((elements[1] || elements[0]).nextSibling);
        return [
            Path.getElementPath(elements[0]),
            Path.getElementPath(nextEl) || '}'
        ];
    }
},
// Empties the recorded change list for the context and notifies listeners.
clearChanges: function(context) {
    if (FBTrace.DBG_FIREDIFF) FBTrace.sysout("DiffModule.clearChanges", context);
    var diffContext = this.getDiffContext(context);
    diffContext.changes = [];
    dispatch(this.fbListeners, "onClearChanges", [context || FirebugContext]);
},
// Asks listeners (e.g. the diff panel) to select the next recorded change.
navNextChange: function(context) {
    dispatch(this.fbListeners, "onNavNextChange", [context || FirebugContext]);
},
// Asks listeners (e.g. the diff panel) to select the previous recorded change.
navPrevChange: function(context) {
    dispatch(this.fbListeners, "onNavPrevChange", [context || FirebugContext]);
},
ignoreChanges: function(worker, context) {
// If no context is available failover. This failover is mostly for testing merges.
var diffContext = this.getDiffContext(context) || {};
try {
if (FBTrace.DBG_FIREDIFF) FBTrace.sysout("DiffModule: Set ignore changes", context);
diffContext.ignore = true;
worker();
} finally {
if (FBTrace.DBG_FIREDIFF) FBTrace.sysout("DiffModule: Reset ignore changes", context);
diffContext.ignore = false;
}
},
firebugChanges: function(worker, context) {
// If no context is available failover. This failover is mostly for testing merges.
var diffContext = this.getDiffContext(context) || {};
try {
if (FBTrace.DBG_FIREDIFF) FBTrace.sysout("DiffModule: Set firebug changes", context);
diffContext.changeSource = Events.ChangeSource.FIREBUG_CHANGE;
worker();
} finally {
if (FBTrace.DBG_FIREDIFF) FBTrace.sysout("DiffModule: Reset firebug changes", context);
delete diffContext.changeSource;
}
},
// Entry point for all change events. Drops the change when there is no
// context, ignore mode is on, or monitoring is not attached. While an inline
// edit is in progress the change is buffered (see onEndFirebugChange);
// otherwise it is dispatched immediately.
recordChange: function(change, context) {
    if (FBTrace.DBG_FIREDIFF) FBTrace.sysout("DiffModule.recordChange", change);
    var diffContext = this.getDiffContext(context);
    // Ignore if a context does not exist, we are in ignore mode, or the context is not attached
    if (!diffContext || diffContext.ignore || !diffContext.eventLogger) return;

    if (!diffContext.editTarget) {
        this.dispatchChange(change, context);
    } else {
        diffContext.editEvents.push(change);
    }
},
// Appends the change to the context's history and notifies listeners.
// NOTE(review): assumes a diff context exists — recordChange guards against
// null, but any direct caller must do the same.
dispatchChange: function(change, context) {
    if (FBTrace.DBG_FIREDIFF) FBTrace.sysout("DiffModule.dispatchChange", change);
    var diffContext = this.getDiffContext(context);
    diffContext.changes.push(change);
    dispatch(this.fbListeners, "onDiffChange", [change, context || FirebugContext]);
},
getChanges: function(context) {
var diffContext = this.getDiffContext(context);
return (diffContext && diffContext.changes) || [];
},
getDiffContext: function(context) {
context = context || FirebugContext;
if (!context) {
return null;
}
context.diffContext = context.diffContext || { changes: [] };
return context.diffContext;
}
});
Firebug.registerActivableModule(Firebug.DiffModule);
}}); | bsd-3-clause |
w3villa/spree_wholesale | app/overrides/wholesale-my-orders.rb | 310 | #insert_before :account_my_orders, 'hooks/wholesale_customer'
# Deface override: injects the wholesale-customer partial into the Spree
# user account page ("My Account"), immediately before the "My Orders" list.
Deface::Override.new(:virtual_path => 'spree/users/show',
  :name => 'wholesale-my-orders',
  # Selector matches both the data-hook attribute and the legacy id-based hook markup.
  :insert_before => "[data-hook='account_my_orders'], #account_my_orders[data-hook]",
  :partial => "spree/hooks/wholesale_customer",
  :disabled => false)
| bsd-3-clause |
thor/django-localflavor | localflavor/cu/models.py | 2799 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db.models import CharField
from django.utils.translation import ugettext as _
from .choices import PROVINCE_CHOICES, REGION_CHOICES
from .forms import CUIdentityCardNumberField as CUIdentityCardNumberFormField
from .forms import CUPostalCodeField as CUPostalCodeFormField
class CURegionField(CharField):
    """
    A model field for the three-letter of the Cuban region abbreviation.

    Forms represent it as a ``forms.CURegionField``.

    .. versionadded:: 1.6
    """

    description = _("Cuban regions (three uppercase letters)")

    def __init__(self, *args, **kwargs):
        # Choices and length are fixed by the field; callers cannot override them.
        kwargs['choices'] = REGION_CHOICES
        kwargs['max_length'] = 3
        super(CURegionField, self).__init__(*args, **kwargs)

    def deconstruct(self):
        # Drop 'choices' from the migration signature — __init__ re-injects it,
        # so serializing it would only bloat migrations.
        name, path, args, kwargs = super(CURegionField, self).deconstruct()
        del kwargs['choices']
        return name, path, args, kwargs
class CUProvinceField(CharField):
    """
    A model field for the three-letter of the Cuban province abbreviation in the database.

    Forms represent it as a ``forms.CUProvinceField``.

    .. versionadded:: 1.6
    """

    description = _("Cuban provinces (three uppercase letters)")

    def __init__(self, *args, **kwargs):
        # Choices and length are fixed by the field; callers cannot override them.
        kwargs['choices'] = PROVINCE_CHOICES
        kwargs['max_length'] = 3
        super(CUProvinceField, self).__init__(*args, **kwargs)

    def deconstruct(self):
        # Drop 'choices' from the migration signature — __init__ re-injects it.
        name, path, args, kwargs = super(CUProvinceField, self).deconstruct()
        del kwargs['choices']
        return name, path, args, kwargs
class CUPostalCodeField(CharField):
    """
    A model field for the Cuban postal code.

    Forms represent it as a ``forms.CUPostalCodeField``.

    .. versionadded:: 1.6
    """

    description = _("Cuban postal code")

    def __init__(self, *args, **kwargs):
        # Cuban postal codes are exactly 5 digits; validation lives in the form field.
        kwargs['max_length'] = 5
        super(CUPostalCodeField, self).__init__(*args, **kwargs)

    def formfield(self, **kwargs):
        # Use the validating localflavor form field unless the caller overrides it.
        defaults = {'form_class': CUPostalCodeFormField}
        defaults.update(kwargs)
        return super(CUPostalCodeField, self).formfield(**defaults)
class CUIdentityCardNumberField(CharField):
    """
    A model field for the Cuban identity card number.

    Forms represent it as a ``forms.CUIdentityCardNumberField``.

    .. versionadded:: 1.6
    """

    description = _("Cuban identity card number")

    def __init__(self, *args, **kwargs):
        # Identity card numbers are exactly 11 digits; validation lives in the form field.
        kwargs['max_length'] = 11
        super(CUIdentityCardNumberField, self).__init__(*args, **kwargs)

    def formfield(self, **kwargs):
        # Use the validating localflavor form field unless the caller overrides it.
        defaults = {'form_class': CUIdentityCardNumberFormField}
        defaults.update(kwargs)
        return super(CUIdentityCardNumberField, self).formfield(**defaults)
| bsd-3-clause |
AndyDiamondstein/vitess | go/vt/binlog/tables_filter_test.go | 3165 | // Copyright 2012, Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package binlog
import (
"testing"
binlogdatapb "github.com/youtube/vitess/go/vt/proto/binlogdata"
)
// testTables is the allow-list of table names handed to TablesFilterFunc in
// the tests below; statements mentioning other tables must be filtered out.
var testTables = []string{
	"included1",
	"included2",
}
// TestTablesFilterPass verifies that SET statements and DML statements whose
// _stream comment names an allow-listed table are forwarded unchanged.
func TestTablesFilterPass(t *testing.T) {
	input := binlogdatapb.BinlogTransaction{
		Statements: []*binlogdatapb.BinlogTransaction_Statement{
			{
				Category: binlogdatapb.BinlogTransaction_Statement_BL_SET,
				Sql:      "set1",
			}, {
				Category: binlogdatapb.BinlogTransaction_Statement_BL_DML,
				Sql:      "dml1 /* _stream included1 (id ) (500 ); */",
			}, {
				Category: binlogdatapb.BinlogTransaction_Statement_BL_DML,
				Sql:      "dml2 /* _stream included2 (id ) (500 ); */",
			},
		},
	}
	var got string
	f := TablesFilterFunc(testTables, func(reply *binlogdatapb.BinlogTransaction) error {
		got = bltToString(reply)
		return nil
	})
	f(&input)
	want := `statement: <6, "set1"> statement: <4, "dml1 /* _stream included1 (id ) (500 ); */"> statement: <4, "dml2 /* _stream included2 (id ) (500 ); */"> transaction_id: "" `
	if want != got {
		t.Errorf("want %s, got %s", want, got)
	}
}
// TestTablesFilterSkip verifies that a DML for a table outside the allow-list
// is dropped, leaving an empty (statement-free) transaction.
func TestTablesFilterSkip(t *testing.T) {
	input := binlogdatapb.BinlogTransaction{
		Statements: []*binlogdatapb.BinlogTransaction_Statement{
			{
				Category: binlogdatapb.BinlogTransaction_Statement_BL_SET,
				Sql:      "set1",
			}, {
				Category: binlogdatapb.BinlogTransaction_Statement_BL_DML,
				Sql:      "dml1 /* _stream excluded1 (id ) (500 ); */",
			},
		},
	}
	var got string
	f := TablesFilterFunc(testTables, func(reply *binlogdatapb.BinlogTransaction) error {
		got = bltToString(reply)
		return nil
	})
	f(&input)
	want := `transaction_id: "" `
	if want != got {
		t.Errorf("want %s, got %s", want, got)
	}
}
// TestTablesFilterDDL verifies that DDL statements are dropped by the table
// filter, leaving an empty transaction.
func TestTablesFilterDDL(t *testing.T) {
	input := binlogdatapb.BinlogTransaction{
		Statements: []*binlogdatapb.BinlogTransaction_Statement{
			{
				Category: binlogdatapb.BinlogTransaction_Statement_BL_SET,
				Sql:      "set1",
			}, {
				Category: binlogdatapb.BinlogTransaction_Statement_BL_DDL,
				Sql:      "ddl",
			},
		},
	}
	var got string
	f := TablesFilterFunc(testTables, func(reply *binlogdatapb.BinlogTransaction) error {
		got = bltToString(reply)
		return nil
	})
	f(&input)
	want := `transaction_id: "" `
	if want != got {
		t.Errorf("want %s, got %s", want, got)
	}
}
// TestTablesFilterMalformed verifies that DML statements with a missing or
// malformed _stream comment are dropped rather than passed through.
func TestTablesFilterMalformed(t *testing.T) {
	input := binlogdatapb.BinlogTransaction{
		Statements: []*binlogdatapb.BinlogTransaction_Statement{
			{
				Category: binlogdatapb.BinlogTransaction_Statement_BL_SET,
				Sql:      "set1",
			}, {
				Category: binlogdatapb.BinlogTransaction_Statement_BL_DML,
				Sql:      "ddl", // no _stream comment at all
			}, {
				Category: binlogdatapb.BinlogTransaction_Statement_BL_DML,
				Sql:      "dml1 /* _stream excluded1*/", // truncated _stream comment
			},
		},
	}
	var got string
	f := TablesFilterFunc(testTables, func(reply *binlogdatapb.BinlogTransaction) error {
		got = bltToString(reply)
		return nil
	})
	f(&input)
	want := `transaction_id: "" `
	if want != got {
		t.Errorf("want %s, got %s", want, got)
	}
}
| bsd-3-clause |
yiisoft/yii2-app-advanced | common/tests/_support/UnitTester.php | 622 | <?php
namespace common\tests;
/**
* Inherited Methods
* @method void wantToTest($text)
* @method void wantTo($text)
* @method void execute($callable)
* @method void expectTo($prediction)
* @method void expect($prediction)
* @method void amGoingTo($argumentation)
* @method void am($role)
* @method void lookForwardTo($achieveValue)
* @method void comment($description)
* @method \Codeception\Lib\Friend haveFriend($name, $actorClass = NULL)
*
* @SuppressWarnings(PHPMD)
*/
/**
 * Codeception actor for unit tests in the common tier; all generated actions
 * are mixed in via the UnitTesterActions trait.
 */
class UnitTester extends \Codeception\Actor
{
    use _generated\UnitTesterActions;

    /**
     * Define custom actions here
     */
}
| bsd-3-clause |
adirkuhn/expressoDrupal | core/vendor/twig/twig/lib/Twig/Sandbox/SecurityPolicyInterface.php | 584 | <?php
/*
* This file is part of Twig.
*
* (c) 2009 Fabien Potencier
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/**
* Interfaces that all security policy classes must implements.
*
* @package twig
* @author Fabien Potencier <fabien@symfony.com>
*/
interface Twig_Sandbox_SecurityPolicyInterface
{
    /**
     * Checks that the given tags, filters and functions are allowed by the policy.
     *
     * @param array $tags      Tag names used by the template
     * @param array $filters   Filter names used by the template
     * @param array $functions Function names used by the template
     */
    public function checkSecurity($tags, $filters, $functions);

    /**
     * Checks that calling $method on $obj is allowed.
     *
     * @param object $obj    Target object
     * @param string $method Method name
     */
    public function checkMethodAllowed($obj, $method);

    /**
     * Checks that accessing the given property of $obj is allowed.
     *
     * @param object $obj    Target object
     * @param string $method Property name (parameter name kept for BC)
     */
    public function checkPropertyAllowed($obj, $method);
}
| gpl-2.0 |
EPTamminga/Dnn.Platform | Dnn.AdminExperience/Dnn.PersonaBar.Extensions/Components/Recyclebin/Prompt/Commands/PurgeUser.cs | 1796 | // Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information
namespace Dnn.PersonaBar.Recyclebin.Components.Prompt.Commands
{
using System.Collections.Generic;
using Dnn.PersonaBar.Library.Prompt;
using Dnn.PersonaBar.Library.Prompt.Attributes;
using Dnn.PersonaBar.Library.Prompt.Models;
using DotNetNuke.Entities.Portals;
using DotNetNuke.Entities.Users;
[ConsoleCommand("purge-user", Constants.RecylcleBinCategory, "Prompt_PurgeUser_Description")]
/// <summary>
/// Prompt command that permanently removes an already soft-deleted user
/// from the recycle bin of the current portal.
/// </summary>
[ConsoleCommand("purge-user", Constants.RecylcleBinCategory, "Prompt_PurgeUser_Description")]
public class PurgeUser : ConsoleCommandBase
{
    [FlagParameter("id", "Prompt_PurgeUser_FlagId", "Integer", true)]
    private const string FlagId = "id";

    public override string LocalResourceFile => Constants.LocalResourcesFile;

    // Parsed from the required --id flag in Init.
    private int UserId { get; set; }

    /// <summary>Parses the required user id flag from the command arguments.</summary>
    public override void Init(string[] args, PortalSettings portalSettings, UserInfo userInfo, int activeTabId)
    {
        this.UserId = this.GetFlagValue(FlagId, "User Id", -1, true, true, true);
    }

    /// <summary>
    /// Looks the user up in the current portal; returns an error when the user
    /// does not exist or has not been soft-deleted, otherwise purges it.
    /// </summary>
    public override ConsoleResultModel Run()
    {
        var userInfo = UserController.Instance.GetUser(this.PortalId, this.UserId);
        if (userInfo == null)
            return new ConsoleErrorResultModel(string.Format(this.LocalizeString("UserNotFound"), this.UserId));
        if (!userInfo.IsDeleted)
            return new ConsoleErrorResultModel(this.LocalizeString("Prompt_CannotPurgeUser"));
        RecyclebinController.Instance.DeleteUsers(new List<UserInfo> { userInfo });
        return new ConsoleResultModel(this.LocalizeString("Prompt_UserPurged")) { Records = 1 };
    }
}
| mit |
modwizcode/SpongeCommon | src/main/java/org/spongepowered/common/event/RegisteredListener.java | 4284 | /*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.event;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.spongepowered.api.event.Cancellable;
import org.spongepowered.api.event.Event;
import org.spongepowered.api.event.EventListener;
import org.spongepowered.api.event.Order;
import org.spongepowered.api.plugin.PluginContainer;
import java.util.EnumMap;
import java.util.List;
/**
 * Pairs an {@link EventListener} with its registration metadata: the owning
 * plugin, the event class it handles, its dispatch {@link Order}, and whether
 * it runs before modifications.
 *
 * <p>Note: the natural ordering compares by {@link Order} only and is
 * therefore inconsistent with equals; use it solely for sorting dispatch
 * lists.
 */
public final class RegisteredListener<T extends Event> implements SpongeEventListener<T>, Comparable<RegisteredListener<?>> {

    private final PluginContainer plugin;
    private final Class<T> eventClass;
    private final Order order;
    private final EventListener<? super T> listener;
    private final boolean beforeModifications;

    RegisteredListener(PluginContainer plugin, Class<T> eventClass, Order order, EventListener<? super T> listener, boolean beforeModifications) {
        this.plugin = checkNotNull(plugin, "plugin");
        this.eventClass = checkNotNull(eventClass, "eventClass");
        this.order = checkNotNull(order, "order");
        this.listener = checkNotNull(listener, "listener");
        this.beforeModifications = beforeModifications;
    }

    /** Returns the plugin that registered this listener. */
    public PluginContainer getPlugin() {
        return this.plugin;
    }

    /** Returns the event class this listener handles. */
    public Class<T> getEventClass() {
        return this.eventClass;
    }

    /** Returns the dispatch order (priority bucket) of this listener. */
    public Order getOrder() {
        return this.order;
    }

    /** Returns whether this listener runs before modifications. */
    public boolean isBeforeModifications() {
        return this.beforeModifications;
    }

    /**
     * Unwraps the underlying handler when the listener is itself a
     * {@link SpongeEventListener}; otherwise returns the listener as-is.
     */
    @Override
    public Object getHandle() {
        if (this.listener instanceof SpongeEventListener) {
            return ((SpongeEventListener<?>) this.listener).getHandle();
        }
        return this.listener;
    }

    /** Delegates event handling to the wrapped listener. */
    @Override
    public void handle(T event) throws Exception {
        this.listener.handle(event);
    }

    /** Orders listeners by their {@link Order} bucket only. */
    @Override
    public int compareTo(RegisteredListener<?> handler) {
        return this.order.compareTo(handler.order);
    }

    /**
     * Snapshot of a listener list, pre-bucketed by {@link Order} at
     * construction time for fast per-order dispatch.
     */
    public static final class Cache {

        private final List<RegisteredListener<?>> listeners;
        private final EnumMap<Order, List<RegisteredListener<?>>> listenersByOrder;

        // Cached so Order.values() is not re-allocated for every cache build.
        private static final Order[] ORDERS = Order.values();

        Cache(List<RegisteredListener<?>> listeners) {
            this.listeners = listeners;
            this.listenersByOrder = Maps.newEnumMap(Order.class);
            // Pre-create a bucket per order so lookups never return null.
            for (Order order : ORDERS) {
                this.listenersByOrder.put(order, Lists.<RegisteredListener<?>>newArrayList());
            }
            for (RegisteredListener<?> handler : listeners) {
                this.listenersByOrder.get(handler.getOrder()).add(handler);
            }
        }

        /** Returns all listeners in this snapshot. */
        public List<RegisteredListener<?>> getListeners() {
            return this.listeners;
        }

        /** Returns the listeners registered for the given (non-null) order. */
        public List<RegisteredListener<?>> getListenersByOrder(Order order) {
            return this.listenersByOrder.get(checkNotNull(order, "order"));
        }
    }
}
| mit |
linyows/forever | node_modules/flatiron/node_modules/director/lib/director/http/methods.js | 1482 | /*!
* Express - router - methods
* Copyright(c) 2010 TJ Holowaychuk <tj@vision-media.ca>
* MIT Licensed
*
* Adapted for SS
* (C) 2011 Nodejitsu Inc. <info@nodejitsu.com>
*
*/
/**
* Hypertext Transfer Protocol -- HTTP/1.1
* http://www.ietf.org/rfc/rfc2616.txt
*/
var RFC2616 = ['OPTIONS', 'GET', 'POST', 'PUT', 'DELETE', 'TRACE', 'CONNECT'];
/**
* HTTP Extensions for Distributed Authoring -- WEBDAV
* http://www.ietf.org/rfc/rfc2518.txt
*/
var RFC2518 = ['PROPFIND', 'PROPPATCH', 'MKCOL', 'COPY', 'MOVE', 'LOCK', 'UNLOCK'];
/**
* Versioning Extensions to WebDAV
* http://www.ietf.org/rfc/rfc3253.txt
*/
var RFC3253 = ['VERSION-CONTROL', 'REPORT', 'CHECKOUT', 'CHECKIN', 'UNCHECKOUT', 'MKWORKSPACE', 'UPDATE', 'LABEL', 'MERGE', 'BASELINE-CONTROL', 'MKACTIVITY'];
/**
* Ordered Collections Protocol (WebDAV)
* http://www.ietf.org/rfc/rfc3648.txt
*/
var RFC3648 = ['ORDERPATCH'];
/**
* Web Distributed Authoring and Versioning (WebDAV) Access Control Protocol
* http://www.ietf.org/rfc/rfc3744.txt
*/
var RFC3744 = ['ACL'];
/**
* Web Distributed Authoring and Versioning (WebDAV) SEARCH
* http://www.ietf.org/rfc/rfc5323.txt
*/
var RFC5323 = ['SEARCH'];
/**
* PATCH Method for HTTP
* http://www.ietf.org/rfc/rfc5789.txt
*/
var RFC5789 = ['PATCH'];
/**
* Expose the methods.
*/
// Export every method name from all of the RFCs above as a single flat
// array, lower-cased to match the router's method-name convention.
module.exports = [].concat(
  RFC2616,
  RFC2518,
  RFC3253,
  RFC3648,
  RFC3744,
  RFC5323,
  RFC5789
).map(function (method) {
  return method.toLowerCase();
}); | mit |
StartupWeekend/engine | app/controllers/locomotive/api/tokens_controller.rb | 1939 | module Locomotive
module Api
# REST endpoints managing API session tokens. Authentication filters are
# skipped because these endpoints are themselves how a client authenticates.
class TokensController < Locomotive::Api::BaseController

  skip_before_filter :require_account, :require_site, :set_current_thread_variables

  # Creates a session token from email/password OR an API key; responds 401
  # on failure.
  # Fix: rescue StandardError instead of Exception so process-level signals
  # (SystemExit, Interrupt, NoMemoryError, ...) are not swallowed and turned
  # into a 401 response.
  def create
    begin
      token = Account.create_api_token(params[:email], params[:password], params[:api_key])
      respond_with({ token: token }, location: root_url)
    rescue StandardError => e
      respond_with({ message: e.message }, status: 401, location: root_url)
    end
  end

  # Invalidates the token given in params[:id]; responds 404 when unknown.
  def destroy
    begin
      token = Account.invalidate_api_token(params[:id])
      respond_with({ token: token }, location: root_url)
    rescue StandardError => e
      respond_with({ message: e.message }, status: 404, location: root_url)
    end
  end

  protected

  # Token endpoints are not tied to a site, so use the first configured locale.
  def set_locale
    I18n.locale = Locomotive.config.locales.first
  end

  # Static, self-documenting API description consumed by the docs generator.
  def self.description
    {
      overall: %{Manage a session token which will be passed to all the other REST calls},
      actions: {
        create: {
          description: %{Generate a session token from either an email and a password OR an api key},
          params: { email: 'String', password: 'String' },
          response: { token: 'String' },
          example: {
            command: %{curl -d 'email=john.doe@acme.org&password=secret' 'http://mysite.com/locomotive/api/tokens.json'},
            response: %({ "token": "dtsjkqs1TJrWiSiJt2gg" })
          }
        },
        destroy: {
          description: %{Make a session token invalid},
          response: { token: 'String' },
          example: {
            command: %{curl -X DELETE 'http://mysite.com/locomotive/api/tokens/dtsjkqs1TJrWiSiJt2gg.json'},
            response: %({ "token": "dtsjkqs1TJrWiSiJt2gg" })
          }
        }
      }
    }
  end
end
end
end
| mit |
clausjensen/Merchello | src/Merchello.Core/Models/EntityBase/IDateStamped.cs | 544 | namespace Merchello.Core.Models.EntityBase
{
using System;
using System.Runtime.Serialization;
/// <summary>
/// Represents an entity that carries creation and modification timestamps.
/// </summary>
public interface IDateStamped
{
    /// <summary>
    /// Gets or sets the date the entity was created.
    /// </summary>
    [DataMember]
    DateTime CreateDate { get; set; }

    /// <summary>
    /// Gets or sets the date the entity was last modified.
    /// </summary>
    [DataMember]
    DateTime UpdateDate { get; set; }
}
}
} | mit |
Sorsly/subtle | google-cloud-sdk/lib/googlecloudsdk/third_party/apis/servicemanagement/v1/resources.py | 1875 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Resource definitions for cloud platform apis."""
import enum
BASE_URL = 'https://servicemanagement.googleapis.com/v1/'
class Collections(enum.Enum):
  """Collections for all supported apis.

  Each member is a 4-tuple of (collection_name, REST path template,
  flat path templates, ordered list of path parameters); Enum calls
  __init__ with the tuple unpacked, which stores the components as
  the correspondingly named attributes on the member.
  """

  OPERATIONS = (
      'operations',
      'operations/{operationsId}',
      {},
      [u'operationsId']
  )
  SERVICES = (
      'services',
      'services/{serviceName}',
      {},
      [u'serviceName']
  )
  SERVICES_CONFIGS = (
      'services.configs',
      'services/{serviceName}/configs/{configId}',
      {},
      [u'serviceName', u'configId']
  )
  SERVICES_CUSTOMERSETTINGS = (
      'services.customerSettings',
      'services/{serviceName}/customerSettings/{customerId}',
      {},
      [u'serviceName', u'customerId']
  )
  SERVICES_PROJECTSETTINGS = (
      'services.projectSettings',
      'services/{serviceName}/projectSettings/{consumerProjectId}',
      {},
      [u'serviceName', u'consumerProjectId']
  )
  SERVICES_ROLLOUTS = (
      'services.rollouts',
      'services/{serviceName}/rollouts/{rolloutId}',
      {},
      [u'serviceName', u'rolloutId']
  )

  def __init__(self, collection_name, path, flat_paths, params):
    # Unpacks the member tuple into named attributes.
    self.collection_name = collection_name
    self.path = path
    self.flat_paths = flat_paths
    self.params = params
| mit |
appfs/appfs | RayChew/Ex2/html/search/functions_5.js | 294 | var searchData=
[
['reportparseexception',['reportParseException',['../classParserErrorHandler.html#a7e9893b8664f42e38cecb483f0d26791',1,'ParserErrorHandler']]],
['reseterrors',['resetErrors',['../classParserErrorHandler.html#afcd94fcf4b801a4767a90b88721b432c',1,'ParserErrorHandler']]]
];
| mit |
Akhenoth/Factorian | concrete/vendor/mlocati/concrete5-translation-library/src/Parser/DynamicItem/Tree.php | 588 | <?php
namespace C5TL\Parser\DynamicItem;
/**
* Extract translatable data from AttributeSets.
*/
class Tree extends DynamicItem
{
/**
* {@inheritdoc}
*
* @see \C5TL\Parser\DynamicItem::getParsedItemNames()
*/
public function getParsedItemNames()
{
    // Fall back to the untranslated label when concrete5's t() helper is unavailable.
    return function_exists('t') ? t('Trees and Topics') : 'Trees and Topics';
}
/**
* {@inheritdoc}
*
* @see \C5TL\Parser\DynamicItem::getClassNameForExtractor()
*/
protected function getClassNameForExtractor()
{
    // Fully-qualified class whose instances supply the translatable strings.
    return '\Concrete\Core\Tree\Tree';
}
}
| mit |
fakelbst/Chrome-Last.fm-Scrobbler | src/connectors/rtbf.js | 240 | 'use strict';
Connector.playerSelector = '.player';
Connector.artistSelector = '.program-info .artist-name';
Connector.trackSelector = '.program-info .title-name';
Connector.isPlaying = () => $('.player-controls a').hasClass('paused');
| mit |
liu78778/node-webkit | src/api/window_bindings.cc | 8151 | // Copyright (c) 2012 Intel Corp
// Copyright (c) 2012 The Chromium Authors
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell co
// pies of the Software, and to permit persons to whom the Software is furnished
// to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in al
// l copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IM
// PLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNES
// S FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
// OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WH
// ETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#include "content/nw/src/api/window_bindings.h"
#include "base/values.h"
#include "content/child/child_thread.h"
#include "content/nw/src/api/bindings_common.h"
#include "content/renderer/render_view_impl.h"
#include "grit/nw_resources.h"
#undef LOG
using namespace WebCore;
#if defined(OS_WIN)
#define _USE_MATH_DEFINES
#include <math.h>
#endif
#include "third_party/WebKit/Source/config.h"
#include "third_party/WebKit/Source/core/html/HTMLIFrameElement.h"
#include "third_party/WebKit/public/web/WebFrame.h"
#include "third_party/WebKit/public/web/WebView.h"
#include "third_party/WebKit/Source/web/WebFrameImpl.h"
#include "third_party/WebKit/public/web/WebScriptSource.h"
#undef CHECK
#include "V8HTMLIFrameElement.h"
using WebKit::WebScriptSource;
using WebKit::WebFrame;
namespace nwapi {
// Registers the window_bindings.js resource as a v8 extension with no
// extension dependencies.
WindowBindings::WindowBindings()
    : v8::Extension("window_bindings.js",
                    GetStringResource(
                        IDR_NW_API_WINDOW_BINDINGS_JS).data(),
                    0,  // num dependencies.
                    NULL,  // dependencies array.
                    GetStringResource(
                        IDR_NW_API_WINDOW_BINDINGS_JS).size()) {
}

WindowBindings::~WindowBindings() {
}
// Resolves a native-binding name requested from JS to its C++ implementation.
// Unknown names yield an empty function template rather than an error.
v8::Handle<v8::FunctionTemplate>
WindowBindings::GetNativeFunction(v8::Handle<v8::String> name) {
  if (name->Equals(v8::String::New("BindToShell")))
    return v8::FunctionTemplate::New(BindToShell);
  else if (name->Equals(v8::String::New("CallObjectMethod")))
    return v8::FunctionTemplate::New(CallObjectMethod);
  else if (name->Equals(v8::String::New("CallObjectMethodSync")))
    return v8::FunctionTemplate::New(CallObjectMethodSync);
  else if (name->Equals(v8::String::New("GetWindowObject")))
    return v8::FunctionTemplate::New(GetWindowObject);

  return v8::FunctionTemplate::New();
}
// static
// args: [routing_id, object_id]. Allocates the remote "Window" wrapper for
// the given render view in the browser process; returns undefined to JS.
void
WindowBindings::BindToShell(const v8::FunctionCallbackInfo<v8::Value>& args) {
  int routing_id = args[0]->Int32Value();
  int object_id = args[1]->Int32Value();

  remote::AllocateObject(routing_id, object_id, "Window", v8::Object::New());

  args.GetReturnValue().Set(v8::Undefined());
}
// Returns a fresh remote-object id scoped to the render view that entered
// this call.
void
WindowBindings::AllocateId(const v8::FunctionCallbackInfo<v8::Value>& args) {
  content::RenderViewImpl* render_view = static_cast<content::RenderViewImpl*>(GetEnteredRenderView());
  int routing_id = render_view->GetRoutingID();
  args.GetReturnValue().Set(remote::AllocateId(routing_id));
}
// static
// Dispatches a method call on a Window wrapper object.
// args: [self, method_name, method_args...]. "EvaluateScript" and
// "setDevToolsJail" are handled in-process against the target WebFrame;
// every other method is forwarded to the browser process.
void
WindowBindings::CallObjectMethod(const v8::FunctionCallbackInfo<v8::Value>& args) {
  v8::Local<v8::Object> self = args[0]->ToObject();
  int routing_id = self->Get(v8::String::New("routing_id"))->Int32Value();
  int object_id = self->Get(v8::String::New("id"))->Int32Value();
  std::string method = *v8::String::Utf8Value(args[1]);

  // Fall back to the render view that entered this call when the stored
  // routing id no longer resolves.
  content::RenderViewImpl* render_view = static_cast<content::RenderViewImpl*>(
      content::RenderViewImpl::FromRoutingID(routing_id));
  if (!render_view)
    render_view = static_cast<content::RenderViewImpl*>(GetEnteredRenderView());
  if (!render_view) {
    std::string msg = "Unable to get render view in " + method;
    args.GetReturnValue().Set(v8::ThrowException(v8::Exception::Error(v8::String::New(msg.c_str()))));
    return;
  }
  WebFrame* main_frame = render_view->GetWebView()->mainFrame();
  if (method == "EvaluateScript") {
    v8::Handle<v8::Value> result;
    // args[2] is an optional iframe element; null means the main frame.
    v8::Handle<v8::Object> frm = v8::Handle<v8::Object>::Cast(args[2]);
    WebFrame* web_frame = NULL;
    if (frm->IsNull()) {
      web_frame = main_frame;
    } else {
      WebCore::HTMLIFrameElement* iframe = WebCore::V8HTMLIFrameElement::toNative(frm);
      web_frame = WebKit::WebFrameImpl::fromFrame(iframe->contentFrame());
    }
#if defined(OS_WIN)
    base::string16 jscript((WCHAR*)*v8::String::Value(args[3]));
#else
    base::string16 jscript = *v8::String::Value(args[3]);
#endif
    if (web_frame) {
      result = web_frame->executeScriptAndReturnValue(WebScriptSource(jscript));
    }
    args.GetReturnValue().Set(result);
    return;
  } else if (method == "setDevToolsJail") {
    // args[2] is the iframe to jail devtools into; null clears the jail.
    v8::Handle<v8::Object> frm = v8::Handle<v8::Object>::Cast(args[2]);
    if (frm->IsNull()) {
      main_frame->setDevtoolsJail(NULL);
    } else {
      WebCore::HTMLIFrameElement* iframe = WebCore::V8HTMLIFrameElement::toNative(frm);
      main_frame->setDevtoolsJail(WebKit::WebFrameImpl::fromFrame(iframe->contentFrame()));
    }
    args.GetReturnValue().Set(v8::Undefined());
    return;
  }

  // Everything else is proxied to the browser-process Window object.
  args.GetReturnValue().Set(remote::CallObjectMethod(render_view->GetRoutingID(),
                                                     object_id,
                                                     "Window", method, args[2]));
}
// static
// Synchronous variant of CallObjectMethod. Zoom get/set are answered
// locally from the render view; everything else blocks on a synchronous
// IPC round-trip to the browser process.
// args: [0] = proxy object with "routing_id"/"id", [1] = method name,
//       [2] = method argument.
void
WindowBindings::CallObjectMethodSync(const v8::FunctionCallbackInfo<v8::Value>& args) {
  v8::HandleScope scope;
  v8::Local<v8::Object> self = args[0]->ToObject();
  int routing_id = self->Get(v8::String::New("routing_id"))->Int32Value();
  int object_id = self->Get(v8::String::New("id"))->Int32Value();
  std::string method = *v8::String::Utf8Value(args[1]);

  // Unlike CallObjectMethod, there is no GetEnteredRenderView() fallback
  // here — the routing id must resolve.
  content::RenderViewImpl* render_view = static_cast<content::RenderViewImpl*>(
      content::RenderViewImpl::FromRoutingID(routing_id));
  if (!render_view) {
    std::string msg = "Unable to get render view in " + method;
    args.GetReturnValue().Set(v8::ThrowException(v8::Exception::Error(v8::String::New(msg.c_str()))));
    return;
  }

  if (method == "GetZoomLevel") {
    float zoom_level = render_view->GetWebView()->zoomLevel();
    // Result is wrapped in a one-element array — presumably to match the
    // sync-IPC result shape expected by the JS caller; confirm.
    v8::Local<v8::Array> array = v8::Array::New();
    array->Set(0, v8::Number::New(zoom_level));
    args.GetReturnValue().Set(scope.Close(array));
    return;
  }else if (method == "SetZoomLevel") {
    double zoom_level = args[2]->ToNumber()->Value();
    render_view->OnSetZoomLevel(zoom_level);
    args.GetReturnValue().Set(v8::Undefined());
    return;
  }

  args.GetReturnValue().Set(remote::CallObjectMethodSync(routing_id, object_id, "Window", method, args[2]));
}
// static
// Returns the global `window` object of the render view addressed by the
// routing id in args[0], or throws a JS error if the view cannot be found.
void
WindowBindings::GetWindowObject(const v8::FunctionCallbackInfo<v8::Value>& args) {
  int routing_id = args[0]->Int32Value();

  // Dark magic to dig out the RenderView from its id.
  content::RenderViewImpl* render_view = static_cast<content::RenderViewImpl*>(
      content::RenderViewImpl::FromRoutingID(routing_id));
  if (!render_view) {
    args.GetReturnValue().Set(v8::ThrowException(v8::Exception::Error(v8::String::New("Unable to get render view in GetWindowObject"))));
    return;
  }

  // Return the window object (the main world's global of the main frame).
  args.GetReturnValue().Set(render_view->GetWebView()->mainFrame()->mainWorldScriptContext()->Global());
}
} // namespace nwapi
| mit |
ramassin/glm | glm/gtx/dual_quaternion.hpp | 9782 | ///////////////////////////////////////////////////////////////////////////////////
/// OpenGL Mathematics (glm.g-truc.net)
///
/// Copyright (c) 2005 - 2013 G-Truc Creation (www.g-truc.net)
/// Permission is hereby granted, free of charge, to any person obtaining a copy
/// of this software and associated documentation files (the "Software"), to deal
/// in the Software without restriction, including without limitation the rights
/// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
/// copies of the Software, and to permit persons to whom the Software is
/// furnished to do so, subject to the following conditions:
///
/// The above copyright notice and this permission notice shall be included in
/// all copies or substantial portions of the Software.
///
/// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
/// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
/// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
/// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
/// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
/// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
/// THE SOFTWARE.
///
/// @ref gtx_dual_quaternion
/// @file glm/gtx/dual_quaternion.hpp
/// @date 2013-02-10 / 2013-02-20
/// @author Maksim Vorobiev (msomeone@gmail.com)
///
/// @see core (dependence)
/// @see gtc_half_float (dependence)
/// @see gtc_constants (dependence)
/// @see gtc_quaternion (dependence)
///
/// @defgroup gtc_dual_quaternion GLM_GTX_dual_quaternion
/// @ingroup gtc
///
/// @brief Defines a templated dual-quaternion type and several dual-quaternion operations.
///
/// <glm/gtx/dual_quaternion.hpp> needs to be included to use these functionalities.
///////////////////////////////////////////////////////////////////////////////////
#ifndef GLM_GTX_dual_quaternion
#define GLM_GTX_dual_quaternion
// Dependency:
#include "../glm.hpp"
#include "../gtc/constants.hpp"
#include "../gtc/quaternion.hpp"
#if(defined(GLM_MESSAGES) && !defined(GLM_EXT_INCLUDED))
# pragma message("GLM: GLM_GTX_dual_quaternion extension included")
#endif
namespace glm{
namespace detail
{
template <typename T, precision P>
struct tdualquat
{
    enum ctor{null};  // tag type for the "leave uninitialized" constructor idiom
    typedef glm::detail::tquat<T, P> part_type;  // type of each half

public:
    // The two quaternion halves. The (orientation, translation) constructor
    // below indicates `real` carries the rotation and `dual` the translation
    // component of the rigid transform.
    glm::detail::tquat<T, P> real, dual;

    // Number of addressable parts (real + dual).
    GLM_FUNC_DECL GLM_CONSTEXPR int length() const;

    // Constructors
    tdualquat();
    explicit tdualquat(tquat<T, P> const & real);
    tdualquat(tquat<T, P> const & real,tquat<T, P> const & dual);
    tdualquat(tquat<T, P> const & orientation,tvec3<T, P> const& translation);

    //////////////////////////////////////////////////////////////
    // tdualquat conversions
    explicit tdualquat(tmat2x4<T, P> const & holder_mat);
    explicit tdualquat(tmat3x4<T, P> const & aug_mat);

    // Accesses — indexes the two parts; presumably [0] = real, [1] = dual
    // (confirm in dual_quaternion.inl).
    part_type & operator[](int i);
    part_type const & operator[](int i) const;

    // Operators — uniform scalar scaling/division applied to both parts.
    tdualquat<T, P> & operator*=(T const & s);
    tdualquat<T, P> & operator/=(T const & s);
};
// -- Free operators (implemented in dual_quaternion.inl) --

// Quaternion negation.
template <typename T, precision P>
detail::tquat<T, P> operator- (
    detail::tquat<T, P> const & q);

// Component-wise sum of two dual quaternions.
template <typename T, precision P>
detail::tdualquat<T, P> operator+ (
    detail::tdualquat<T, P> const & q,
    detail::tdualquat<T, P> const & p);

// Dual-quaternion product.
template <typename T, precision P>
detail::tdualquat<T, P> operator* (
    detail::tdualquat<T, P> const & q,
    detail::tdualquat<T, P> const & p);

// Quaternion-vector products (vec3/vec4, both operand orders).
template <typename T, precision P>
detail::tvec3<T, P> operator* (
    detail::tquat<T, P> const & q,
    detail::tvec3<T, P> const & v);

template <typename T, precision P>
detail::tvec3<T, P> operator* (
    detail::tvec3<T, P> const & v,
    detail::tquat<T, P> const & q);

template <typename T, precision P>
detail::tvec4<T, P> operator* (
    detail::tquat<T, P> const & q,
    detail::tvec4<T, P> const & v);

template <typename T, precision P>
detail::tvec4<T, P> operator* (
    detail::tvec4<T, P> const & v,
    detail::tquat<T, P> const & q);

// Scalar multiplication (both operand orders) and scalar division.
template <typename T, precision P>
detail::tdualquat<T, P> operator* (
    detail::tdualquat<T, P> const & q,
    T const & s);

template <typename T, precision P>
detail::tdualquat<T, P> operator* (
    T const & s,
    detail::tdualquat<T, P> const & q);

template <typename T, precision P>
detail::tdualquat<T, P> operator/ (
    detail::tdualquat<T, P> const & q,
    T const & s);
} //namespace detail
/// @addtogroup gtc_dual_quaternion
/// @{
/// Returns the normalized quaternion.
///
/// @see gtc_dual_quaternion
template <typename T, precision P>
detail::tdualquat<T, P> normalize(
detail::tdualquat<T, P> const & q);
/// Returns the linear interpolation of two dual quaternions.
///
/// @see gtc_dual_quaternion
template <typename T, precision P>
detail::tdualquat<T, P> lerp(
detail::tdualquat<T, P> const & x,
detail::tdualquat<T, P> const & y,
T const & a);
/// Returns the q inverse.
///
/// @see gtc_dual_quaternion
template <typename T, precision P>
detail::tdualquat<T, P> inverse(
detail::tdualquat<T, P> const & q);
/*
/// Extracts a rotation part from dual-quaternion to a 3 * 3 matrix.
/// TODO
///
/// @see gtc_dual_quaternion
template <typename T, precision P>
detail::tmat3x3<T, P> mat3_cast(
detail::tdualquat<T, P> const & x);
*/
/// Converts a quaternion to a 2 * 4 matrix.
///
/// @see gtc_dual_quaternion
template <typename T, precision P>
detail::tmat2x4<T, P> mat2x4_cast(
detail::tdualquat<T, P> const & x);
/// Converts a quaternion to a 3 * 4 matrix.
///
/// @see gtc_dual_quaternion
template <typename T, precision P>
detail::tmat3x4<T, P> mat3x4_cast(
detail::tdualquat<T, P> const & x);
/// Converts a 2 * 4 matrix (matrix which holds real and dual parts) to a quaternion.
///
/// @see gtc_dual_quaternion
template <typename T, precision P>
detail::tdualquat<T, P> dualquat_cast(
detail::tmat2x4<T, P> const & x);
/// Converts a 3 * 4 matrix (augmented matrix rotation + translation) to a quaternion.
///
/// @see gtc_dual_quaternion
template <typename T, precision P>
detail::tdualquat<T, P> dualquat_cast(
detail::tmat3x4<T, P> const & x);
/// Dual-quaternion of low single-precision floating-point numbers.
///
/// @see gtc_dual_quaternion
typedef detail::tdualquat<float, lowp> lowp_dualquat;
/// Dual-quaternion of medium single-precision floating-point numbers.
///
/// @see gtc_dual_quaternion
typedef detail::tdualquat<float, mediump> mediump_dualquat;
/// Dual-quaternion of high single-precision floating-point numbers.
///
/// @see gtc_dual_quaternion
typedef detail::tdualquat<float, highp> highp_dualquat;
/// Dual-quaternion of low single-precision floating-point numbers.
///
/// @see gtc_dual_quaternion
typedef detail::tdualquat<float, lowp> lowp_fdualquat;
/// Dual-quaternion of medium single-precision floating-point numbers.
///
/// @see gtc_dual_quaternion
typedef detail::tdualquat<float, mediump> mediump_fdualquat;
/// Dual-quaternion of high single-precision floating-point numbers.
///
/// @see gtc_dual_quaternion
typedef detail::tdualquat<float, highp> highp_fdualquat;
/// Dual-quaternion of low double-precision floating-point numbers.
///
/// @see gtc_dual_quaternion
typedef detail::tdualquat<double, lowp> lowp_ddualquat;
/// Dual-quaternion of medium double-precision floating-point numbers.
///
/// @see gtc_dual_quaternion
typedef detail::tdualquat<double, mediump> mediump_ddualquat;
/// Dual-quaternion of high double-precision floating-point numbers.
///
/// @see gtc_dual_quaternion
typedef detail::tdualquat<double, highp> highp_ddualquat;
#if(!defined(GLM_PRECISION_HIGHP_FLOAT) && !defined(GLM_PRECISION_MEDIUMP_FLOAT) && !defined(GLM_PRECISION_LOWP_FLOAT))
/// Dual-quaternion of floating-point numbers.
///
/// @see gtc_dual_quaternion
typedef highp_fdualquat dualquat;
/// Dual-quaternion of single-precision floating-point numbers.
///
/// @see gtc_dual_quaternion
typedef highp_fdualquat fdualquat;
#elif(defined(GLM_PRECISION_HIGHP_FLOAT) && !defined(GLM_PRECISION_MEDIUMP_FLOAT) && !defined(GLM_PRECISION_LOWP_FLOAT))
typedef highp_fdualquat dualquat;
typedef highp_fdualquat fdualquat;
#elif(!defined(GLM_PRECISION_HIGHP_FLOAT) && defined(GLM_PRECISION_MEDIUMP_FLOAT) && !defined(GLM_PRECISION_LOWP_FLOAT))
typedef mediump_fdualquat dualquat;
typedef mediump_fdualquat fdualquat;
#elif(!defined(GLM_PRECISION_HIGHP_FLOAT) && !defined(GLM_PRECISION_MEDIUMP_FLOAT) && defined(GLM_PRECISION_LOWP_FLOAT))
typedef lowp_fdualquat dualquat;
typedef lowp_fdualquat fdualquat;
#else
# error "GLM error: multiple default precision requested for single-precision floating-point types"
#endif
#if(!defined(GLM_PRECISION_HIGHP_DOUBLE) && !defined(GLM_PRECISION_MEDIUMP_DOUBLE) && !defined(GLM_PRECISION_LOWP_DOUBLE))
/// Dual-quaternion of default double-precision floating-point numbers.
///
/// @see gtc_dual_quaternion
typedef highp_ddualquat ddualquat;
#elif(defined(GLM_PRECISION_HIGHP_DOUBLE) && !defined(GLM_PRECISION_MEDIUMP_DOUBLE) && !defined(GLM_PRECISION_LOWP_DOUBLE))
typedef highp_ddualquat ddualquat;
#elif(!defined(GLM_PRECISION_HIGHP_DOUBLE) && defined(GLM_PRECISION_MEDIUMP_DOUBLE) && !defined(GLM_PRECISION_LOWP_DOUBLE))
typedef mediump_ddualquat ddualquat;
#elif(!defined(GLM_PRECISION_HIGHP_DOUBLE) && !defined(GLM_PRECISION_MEDIUMP_DOUBLE) && defined(GLM_PRECISION_LOWP_DOUBLE))
typedef lowp_ddualquat ddualquat;
#else
# error "GLM error: Multiple default precision requested for double-precision floating-point types"
#endif
/// @}
} //namespace glm
#include "dual_quaternion.inl"
#endif//GLM_GTX_dual_quaternion
| mit |
kerrishotts/Mastering-PhoneGap-Code-Package | logology/test/NotesModel.js | 2882 | /*
* Logology testing suite
*
* This suite tests the basic features of Logology. Don't assume that this test suite
* is in any way complete; as bugs are discovered, new tests will be added.
*
* Author: Kerri Shotts <kerrishotts@gmail.com>
* http://www.photokandy.com/books/mastering-phonegap
*
* MIT LICENSED
*
* Copyright (c) 2016 Packt Publishing
* Portions Copyright (c) 2016 Kerri Shotts (photoKandy Studios LLC)
* Portions Copyright various third parties where noted.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this
* software and associated documentation files (the "Software"), to deal in the Software
* without restriction, including without limitation the rights to use, copy, modify,
* merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to the following
* conditions:
* The above copyright notice and this permission notice shall be included in all copies
* or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT
* OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
"use strict";
let should = require("./helpers/setup").should;
import Notes from "../src/www/js/app/models/Notes";
describe("Notes", () => {
    describe("#Create", () => {
        it("should be able to create a new Notes object", () => {
            let notes = new Notes();
            return notes.should.exist;
        });
    });
    describe("#Manage", () => {
        // Shared instance: the tests in this group run in order and build on
        // each other's state (save -> check -> read -> remove -> check).
        let notes = {};
        //
        // Fix: every chai-as-promised assertion below (`should.become`,
        // `should.be.fulfilled`) yields a promise. The originals did not
        // `return` them, so Mocha never awaited the results and a rejected
        // promise (i.e. a failing test) would pass silently.
        it("report that an un-noted word should not have a note", () => {
            notes = new Notes();
            return notes.doesWordHaveANote("mumble").should.become(false);
        });
        it("should be able to save a note", () => {
            return notes.saveNoteForWord("cat", "Cats are cute!").should.be.fulfilled;
        });
        it("... and now the word should have a note", () => {
            return notes.doesWordHaveANote("cat").should.become(true);
        });
        it("... and the note should be what we expect", () => {
            return notes.getNoteForWord("cat").should.become("Cats are cute!");
        });
        it("should be able to remove the note from the word", () => {
            return notes.removeNoteFromWord("cat").should.be.fulfilled;
        });
        it("... and now it shouldn't have a note", () => {
            return notes.doesWordHaveANote("cat").should.become(false);
        });
    });
});
| mit |
jzawadzki/symfony | src/Symfony/Component/Form/Tests/AbstractBootstrap4LayoutTest.php | 31231 | <?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\Form\Tests;
use Symfony\Component\Form\FormError;
/**
* Abstract class providing test cases for the Bootstrap 4 Twig form theme.
*
* @author Hidde Wieringa <hidde@hiddewieringa.nl>
*/
abstract class AbstractBootstrap4LayoutTest extends AbstractBootstrap3LayoutTest
{
public function testLabelOnForm()
{
    // A compound DateType renders its "label" as a <legend> with the
    // Bootstrap 4 col-form-legend class, not as a <label> element.
    $form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\DateType');
    $view = $form->createView();
    $this->renderWidget($view, array('label' => 'foo'));
    $html = $this->renderLabel($view);

    $this->assertMatchesXpath($html,
'/legend
    [@class="col-form-legend required"]
    [.="[trans]Name[/trans]"]
'
    );
}
public function testLabelDoesNotRenderFieldAttributes()
{
    // Widget-level "attr" must not leak onto the <label>; the label keeps
    // only its own Bootstrap classes.
    $form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\TextType');
    $html = $this->renderLabel($form->createView(), null, array(
        'attr' => array(
            'class' => 'my&class',
        ),
    ));

    $this->assertMatchesXpath($html,
'/label
    [@for="name"]
    [@class="form-control-label required"]
'
    );
}
public function testLabelWithCustomAttributesPassedDirectly()
{
    // "label_attr" classes are merged in front of the Bootstrap label classes.
    $form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\TextType');
    $html = $this->renderLabel($form->createView(), null, array(
        'label_attr' => array(
            'class' => 'my&class',
        ),
    ));

    $this->assertMatchesXpath($html,
'/label
    [@for="name"]
    [@class="my&class form-control-label required"]
'
    );
}
public function testLabelWithCustomTextAndCustomAttributesPassedDirectly()
{
    // Label text passed directly to the renderer is translated and combined
    // with the merged label_attr classes.
    $form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\TextType');
    $html = $this->renderLabel($form->createView(), 'Custom label', array(
        'label_attr' => array(
            'class' => 'my&class',
        ),
    ));

    $this->assertMatchesXpath($html,
'/label
    [@for="name"]
    [@class="my&class form-control-label required"]
    [.="[trans]Custom label[/trans]"]
'
    );
}
public function testLabelWithCustomTextAsOptionAndCustomAttributesPassedDirectly()
{
    // Same as the previous test, but the custom text comes from the form
    // type's "label" option instead of the render call.
    $form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\TextType', null, array(
        'label' => 'Custom label',
    ));
    $html = $this->renderLabel($form->createView(), null, array(
        'label_attr' => array(
            'class' => 'my&class',
        ),
    ));

    $this->assertMatchesXpath($html,
'/label
    [@for="name"]
    [@class="my&class form-control-label required"]
    [.="[trans]Custom label[/trans]"]
'
    );
}
public function testLegendOnExpandedType()
{
    // An expanded ChoiceType (radio group) labels the whole group with a
    // <legend>, using the custom label text.
    $form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', null, array(
        'label' => 'Custom label',
        'expanded' => true,
        'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b'),
    ));
    $view = $form->createView();
    $this->renderWidget($view);
    $html = $this->renderLabel($view);

    $this->assertMatchesXpath($html,
'/legend
    [@class="col-form-legend required"]
    [.="[trans]Custom label[/trans]"]
'
    );
}
public function testErrors()
{
    // Form errors render as a Bootstrap alert containing an unstyled <ul>
    // with one <li> per error, in insertion order.
    $form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\TextType');
    $form->addError(new FormError('[trans]Error 1[/trans]'));
    $form->addError(new FormError('[trans]Error 2[/trans]'));
    $view = $form->createView();
    $html = $this->renderErrors($view);

    $this->assertMatchesXpath($html,
'/div
    [@class="alert alert-danger"]
    [
        ./ul
            [@class="list-unstyled mb-0"]
            [
                ./li
                    [.="[trans]Error 1[/trans]"]
                /following-sibling::li
                    [.="[trans]Error 2[/trans]"]
            ]
            [count(./li)=2]
    ]
'
    );
}
public function testCheckedCheckbox()
{
    // A checked checkbox is wrapped in div.form-check with the input nested
    // inside its label; custom id/class are merged onto the input.
    $form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\CheckboxType', true);

    $this->assertWidgetMatchesXpath($form->createView(), array('id' => 'my&id', 'attr' => array('class' => 'my&class')),
'/div
    [@class="form-check"]
    [
        ./label
            [.=" [trans]Name[/trans]"]
            [@class="form-check-label required"]
            [
                ./input[@type="checkbox"][@name="name"][@id="my&id"][@class="my&class form-check-input"][@checked="checked"][@value="1"]
            ]
    ]
'
    );
}
public function testSingleChoiceAttributesWithMainAttributes()
{
    // A collapsed single-choice renders a <select>; the "attr" class is
    // merged with Bootstrap's form-control class.
    $form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', '&a', array(
        'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b'),
        'multiple' => false,
        'expanded' => false,
        'attr' => array('class' => 'bar&baz'),
    ));

    $this->assertWidgetMatchesXpath($form->createView(), array('attr' => array('class' => 'bar&baz')),
'/select
    [@name="name"]
    [@class="bar&baz form-control"]
    [not(@required)]
    [
        ./option[@value="&a"][@selected="selected"][.="[trans]Choice&A[/trans]"]
        /following-sibling::option[@value="&b"][not(@selected)][.="[trans]Choice&B[/trans]"]
    ]
    [count(./option)=2]
'
    );
}
public function testSingleExpandedChoiceAttributesWithMainAttributes()
{
    // An expanded single-choice renders radios in div.form-check wrappers;
    // the "attr" class lands on the outer group <div>.
    $form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', '&a', array(
        'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b'),
        'multiple' => false,
        'expanded' => true,
        'attr' => array('class' => 'bar&baz'),
    ));

    $this->assertWidgetMatchesXpath($form->createView(), array('attr' => array('class' => 'bar&baz')),
'/div
    [@class="bar&baz"]
    [
        ./div
            [@class="form-check"]
            [
                ./label
                    [.=" [trans]Choice&A[/trans]"]
                    [
                        ./input[@type="radio"][@name="name"][@id="name_0"][@value="&a"][@checked]
                    ]
            ]
        /following-sibling::div
            [@class="form-check"]
            [
                ./label
                    [.=" [trans]Choice&B[/trans]"]
                    [
                        ./input[@type="radio"][@name="name"][@id="name_1"][@value="&b"][not(@checked)]
                    ]
            ]
        /following-sibling::input[@type="hidden"][@id="name__token"]
    ]
'
    );
}
public function testUncheckedCheckbox()
{
    // Same markup as the checked case, but the input must not carry the
    // "checked" attribute.
    $form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\CheckboxType', false);

    $this->assertWidgetMatchesXpath($form->createView(), array('id' => 'my&id', 'attr' => array('class' => 'my&class')),
'/div
    [@class="form-check"]
    [
        ./label
            [.=" [trans]Name[/trans]"]
            [
                ./input[@type="checkbox"][@name="name"][@id="my&id"][@class="my&class form-check-input"][not(@checked)]
            ]
    ]
'
    );
}
public function testCheckboxWithValue()
{
    // The "value" option is emitted verbatim (HTML-escaped) on the input.
    $form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\CheckboxType', false, array(
        'value' => 'foo&bar',
    ));

    $this->assertWidgetMatchesXpath($form->createView(), array('id' => 'my&id', 'attr' => array('class' => 'my&class')),
'/div
    [@class="form-check"]
    [
        ./label
            [.=" [trans]Name[/trans]"]
            [
                ./input[@type="checkbox"][@name="name"][@id="my&id"][@class="my&class form-check-input"][@value="foo&bar"]
            ]
    ]
'
    );
}
public function testSingleChoiceExpanded()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', '&a', array(
'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b'),
'multiple' => false,
'expanded' => true,
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[.=" [trans]Choice&A[/trans]"]
[
./input[@type="radio"][@name="name"][@id="name_0"][@value="&a"][@checked]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" [trans]Choice&B[/trans]"]
[
./input[@type="radio"][@name="name"][@id="name_1"][@value="&b"][not(@checked)]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testSingleChoiceExpandedWithLabelsAsFalse()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', '&a', array(
'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b'),
'choice_label' => false,
'multiple' => false,
'expanded' => true,
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[
./input[@type="radio"][@name="name"][@id="name_0"][@value="&a"][@checked]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[
./input[@type="radio"][@name="name"][@id="name_1"][@value="&b"][not(@checked)]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testSingleChoiceExpandedWithLabelsSetByCallable()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', '&a', array(
'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b', 'Choice&C' => '&c'),
'choice_label' => function ($choice, $label, $value) {
if ('&b' === $choice) {
return false;
}
return 'label.'.$value;
},
'multiple' => false,
'expanded' => true,
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[.=" [trans]label.&a[/trans]"]
[
./input[@type="radio"][@name="name"][@id="name_0"][@value="&a"][@checked]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[
./input[@type="radio"][@name="name"][@id="name_1"][@value="&b"][not(@checked)]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" [trans]label.&c[/trans]"]
[
./input[@type="radio"][@name="name"][@id="name_2"][@value="&c"][not(@checked)]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testSingleChoiceExpandedWithLabelsSetFalseByCallable()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', '&a', array(
'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b'),
'choice_label' => function () {
return false;
},
'multiple' => false,
'expanded' => true,
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[
./input[@type="radio"][@name="name"][@id="name_0"][@value="&a"][@checked]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[
./input[@type="radio"][@name="name"][@id="name_1"][@value="&b"][not(@checked)]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testSingleChoiceExpandedWithoutTranslation()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', '&a', array(
'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b'),
'multiple' => false,
'expanded' => true,
'choice_translation_domain' => false,
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[.=" Choice&A"]
[
./input[@type="radio"][@name="name"][@id="name_0"][@value="&a"][@checked]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" Choice&B"]
[
./input[@type="radio"][@name="name"][@id="name_1"][@value="&b"][not(@checked)]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testSingleChoiceExpandedAttributes()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', '&a', array(
'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b'),
'choice_attr' => array('Choice&B' => array('class' => 'foo&bar')),
'multiple' => false,
'expanded' => true,
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[.=" [trans]Choice&A[/trans]"]
[
./input[@type="radio"][@name="name"][@id="name_0"][@value="&a"][@checked]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" [trans]Choice&B[/trans]"]
[
./input[@type="radio"][@name="name"][@id="name_1"][@value="&b"][not(@checked)][@class="foo&bar form-check-input"]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testSingleChoiceExpandedWithPlaceholder()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', '&a', array(
'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b'),
'multiple' => false,
'expanded' => true,
'placeholder' => 'Test&Me',
'required' => false,
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[.=" [trans]Test&Me[/trans]"]
[
./input[@type="radio"][@name="name"][@id="name_placeholder"][not(@checked)]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" [trans]Choice&A[/trans]"]
[
./input[@type="radio"][@name="name"][@id="name_0"][@checked]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" [trans]Choice&B[/trans]"]
[
./input[@type="radio"][@name="name"][@id="name_1"][not(@checked)]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testSingleChoiceExpandedWithPlaceholderWithoutTranslation()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', '&a', array(
'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b'),
'multiple' => false,
'expanded' => true,
'required' => false,
'choice_translation_domain' => false,
'placeholder' => 'Placeholder&Not&Translated',
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[.=" Placeholder&Not&Translated"]
[
./input[@type="radio"][@name="name"][@id="name_placeholder"][not(@checked)]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" Choice&A"]
[
./input[@type="radio"][@name="name"][@id="name_0"][@checked]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" Choice&B"]
[
./input[@type="radio"][@name="name"][@id="name_1"][not(@checked)]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testSingleChoiceExpandedWithBooleanValue()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', true, array(
'choices' => array('Choice&A' => '1', 'Choice&B' => '0'),
'multiple' => false,
'expanded' => true,
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[.=" [trans]Choice&A[/trans]"]
[
./input[@type="radio"][@name="name"][@id="name_0"][@checked]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" [trans]Choice&B[/trans]"]
[
./input[@type="radio"][@name="name"][@id="name_1"][not(@checked)]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testMultipleChoiceExpanded()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', array('&a', '&c'), array(
'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b', 'Choice&C' => '&c'),
'multiple' => true,
'expanded' => true,
'required' => true,
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[.=" [trans]Choice&A[/trans]"]
[
./input[@type="checkbox"][@name="name[]"][@id="name_0"][@checked][not(@required)]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" [trans]Choice&B[/trans]"]
[
./input[@type="checkbox"][@name="name[]"][@id="name_1"][not(@checked)][not(@required)]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" [trans]Choice&C[/trans]"]
[
./input[@type="checkbox"][@name="name[]"][@id="name_2"][@checked][not(@required)]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testMultipleChoiceExpandedWithLabelsAsFalse()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', array('&a'), array(
'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b'),
'choice_label' => false,
'multiple' => true,
'expanded' => true,
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[
./input[@type="checkbox"][@name="name[]"][@id="name_0"][@value="&a"][@checked]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[
./input[@type="checkbox"][@name="name[]"][@id="name_1"][@value="&b"][not(@checked)]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testMultipleChoiceExpandedWithLabelsSetByCallable()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', array('&a'), array(
'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b', 'Choice&C' => '&c'),
'choice_label' => function ($choice, $label, $value) {
if ('&b' === $choice) {
return false;
}
return 'label.'.$value;
},
'multiple' => true,
'expanded' => true,
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[.=" [trans]label.&a[/trans]"]
[
./input[@type="checkbox"][@name="name[]"][@id="name_0"][@value="&a"][@checked]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[
./input[@type="checkbox"][@name="name[]"][@id="name_1"][@value="&b"][not(@checked)]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" [trans]label.&c[/trans]"]
[
./input[@type="checkbox"][@name="name[]"][@id="name_2"][@value="&c"][not(@checked)]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testMultipleChoiceExpandedWithLabelsSetFalseByCallable()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', array('&a'), array(
'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b'),
'choice_label' => function () {
return false;
},
'multiple' => true,
'expanded' => true,
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[
./input[@type="checkbox"][@name="name[]"][@id="name_0"][@value="&a"][@checked]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[
./input[@type="checkbox"][@name="name[]"][@id="name_1"][@value="&b"][not(@checked)]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testMultipleChoiceExpandedWithoutTranslation()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', array('&a', '&c'), array(
'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b', 'Choice&C' => '&c'),
'multiple' => true,
'expanded' => true,
'required' => true,
'choice_translation_domain' => false,
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[.=" Choice&A"]
[
./input[@type="checkbox"][@name="name[]"][@id="name_0"][@checked][not(@required)]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" Choice&B"]
[
./input[@type="checkbox"][@name="name[]"][@id="name_1"][not(@checked)][not(@required)]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" Choice&C"]
[
./input[@type="checkbox"][@name="name[]"][@id="name_2"][@checked][not(@required)]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testMultipleChoiceExpandedAttributes()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\ChoiceType', array('&a', '&c'), array(
'choices' => array('Choice&A' => '&a', 'Choice&B' => '&b', 'Choice&C' => '&c'),
'choice_attr' => array('Choice&B' => array('class' => 'foo&bar')),
'multiple' => true,
'expanded' => true,
'required' => true,
));
$this->assertWidgetMatchesXpath($form->createView(), array(),
'/div
[
./div
[@class="form-check"]
[
./label
[.=" [trans]Choice&A[/trans]"]
[
./input[@type="checkbox"][@name="name[]"][@id="name_0"][@checked][not(@required)]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" [trans]Choice&B[/trans]"]
[
./input[@type="checkbox"][@name="name[]"][@id="name_1"][not(@checked)][not(@required)][@class="foo&bar form-check-input"]
]
]
/following-sibling::div
[@class="form-check"]
[
./label
[.=" [trans]Choice&C[/trans]"]
[
./input[@type="checkbox"][@name="name[]"][@id="name_2"][@checked][not(@required)]
]
]
/following-sibling::input[@type="hidden"][@id="name__token"]
]
'
);
}
public function testCheckedRadio()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\RadioType', true);
$this->assertWidgetMatchesXpath($form->createView(), array('id' => 'my&id', 'attr' => array('class' => 'my&class')),
'/div
[@class="form-check"]
[
./label
[@class="form-check-label required"]
[
./input
[@id="my&id"]
[@type="radio"]
[@name="name"]
[@class="my&class form-check-input"]
[@checked="checked"]
[@value="1"]
]
]
'
);
}
public function testUncheckedRadio()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\RadioType', false);
$this->assertWidgetMatchesXpath($form->createView(), array('id' => 'my&id', 'attr' => array('class' => 'my&class')),
'/div
[@class="form-check"]
[
./label
[@class="form-check-label required"]
[
./input
[@id="my&id"]
[@type="radio"]
[@name="name"]
[@class="my&class form-check-input"]
[not(@checked)]
]
]
'
);
}
public function testRadioWithValue()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\RadioType', false, array(
'value' => 'foo&bar',
));
$this->assertWidgetMatchesXpath($form->createView(), array('id' => 'my&id', 'attr' => array('class' => 'my&class')),
'/div
[@class="form-check"]
[
./label
[@class="form-check-label required"]
[
./input
[@id="my&id"]
[@type="radio"]
[@name="name"]
[@class="my&class form-check-input"]
[@value="foo&bar"]
]
]
'
);
}
public function testButtonAttributeNameRepeatedIfTrue()
{
$form = $this->factory->createNamed('button', 'Symfony\Component\Form\Extension\Core\Type\ButtonType', null, array(
'attr' => array('foo' => true),
));
$html = $this->renderWidget($form->createView());
// foo="foo"
$this->assertSame('<button type="button" id="button" name="button" foo="foo" class="btn-secondary btn">[trans]Button[/trans]</button>', $html);
}
public function testFile()
{
$form = $this->factory->createNamed('name', 'Symfony\Component\Form\Extension\Core\Type\FileType');
$this->assertWidgetMatchesXpath($form->createView(), array('attr' => array('class' => 'my&class form-control-file')),
'/input
[@type="file"]
'
);
}
}
| mit |
jweather/sonic-pi | app/server/vendor/parslet/spec/acceptance/examples_spec.rb | 969 | require 'spec_helper'
require 'open3'
describe "Regression on" do
Dir["example/*.rb"].each do |example|
context example do
# Generates a product path for a given example file.
def product_path(str, ext)
str.
gsub('.rb', ".#{ext}").
gsub('example/','example/output/')
end
it "runs successfully" do
stdin, stdout, stderr = Open3.popen3("ruby #{example}")
handle_map = {
stdout => :out,
stderr => :err
}
expectation_found = handle_map.any? do |io, ext|
name = product_path(example, ext)
if File.exists?(name)
io.read.strip.should == File.read(name).strip
true
end
end
unless expectation_found
fail "Example doesn't have either an .err or an .out file. "+
"Please create in examples/output!"
end
end
end
end
end
| mit |
phusion/passenger | src/cxx_supportlib/vendor-modified/boost/asio/basic_serial_port.hpp | 31972 | //
// basic_serial_port.hpp
// ~~~~~~~~~~~~~~~~~~~~~
//
// Copyright (c) 2003-2021 Christopher M. Kohlhoff (chris at kohlhoff dot com)
// Copyright (c) 2008 Rep Invariant Systems, Inc. (info@repinvariant.com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
#ifndef BOOST_ASIO_BASIC_SERIAL_PORT_HPP
#define BOOST_ASIO_BASIC_SERIAL_PORT_HPP
#if defined(_MSC_VER) && (_MSC_VER >= 1200)
# pragma once
#endif // defined(_MSC_VER) && (_MSC_VER >= 1200)
#include <boost/asio/detail/config.hpp>
#if defined(BOOST_ASIO_HAS_SERIAL_PORT) \
|| defined(GENERATING_DOCUMENTATION)
#include <string>
#include <boost/asio/any_io_executor.hpp>
#include <boost/asio/async_result.hpp>
#include <boost/asio/detail/handler_type_requirements.hpp>
#include <boost/asio/detail/io_object_impl.hpp>
#include <boost/asio/detail/non_const_lvalue.hpp>
#include <boost/asio/detail/throw_error.hpp>
#include <boost/asio/detail/type_traits.hpp>
#include <boost/asio/error.hpp>
#include <boost/asio/execution_context.hpp>
#include <boost/asio/serial_port_base.hpp>
#if defined(BOOST_ASIO_HAS_IOCP)
# include <boost/asio/detail/win_iocp_serial_port_service.hpp>
#else
# include <boost/asio/detail/reactive_serial_port_service.hpp>
#endif
#if defined(BOOST_ASIO_HAS_MOVE)
# include <utility>
#endif // defined(BOOST_ASIO_HAS_MOVE)
#include <boost/asio/detail/push_options.hpp>
namespace boost {
namespace asio {
/// Provides serial port functionality.
/**
* The basic_serial_port class provides a wrapper over serial port
* functionality.
*
* @par Thread Safety
* @e Distinct @e objects: Safe.@n
* @e Shared @e objects: Unsafe.
*/
template <typename Executor = any_io_executor>
class basic_serial_port
: public serial_port_base
{
public:
/// The type of the executor associated with the object.
typedef Executor executor_type;
/// Rebinds the serial port type to another executor.
template <typename Executor1>
struct rebind_executor
{
/// The serial port type when rebound to the specified executor.
typedef basic_serial_port<Executor1> other;
};
/// The native representation of a serial port.
#if defined(GENERATING_DOCUMENTATION)
typedef implementation_defined native_handle_type;
#elif defined(BOOST_ASIO_HAS_IOCP)
typedef detail::win_iocp_serial_port_service::native_handle_type
native_handle_type;
#else
typedef detail::reactive_serial_port_service::native_handle_type
native_handle_type;
#endif
/// A basic_basic_serial_port is always the lowest layer.
typedef basic_serial_port lowest_layer_type;
/// Construct a basic_serial_port without opening it.
/**
* This constructor creates a serial port without opening it.
*
* @param ex The I/O executor that the serial port will use, by default, to
* dispatch handlers for any asynchronous operations performed on the
* serial port.
*/
explicit basic_serial_port(const executor_type& ex)
: impl_(0, ex)
{
}
/// Construct a basic_serial_port without opening it.
/**
* This constructor creates a serial port without opening it.
*
* @param context An execution context which provides the I/O executor that
* the serial port will use, by default, to dispatch handlers for any
* asynchronous operations performed on the serial port.
*/
template <typename ExecutionContext>
explicit basic_serial_port(ExecutionContext& context,
typename constraint<
is_convertible<ExecutionContext&, execution_context&>::value,
defaulted_constraint
>::type = defaulted_constraint())
: impl_(0, 0, context)
{
}
/// Construct and open a basic_serial_port.
/**
* This constructor creates and opens a serial port for the specified device
* name.
*
* @param ex The I/O executor that the serial port will use, by default, to
* dispatch handlers for any asynchronous operations performed on the
* serial port.
*
* @param device The platform-specific device name for this serial
* port.
*/
basic_serial_port(const executor_type& ex, const char* device)
: impl_(0, ex)
{
boost::system::error_code ec;
impl_.get_service().open(impl_.get_implementation(), device, ec);
boost::asio::detail::throw_error(ec, "open");
}
/// Construct and open a basic_serial_port.
/**
* This constructor creates and opens a serial port for the specified device
* name.
*
* @param context An execution context which provides the I/O executor that
* the serial port will use, by default, to dispatch handlers for any
* asynchronous operations performed on the serial port.
*
* @param device The platform-specific device name for this serial
* port.
*/
template <typename ExecutionContext>
basic_serial_port(ExecutionContext& context, const char* device,
typename constraint<
is_convertible<ExecutionContext&, execution_context&>::value
>::type = 0)
: impl_(0, 0, context)
{
boost::system::error_code ec;
impl_.get_service().open(impl_.get_implementation(), device, ec);
boost::asio::detail::throw_error(ec, "open");
}
/// Construct and open a basic_serial_port.
/**
* This constructor creates and opens a serial port for the specified device
* name.
*
* @param ex The I/O executor that the serial port will use, by default, to
* dispatch handlers for any asynchronous operations performed on the
* serial port.
*
* @param device The platform-specific device name for this serial
* port.
*/
basic_serial_port(const executor_type& ex, const std::string& device)
: impl_(0, ex)
{
boost::system::error_code ec;
impl_.get_service().open(impl_.get_implementation(), device, ec);
boost::asio::detail::throw_error(ec, "open");
}
/// Construct and open a basic_serial_port.
/**
* This constructor creates and opens a serial port for the specified device
* name.
*
* @param context An execution context which provides the I/O executor that
* the serial port will use, by default, to dispatch handlers for any
* asynchronous operations performed on the serial port.
*
* @param device The platform-specific device name for this serial
* port.
*/
template <typename ExecutionContext>
basic_serial_port(ExecutionContext& context, const std::string& device,
typename constraint<
is_convertible<ExecutionContext&, execution_context&>::value
>::type = 0)
: impl_(0, 0, context)
{
boost::system::error_code ec;
impl_.get_service().open(impl_.get_implementation(), device, ec);
boost::asio::detail::throw_error(ec, "open");
}
/// Construct a basic_serial_port on an existing native serial port.
/**
* This constructor creates a serial port object to hold an existing native
* serial port.
*
* @param ex The I/O executor that the serial port will use, by default, to
* dispatch handlers for any asynchronous operations performed on the
* serial port.
*
* @param native_serial_port A native serial port.
*
* @throws boost::system::system_error Thrown on failure.
*/
basic_serial_port(const executor_type& ex,
const native_handle_type& native_serial_port)
: impl_(0, ex)
{
boost::system::error_code ec;
impl_.get_service().assign(impl_.get_implementation(),
native_serial_port, ec);
boost::asio::detail::throw_error(ec, "assign");
}
/// Construct a basic_serial_port on an existing native serial port.
/**
* This constructor creates a serial port object to hold an existing native
* serial port.
*
* @param context An execution context which provides the I/O executor that
* the serial port will use, by default, to dispatch handlers for any
* asynchronous operations performed on the serial port.
*
* @param native_serial_port A native serial port.
*
* @throws boost::system::system_error Thrown on failure.
*/
template <typename ExecutionContext>
basic_serial_port(ExecutionContext& context,
const native_handle_type& native_serial_port,
typename constraint<
is_convertible<ExecutionContext&, execution_context&>::value
>::type = 0)
: impl_(0, 0, context)
{
boost::system::error_code ec;
impl_.get_service().assign(impl_.get_implementation(),
native_serial_port, ec);
boost::asio::detail::throw_error(ec, "assign");
}
#if defined(BOOST_ASIO_HAS_MOVE) || defined(GENERATING_DOCUMENTATION)
/// Move-construct a basic_serial_port from another.
/**
* This constructor moves a serial port from one object to another.
*
* @param other The other basic_serial_port object from which the move will
* occur.
*
* @note Following the move, the moved-from object is in the same state as if
* constructed using the @c basic_serial_port(const executor_type&)
* constructor.
*/
basic_serial_port(basic_serial_port&& other)
: impl_(std::move(other.impl_))
{
}
/// Move-assign a basic_serial_port from another.
/**
* This assignment operator moves a serial port from one object to another.
*
* @param other The other basic_serial_port object from which the move will
* occur.
*
* @note Following the move, the moved-from object is in the same state as if
* constructed using the @c basic_serial_port(const executor_type&)
* constructor.
*/
basic_serial_port& operator=(basic_serial_port&& other)
{
impl_ = std::move(other.impl_);
return *this;
}
#endif // defined(BOOST_ASIO_HAS_MOVE) || defined(GENERATING_DOCUMENTATION)
/// Destroys the serial port.
/**
* This function destroys the serial port, cancelling any outstanding
* asynchronous wait operations associated with the serial port as if by
* calling @c cancel.
*/
~basic_serial_port()
{
}
/// Get the executor associated with the object.
executor_type get_executor() BOOST_ASIO_NOEXCEPT
{
return impl_.get_executor();
}
/// Get a reference to the lowest layer.
/**
* This function returns a reference to the lowest layer in a stack of
* layers. Since a basic_serial_port cannot contain any further layers, it
* simply returns a reference to itself.
*
* @return A reference to the lowest layer in the stack of layers. Ownership
* is not transferred to the caller.
*/
lowest_layer_type& lowest_layer()
{
return *this;
}
/// Get a const reference to the lowest layer.
/**
* This function returns a const reference to the lowest layer in a stack of
* layers. Since a basic_serial_port cannot contain any further layers, it
* simply returns a reference to itself.
*
* @return A const reference to the lowest layer in the stack of layers.
* Ownership is not transferred to the caller.
*/
const lowest_layer_type& lowest_layer() const
{
return *this;
}
/// Open the serial port using the specified device name.
/**
* This function opens the serial port for the specified device name.
*
* @param device The platform-specific device name.
*
* @throws boost::system::system_error Thrown on failure.
*/
void open(const std::string& device)
{
boost::system::error_code ec;
impl_.get_service().open(impl_.get_implementation(), device, ec);
boost::asio::detail::throw_error(ec, "open");
}
/// Open the serial port using the specified device name.
/**
* This function opens the serial port using the given platform-specific
* device name.
*
* @param device The platform-specific device name.
*
* @param ec Set the indicate what error occurred, if any.
*/
BOOST_ASIO_SYNC_OP_VOID open(const std::string& device,
boost::system::error_code& ec)
{
impl_.get_service().open(impl_.get_implementation(), device, ec);
BOOST_ASIO_SYNC_OP_VOID_RETURN(ec);
}
/// Assign an existing native serial port to the serial port.
/*
* This function opens the serial port to hold an existing native serial port.
*
* @param native_serial_port A native serial port.
*
* @throws boost::system::system_error Thrown on failure.
*/
void assign(const native_handle_type& native_serial_port)
{
boost::system::error_code ec;
impl_.get_service().assign(impl_.get_implementation(),
native_serial_port, ec);
boost::asio::detail::throw_error(ec, "assign");
}
/// Assign an existing native serial port to the serial port.
/*
* This function opens the serial port to hold an existing native serial port.
*
* @param native_serial_port A native serial port.
*
* @param ec Set to indicate what error occurred, if any.
*/
BOOST_ASIO_SYNC_OP_VOID assign(const native_handle_type& native_serial_port,
boost::system::error_code& ec)
{
impl_.get_service().assign(impl_.get_implementation(),
native_serial_port, ec);
BOOST_ASIO_SYNC_OP_VOID_RETURN(ec);
}
/// Determine whether the serial port is open.
bool is_open() const
{
return impl_.get_service().is_open(impl_.get_implementation());
}
/// Close the serial port.
/**
* This function is used to close the serial port. Any asynchronous read or
* write operations will be cancelled immediately, and will complete with the
* boost::asio::error::operation_aborted error.
*
* @throws boost::system::system_error Thrown on failure.
*/
void close()
{
boost::system::error_code ec;
impl_.get_service().close(impl_.get_implementation(), ec);
boost::asio::detail::throw_error(ec, "close");
}
/// Close the serial port.
/**
* This function is used to close the serial port. Any asynchronous read or
* write operations will be cancelled immediately, and will complete with the
* boost::asio::error::operation_aborted error.
*
* @param ec Set to indicate what error occurred, if any.
*/
BOOST_ASIO_SYNC_OP_VOID close(boost::system::error_code& ec)
{
impl_.get_service().close(impl_.get_implementation(), ec);
BOOST_ASIO_SYNC_OP_VOID_RETURN(ec);
}
/// Get the native serial port representation.
/**
* This function may be used to obtain the underlying representation of the
* serial port. This is intended to allow access to native serial port
* functionality that is not otherwise provided.
*/
native_handle_type native_handle()
{
return impl_.get_service().native_handle(impl_.get_implementation());
}
/// Cancel all asynchronous operations associated with the serial port.
/**
* This function causes all outstanding asynchronous read or write operations
* to finish immediately, and the handlers for cancelled operations will be
* passed the boost::asio::error::operation_aborted error.
*
* @throws boost::system::system_error Thrown on failure.
*/
void cancel()
{
boost::system::error_code ec;
impl_.get_service().cancel(impl_.get_implementation(), ec);
boost::asio::detail::throw_error(ec, "cancel");
}
/// Cancel all asynchronous operations associated with the serial port.
/**
* This function causes all outstanding asynchronous read or write operations
* to finish immediately, and the handlers for cancelled operations will be
* passed the boost::asio::error::operation_aborted error.
*
* @param ec Set to indicate what error occurred, if any.
*/
BOOST_ASIO_SYNC_OP_VOID cancel(boost::system::error_code& ec)
{
impl_.get_service().cancel(impl_.get_implementation(), ec);
BOOST_ASIO_SYNC_OP_VOID_RETURN(ec);
}
/// Send a break sequence to the serial port.
/**
* This function causes a break sequence of platform-specific duration to be
* sent out the serial port.
*
* @throws boost::system::system_error Thrown on failure.
*/
void send_break()
{
boost::system::error_code ec;
impl_.get_service().send_break(impl_.get_implementation(), ec);
boost::asio::detail::throw_error(ec, "send_break");
}
/// Send a break sequence to the serial port.
/**
* This function causes a break sequence of platform-specific duration to be
* sent out the serial port.
*
* @param ec Set to indicate what error occurred, if any.
*/
BOOST_ASIO_SYNC_OP_VOID send_break(boost::system::error_code& ec)
{
impl_.get_service().send_break(impl_.get_implementation(), ec);
BOOST_ASIO_SYNC_OP_VOID_RETURN(ec);
}
/// Set an option on the serial port.
/**
* This function is used to set an option on the serial port.
*
* @param option The option value to be set on the serial port.
*
* @throws boost::system::system_error Thrown on failure.
*
* @sa SettableSerialPortOption @n
* boost::asio::serial_port_base::baud_rate @n
* boost::asio::serial_port_base::flow_control @n
* boost::asio::serial_port_base::parity @n
* boost::asio::serial_port_base::stop_bits @n
* boost::asio::serial_port_base::character_size
*/
template <typename SettableSerialPortOption>
void set_option(const SettableSerialPortOption& option)
{
boost::system::error_code ec;
impl_.get_service().set_option(impl_.get_implementation(), option, ec);
boost::asio::detail::throw_error(ec, "set_option");
}
/// Set an option on the serial port.
/**
* This function is used to set an option on the serial port.
*
* @param option The option value to be set on the serial port.
*
* @param ec Set to indicate what error occurred, if any.
*
* @sa SettableSerialPortOption @n
* boost::asio::serial_port_base::baud_rate @n
* boost::asio::serial_port_base::flow_control @n
* boost::asio::serial_port_base::parity @n
* boost::asio::serial_port_base::stop_bits @n
* boost::asio::serial_port_base::character_size
*/
template <typename SettableSerialPortOption>
BOOST_ASIO_SYNC_OP_VOID set_option(const SettableSerialPortOption& option,
boost::system::error_code& ec)
{
impl_.get_service().set_option(impl_.get_implementation(), option, ec);
BOOST_ASIO_SYNC_OP_VOID_RETURN(ec);
}
/// Get an option from the serial port.
/**
* This function is used to get the current value of an option on the serial
* port.
*
* @param option The option value to be obtained from the serial port.
*
* @throws boost::system::system_error Thrown on failure.
*
* @sa GettableSerialPortOption @n
* boost::asio::serial_port_base::baud_rate @n
* boost::asio::serial_port_base::flow_control @n
* boost::asio::serial_port_base::parity @n
* boost::asio::serial_port_base::stop_bits @n
* boost::asio::serial_port_base::character_size
*/
template <typename GettableSerialPortOption>
void get_option(GettableSerialPortOption& option) const
{
boost::system::error_code ec;
impl_.get_service().get_option(impl_.get_implementation(), option, ec);
boost::asio::detail::throw_error(ec, "get_option");
}
/// Get an option from the serial port.
/**
* This function is used to get the current value of an option on the serial
* port.
*
* @param option The option value to be obtained from the serial port.
*
* @param ec Set to indicate what error occurred, if any.
*
* @sa GettableSerialPortOption @n
* boost::asio::serial_port_base::baud_rate @n
* boost::asio::serial_port_base::flow_control @n
* boost::asio::serial_port_base::parity @n
* boost::asio::serial_port_base::stop_bits @n
* boost::asio::serial_port_base::character_size
*/
template <typename GettableSerialPortOption>
BOOST_ASIO_SYNC_OP_VOID get_option(GettableSerialPortOption& option,
boost::system::error_code& ec) const
{
impl_.get_service().get_option(impl_.get_implementation(), option, ec);
BOOST_ASIO_SYNC_OP_VOID_RETURN(ec);
}
/// Write some data to the serial port.
/**
* This function is used to write data to the serial port. The function call
* will block until one or more bytes of the data has been written
* successfully, or until an error occurs.
*
* @param buffers One or more data buffers to be written to the serial port.
*
* @returns The number of bytes written.
*
* @throws boost::system::system_error Thrown on failure. An error code of
* boost::asio::error::eof indicates that the connection was closed by the
* peer.
*
* @note The write_some operation may not transmit all of the data to the
* peer. Consider using the @ref write function if you need to ensure that
* all data is written before the blocking operation completes.
*
* @par Example
* To write a single data buffer use the @ref buffer function as follows:
* @code
* basic_serial_port.write_some(boost::asio::buffer(data, size));
* @endcode
* See the @ref buffer documentation for information on writing multiple
* buffers in one go, and how to use it with arrays, boost::array or
* std::vector.
*/
template <typename ConstBufferSequence>
std::size_t write_some(const ConstBufferSequence& buffers)
{
boost::system::error_code ec;
std::size_t s = impl_.get_service().write_some(
impl_.get_implementation(), buffers, ec);
boost::asio::detail::throw_error(ec, "write_some");
return s;
}
/// Write some data to the serial port.
/**
* This function is used to write data to the serial port. The function call
* will block until one or more bytes of the data has been written
* successfully, or until an error occurs.
*
* @param buffers One or more data buffers to be written to the serial port.
*
* @param ec Set to indicate what error occurred, if any.
*
* @returns The number of bytes written. Returns 0 if an error occurred.
*
* @note The write_some operation may not transmit all of the data to the
* peer. Consider using the @ref write function if you need to ensure that
* all data is written before the blocking operation completes.
*/
template <typename ConstBufferSequence>
std::size_t write_some(const ConstBufferSequence& buffers,
boost::system::error_code& ec)
{
return impl_.get_service().write_some(
impl_.get_implementation(), buffers, ec);
}
/// Start an asynchronous write.
/**
* This function is used to asynchronously write data to the serial port.
* The function call always returns immediately.
*
* @param buffers One or more data buffers to be written to the serial port.
* Although the buffers object may be copied as necessary, ownership of the
* underlying memory blocks is retained by the caller, which must guarantee
* that they remain valid until the handler is called.
*
* @param handler The handler to be called when the write operation completes.
* Copies will be made of the handler as required. The function signature of
* the handler must be:
* @code void handler(
* const boost::system::error_code& error, // Result of operation.
* std::size_t bytes_transferred // Number of bytes written.
* ); @endcode
* Regardless of whether the asynchronous operation completes immediately or
* not, the handler will not be invoked from within this function. On
* immediate completion, invocation of the handler will be performed in a
* manner equivalent to using boost::asio::post().
*
* @note The write operation may not transmit all of the data to the peer.
* Consider using the @ref async_write function if you need to ensure that all
* data is written before the asynchronous operation completes.
*
* @par Example
* To write a single data buffer use the @ref buffer function as follows:
* @code
* basic_serial_port.async_write_some(
* boost::asio::buffer(data, size), handler);
* @endcode
* See the @ref buffer documentation for information on writing multiple
* buffers in one go, and how to use it with arrays, boost::array or
* std::vector.
*/
template <typename ConstBufferSequence,
BOOST_ASIO_COMPLETION_TOKEN_FOR(void (boost::system::error_code,
std::size_t)) WriteHandler
BOOST_ASIO_DEFAULT_COMPLETION_TOKEN_TYPE(executor_type)>
BOOST_ASIO_INITFN_AUTO_RESULT_TYPE(WriteHandler,
void (boost::system::error_code, std::size_t))
async_write_some(const ConstBufferSequence& buffers,
BOOST_ASIO_MOVE_ARG(WriteHandler) handler
BOOST_ASIO_DEFAULT_COMPLETION_TOKEN(executor_type))
{
return async_initiate<WriteHandler,
void (boost::system::error_code, std::size_t)>(
initiate_async_write_some(this), handler, buffers);
}
/// Read some data from the serial port.
/**
* This function is used to read data from the serial port. The function
* call will block until one or more bytes of data has been read successfully,
* or until an error occurs.
*
* @param buffers One or more buffers into which the data will be read.
*
* @returns The number of bytes read.
*
* @throws boost::system::system_error Thrown on failure. An error code of
* boost::asio::error::eof indicates that the connection was closed by the
* peer.
*
* @note The read_some operation may not read all of the requested number of
* bytes. Consider using the @ref read function if you need to ensure that
* the requested amount of data is read before the blocking operation
* completes.
*
* @par Example
* To read into a single data buffer use the @ref buffer function as follows:
* @code
* basic_serial_port.read_some(boost::asio::buffer(data, size));
* @endcode
* See the @ref buffer documentation for information on reading into multiple
* buffers in one go, and how to use it with arrays, boost::array or
* std::vector.
*/
template <typename MutableBufferSequence>
std::size_t read_some(const MutableBufferSequence& buffers)
{
boost::system::error_code ec;
std::size_t s = impl_.get_service().read_some(
impl_.get_implementation(), buffers, ec);
boost::asio::detail::throw_error(ec, "read_some");
return s;
}
/// Read some data from the serial port.
/**
* This function is used to read data from the serial port. The function
* call will block until one or more bytes of data has been read successfully,
* or until an error occurs.
*
* @param buffers One or more buffers into which the data will be read.
*
* @param ec Set to indicate what error occurred, if any.
*
* @returns The number of bytes read. Returns 0 if an error occurred.
*
* @note The read_some operation may not read all of the requested number of
* bytes. Consider using the @ref read function if you need to ensure that
* the requested amount of data is read before the blocking operation
* completes.
*/
template <typename MutableBufferSequence>
std::size_t read_some(const MutableBufferSequence& buffers,
boost::system::error_code& ec)
{
return impl_.get_service().read_some(
impl_.get_implementation(), buffers, ec);
}
/// Start an asynchronous read.
/**
* This function is used to asynchronously read data from the serial port.
* The function call always returns immediately.
*
* @param buffers One or more buffers into which the data will be read.
* Although the buffers object may be copied as necessary, ownership of the
* underlying memory blocks is retained by the caller, which must guarantee
* that they remain valid until the handler is called.
*
* @param handler The handler to be called when the read operation completes.
* Copies will be made of the handler as required. The function signature of
* the handler must be:
* @code void handler(
* const boost::system::error_code& error, // Result of operation.
* std::size_t bytes_transferred // Number of bytes read.
* ); @endcode
* Regardless of whether the asynchronous operation completes immediately or
* not, the handler will not be invoked from within this function. On
* immediate completion, invocation of the handler will be performed in a
* manner equivalent to using boost::asio::post().
*
* @note The read operation may not read all of the requested number of bytes.
* Consider using the @ref async_read function if you need to ensure that the
* requested amount of data is read before the asynchronous operation
* completes.
*
* @par Example
* To read into a single data buffer use the @ref buffer function as follows:
* @code
* basic_serial_port.async_read_some(
* boost::asio::buffer(data, size), handler);
* @endcode
* See the @ref buffer documentation for information on reading into multiple
* buffers in one go, and how to use it with arrays, boost::array or
* std::vector.
*/
  // The macro-heavy preamble below is Boost.Asio's portable spelling of a
  // constrained, defaulted completion token: ReadHandler must be callable as
  // void(error_code, size_t) and defaults to the executor's default
  // completion token type.
  template <typename MutableBufferSequence,
      BOOST_ASIO_COMPLETION_TOKEN_FOR(void (boost::system::error_code,
        std::size_t)) ReadHandler
          BOOST_ASIO_DEFAULT_COMPLETION_TOKEN_TYPE(executor_type)>
  BOOST_ASIO_INITFN_AUTO_RESULT_TYPE(ReadHandler,
      void (boost::system::error_code, std::size_t))
  async_read_some(const MutableBufferSequence& buffers,
      BOOST_ASIO_MOVE_ARG(ReadHandler) handler
        BOOST_ASIO_DEFAULT_COMPLETION_TOKEN(executor_type))
  {
    // Route through async_initiate so any completion token (callback,
    // future, coroutine, ...) can be used; the actual work is launched by
    // the initiate_async_read_some function object defined below.
    return async_initiate<ReadHandler,
        void (boost::system::error_code, std::size_t)>(
          initiate_async_read_some(this), handler, buffers);
  }
private:
// Disallow copying and assignment.
basic_serial_port(const basic_serial_port&) BOOST_ASIO_DELETED;
basic_serial_port& operator=(const basic_serial_port&) BOOST_ASIO_DELETED;
  // Initiation function object for async_write_some.  Passed to
  // async_initiate(), which may invoke it immediately or lazily (e.g. for
  // deferred completion tokens); it forwards the request to the owning
  // serial port's service.
  class initiate_async_write_some
  {
  public:
    typedef Executor executor_type;

    explicit initiate_async_write_some(basic_serial_port* self)
      : self_(self)
    {
    }

    // Associated executor: the owning serial port's executor.
    executor_type get_executor() const BOOST_ASIO_NOEXCEPT
    {
      return self_->get_executor();
    }

    template <typename WriteHandler, typename ConstBufferSequence>
    void operator()(BOOST_ASIO_MOVE_ARG(WriteHandler) handler,
        const ConstBufferSequence& buffers) const
    {
      // If you get an error on the following line it means that your handler
      // does not meet the documented type requirements for a WriteHandler.
      BOOST_ASIO_WRITE_HANDLER_CHECK(WriteHandler, handler) type_check;

      // The service interface needs a non-const lvalue handler; this wrapper
      // materializes one from the (possibly moved-in) argument.
      detail::non_const_lvalue<WriteHandler> handler2(handler);
      self_->impl_.get_service().async_write_some(
          self_->impl_.get_implementation(), buffers,
          handler2.value, self_->impl_.get_executor());
    }

  private:
    basic_serial_port* self_;
  };
  // Initiation function object for async_read_some; mirrors
  // initiate_async_write_some but forwards to the service's
  // async_read_some with a ReadHandler type check.
  class initiate_async_read_some
  {
  public:
    typedef Executor executor_type;

    explicit initiate_async_read_some(basic_serial_port* self)
      : self_(self)
    {
    }

    // Associated executor: the owning serial port's executor.
    executor_type get_executor() const BOOST_ASIO_NOEXCEPT
    {
      return self_->get_executor();
    }

    template <typename ReadHandler, typename MutableBufferSequence>
    void operator()(BOOST_ASIO_MOVE_ARG(ReadHandler) handler,
        const MutableBufferSequence& buffers) const
    {
      // If you get an error on the following line it means that your handler
      // does not meet the documented type requirements for a ReadHandler.
      BOOST_ASIO_READ_HANDLER_CHECK(ReadHandler, handler) type_check;

      // The service interface needs a non-const lvalue handler; this wrapper
      // materializes one from the (possibly moved-in) argument.
      detail::non_const_lvalue<ReadHandler> handler2(handler);
      self_->impl_.get_service().async_read_some(
          self_->impl_.get_implementation(), buffers,
          handler2.value, self_->impl_.get_executor());
    }

  private:
    basic_serial_port* self_;
  };
#if defined(BOOST_ASIO_HAS_IOCP)
detail::io_object_impl<detail::win_iocp_serial_port_service, Executor> impl_;
#else
detail::io_object_impl<detail::reactive_serial_port_service, Executor> impl_;
#endif
};
} // namespace asio
} // namespace boost
#include <boost/asio/detail/pop_options.hpp>
#endif // defined(BOOST_ASIO_HAS_SERIAL_PORT)
// || defined(GENERATING_DOCUMENTATION)
#endif // BOOST_ASIO_BASIC_SERIAL_PORT_HPP
| mit |
cgiganti/chartmaster | node_modules/prova/node_modules/run-serially/node_modules/iter/lib/serial.js | 414 | var nextTick = require("just-next-tick");
module.exports = serial;
/**
 * Invoke `chain.step()` once per index in [from, to), strictly one after
 * another, starting on the next tick.
 *
 * `chain` is an accessor-style object:
 *   chain.step()     -> function(done, i) — the work for index i; calls
 *                       done(error) when finished.
 *   chain.complete() -> optional function() called after the last step.
 *   chain.error()    -> optional function(error) called on the first error,
 *                       which also stops the iteration.
 *
 * Fixes over the previous version: each accessor is called once per use
 * (it was previously invoked twice — once for the truthiness test and once
 * to obtain the callback), `i` is declared before its first use, and a
 * stray semicolon after the inner function declaration is removed.
 */
function serial (from, to, chain) {
  var i = from;
  nextTick(next);

  function next () {
    if (i >= to) {
      var complete = chain.complete();
      if (complete) complete();
      return;
    }

    chain.step()(function (error) {
      if (error) {
        var onError = chain.error();
        if (onError) onError(error);
        return;
      }
      i++;
      next();
    }, i);
  }
}
| mit |
motion3/dino_rumble | pygame/tests/surface_test.py | 96274 | import os
if __name__ == '__main__':
import sys
pkg_dir = os.path.split(os.path.abspath(__file__))[0]
parent_dir, pkg_name = os.path.split(pkg_dir)
is_pygame_pkg = (pkg_name == 'tests' and
os.path.split(parent_dir)[1] == 'pygame')
if not is_pygame_pkg:
sys.path.insert(0, parent_dir)
else:
is_pygame_pkg = __name__.startswith('pygame.tests.')
import unittest
if is_pygame_pkg:
from pygame.tests import test_utils
from pygame.tests.test_utils import example_path
try:
from pygame.tests.test_utils.arrinter import *
except ImportError:
pass
else:
from test import test_utils
from test.test_utils import example_path
try:
from test.test_utils.arrinter import *
except ImportError:
pass
import pygame
from pygame.locals import *
from pygame.compat import xrange_, as_bytes, as_unicode
from pygame.bufferproxy import BufferProxy
import gc
import weakref
import ctypes
def intify(i):
    """If i is a long, cast to an int while preserving the bits"""
    # Values without the sign bit set are already representable as-is.
    if not (i & 0x80000000):
        return i
    return int(i & 0xFFFFFFFF)
def longify(i):
    """If i is an int, cast to a long while preserving the bits"""
    if i < 0:
        # Reinterpret a negative 32-bit value as its unsigned bit pattern.
        return 0xFFFFFFFF & i
    # Fix: the original returned ``long(i)``, which raises NameError on
    # Python 3 (where plain int already has arbitrary precision).  Returning
    # the value unchanged is numerically identical on both Python versions.
    return i
class SurfaceTypeTest(unittest.TestCase):
    def test_set_clip( self ):
        """ see if surface.set_clip(None) works correctly.
        """
        s = pygame.Surface((800, 600))
        r = pygame.Rect(10, 10, 10, 10)
        s.set_clip(r)
        # Mutating the rect after set_clip must not affect the stored clip.
        r.move_ip(10, 0)
        # Passing None must restore the clip to the full surface area.
        s.set_clip(None)
        res = s.get_clip()
        # this was garbled before.
        self.assertEqual(res[0], 0)
        self.assertEqual(res[2], 800)
    def test_print(self):
        # repr() of a software surface encodes size, depth and SW/HW flag.
        surf = pygame.Surface((70,70), 0, 32)
        self.assertEqual(repr(surf), '<Surface(70x70x32 SW)>')
    def test_keyword_arguments(self):
        # Surface() must accept flags/depth as keyword arguments.
        surf = pygame.Surface((70,70), flags=SRCALPHA, depth=32)
        self.assertEqual(surf.get_flags() & SRCALPHA, SRCALPHA)
        self.assertEqual(surf.get_bitsize(), 32)

        # sanity check to make sure the check below is valid
        surf_16 = pygame.Surface((70,70), 0, 16)
        self.assertEqual(surf_16.get_bytesize(), 2)

        # try again with an argument list
        surf_16 = pygame.Surface((70,70), depth=16)
        self.assertEqual(surf_16.get_bytesize(), 2)
    def test_set_at(self):
        """set_at() accepts both color tuples and mapped integers on a
        24-bit surface, and get_at() returns a pygame.Color."""
        #24bit surfaces
        s = pygame.Surface( (100, 100), 0, 24)
        s.fill((0,0,0))

        # set it with a tuple.
        s.set_at((0,0), (10,10,10, 255))
        r = s.get_at((0,0))
        self.failUnless(isinstance(r, pygame.Color))
        self.assertEqual(r, (10,10,10, 255))

        # try setting a color with a single integer.
        s.fill((0,0,0,255))
        s.set_at ((10, 1), 0x0000FF)
        r = s.get_at((10,1))
        self.assertEqual(r, (0,0,255, 255))
    def test_SRCALPHA(self):
        """SRCALPHA is honored on 32-bit surfaces and rejected on 24-bit."""
        # has the flag been passed in ok?
        surf = pygame.Surface((70,70), SRCALPHA, 32)
        self.assertEqual(surf.get_flags() & SRCALPHA, SRCALPHA)

        #24bit surfaces can not have SRCALPHA.
        self.assertRaises(ValueError, pygame.Surface, (100, 100), pygame.SRCALPHA, 24)

        # if we have a 32 bit surface, the SRCALPHA should have worked too.
        surf2 = pygame.Surface((70,70), SRCALPHA)
        if surf2.get_bitsize() == 32:
            self.assertEqual(surf2.get_flags() & SRCALPHA, SRCALPHA)
    def test_masks(self):
        """Surface creation validates the RGBA mask argument."""
        def make_surf(bpp, flags, masks):
            # NOTE(review): returns None implicitly, so the assertEqual
            # below only verifies that construction does not raise.
            pygame.Surface((10, 10), flags, bpp, masks)

        # With some masks SDL_CreateRGBSurface does not work properly.
        masks = (0xFF000000, 0xFF0000, 0xFF00, 0)
        self.assertEqual(make_surf(32, 0, masks), None)
        # For 24 and 32 bit surfaces Pygame assumes no losses.
        masks = (0x7F0000, 0xFF00, 0xFF, 0)
        self.failUnlessRaises(ValueError, make_surf, 24, 0, masks)
        self.failUnlessRaises(ValueError, make_surf, 32, 0, masks)
        # What contiguous bits in a mask.
        masks = (0x6F0000, 0xFF00, 0xFF, 0)
        self.failUnlessRaises(ValueError, make_surf, 32, 0, masks)
    def test_get_bounding_rect (self):
        """get_bounding_rect() shrinks to the smallest rect that contains
        all non-transparent pixels (per-pixel alpha or colorkey)."""
        # Fully transparent surface -> empty bounding rect.
        surf = pygame.Surface ((70, 70), SRCALPHA, 32)
        surf.fill((0,0,0,0))
        bound_rect = surf.get_bounding_rect()
        self.assertEqual(bound_rect.width, 0)
        self.assertEqual(bound_rect.height, 0)

        # A single pixel with alpha=1 gives a 1x1 rect at that position.
        surf.set_at((30,30),(255,255,255,1))
        bound_rect = surf.get_bounding_rect()
        self.assertEqual(bound_rect.left, 30)
        self.assertEqual(bound_rect.top, 30)
        self.assertEqual(bound_rect.width, 1)
        self.assertEqual(bound_rect.height, 1)

        # Two adjacent pixels grow the rect to cover both.
        surf.set_at((29,29),(255,255,255,1))
        bound_rect = surf.get_bounding_rect()
        self.assertEqual(bound_rect.left, 29)
        self.assertEqual(bound_rect.top, 29)
        self.assertEqual(bound_rect.width, 2)
        self.assertEqual(bound_rect.height, 2)

        # Without alpha/colorkey, every pixel counts -> full surface.
        surf = pygame.Surface ((70, 70), 0, 24)
        surf.fill((0,0,0))
        bound_rect = surf.get_bounding_rect()
        self.assertEqual(bound_rect.width, surf.get_width())
        self.assertEqual(bound_rect.height, surf.get_height())

        # With the fill color as colorkey, the surface is "empty" again.
        surf.set_colorkey((0,0,0))
        bound_rect = surf.get_bounding_rect()
        self.assertEqual(bound_rect.width, 0)
        self.assertEqual(bound_rect.height, 0)
        surf.set_at((30,30),(255,255,255))
        bound_rect = surf.get_bounding_rect()
        self.assertEqual(bound_rect.left, 30)
        self.assertEqual(bound_rect.top, 30)
        self.assertEqual(bound_rect.width, 1)
        self.assertEqual(bound_rect.height, 1)
        surf.set_at((60,60),(255,255,255))
        bound_rect = surf.get_bounding_rect()
        self.assertEqual(bound_rect.left, 30)
        self.assertEqual(bound_rect.top, 30)
        self.assertEqual(bound_rect.width, 31)
        self.assertEqual(bound_rect.height, 31)

        # Issue #180
        pygame.display.init()
        try:
            surf = pygame.Surface((4, 1), 0, 8)
            surf.fill((255, 255, 255))
            surf.get_bounding_rect() # Segfault.
        finally:
            pygame.quit()
    def test_copy(self):
        # __doc__ (as of 2008-06-25) for pygame.surface.Surface.copy:

          # Surface.copy(): return Surface
          # create a new copy of a Surface

        color = (25, 25, 25, 25)
        s1 = pygame.Surface((32,32), pygame.SRCALPHA, 32)
        s1.fill(color)

        s2 = s1.copy()

        # The copy must match in size and pixel contents.
        s1rect = s1.get_rect()
        s2rect = s2.get_rect()

        self.assert_(s1rect.size == s2rect.size)
        self.assert_(s2.get_at((10,10)) == color)
    def test_fill(self):
        # __doc__ (as of 2008-06-25) for pygame.surface.Surface.fill:

          # Surface.fill(color, rect=None, special_flags=0): return Rect
          # fill Surface with a solid color

        color = (25, 25, 25, 25)
        fill_rect = pygame.Rect(0, 0, 16, 16)

        s1 = pygame.Surface((32,32), pygame.SRCALPHA, 32)
        s1.fill(color, fill_rect)

        # Every pixel inside fill_rect got the color ...
        for pt in test_utils.rect_area_pts(fill_rect):
            self.assert_(s1.get_at(pt) == color )

        # ... and the pixels just outside it did not.
        for pt in test_utils.rect_outer_bounds(fill_rect):
            self.assert_(s1.get_at(pt) != color )
def test_fill_negative_coordinates(self):
# negative coordinates should be clipped by fill, and not draw outside the surface.
color = (25, 25, 25, 25)
color2 = (20, 20, 20, 25)
fill_rect = pygame.Rect(-10, -10, 16, 16)
s1 = pygame.Surface((32,32), pygame.SRCALPHA, 32)
r1 = s1.fill(color, fill_rect)
c = s1.get_at((0,0))
self.assertEqual(c, color)
# make subsurface in the middle to test it doesn't over write.
s2 = s1.subsurface((5, 5, 5, 5))
r2 = s2.fill(color2, (-3, -3, 5, 5))
c2 = s1.get_at((4,4))
self.assertEqual(c, color)
# rect returns the area we actually fill.
r3 = s2.fill(color2, (-30, -30, 5, 5))
# since we are using negative coords, it should be an zero sized rect.
self.assertEqual(tuple(r3), (0, 0, 0, 0))
    def test_fill_keyword_args(self):
        # fill() must accept color/rect/special_flags as keywords; with
        # BLEND_ADD only the rect area changes.
        color = (1, 2, 3, 255)
        area = (1, 1, 2, 2)
        s1 = pygame.Surface((4, 4), 0, 32)
        s1.fill(special_flags=pygame.BLEND_ADD, color=color, rect=area)
        self.assert_(s1.get_at((0, 0)) == (0, 0, 0, 255))
        self.assert_(s1.get_at((1, 1)) == color)
########################################################################
    def test_get_alpha(self):
        # __doc__ (as of 2008-06-25) for pygame.surface.Surface.get_alpha:

          # Surface.get_alpha(): return int_value or None
          # get the current Surface transparency value

        s1 = pygame.Surface((32,32), pygame.SRCALPHA, 32)
        self.assert_(s1.get_alpha() == 255)

        # set_alpha/get_alpha must round-trip for representative values.
        for alpha in (0, 32, 127, 255):
            s1.set_alpha(alpha)
            for t in range(4): s1.set_alpha(s1.get_alpha())
            self.assert_(s1.get_alpha() == alpha)
########################################################################
    def test_get_bytesize(self):
        # __doc__ (as of 2008-06-25) for pygame.surface.Surface.get_bytesize:

          # Surface.get_bytesize(): return int
          # get the bytes used per Surface pixel

        # A 32-bit surface uses 4 bytes per pixel.
        s1 = pygame.Surface((32,32), pygame.SRCALPHA, 32)
        self.assert_(s1.get_bytesize() == 4)
        self.assert_(s1.get_bitsize() == 32)
########################################################################
    def test_get_flags(self):
        # __doc__ (as of 2008-06-25) for pygame.surface.Surface.get_flags:

          # Surface.get_flags(): return int
          # get the additional flags used for the Surface

        s1 = pygame.Surface((32,32), pygame.SRCALPHA, 32)
        self.assert_(s1.get_flags() == pygame.SRCALPHA)
########################################################################
    def test_get_parent(self):
        # __doc__ (as of 2008-06-25) for pygame.surface.Surface.get_parent:

          # Surface.get_parent(): return Surface
          # find the parent of a subsurface

        parent = pygame.Surface((16, 16))
        child = parent.subsurface((0,0,5,5))

        # Identity check: the child must reference the exact parent object.
        self.assert_(child.get_parent() is parent)
########################################################################
    def test_get_rect(self):
        # __doc__ (as of 2008-06-25) for pygame.surface.Surface.get_rect:

          # Surface.get_rect(**kwargs): return Rect
          # get the rectangular area of the Surface

        surf = pygame.Surface((16, 16))
        rect = surf.get_rect()
        self.assert_(rect.size == (16, 16))
########################################################################
    def test_get_width__size_and_height(self):
        # __doc__ (as of 2008-06-25) for pygame.surface.Surface.get_width:

          # Surface.get_width(): return width
          # get the width of the Surface

        # Sweep a grid of sizes (including 0) and check width/height/size
        # all agree with the constructor arguments.
        for w in xrange_(0, 255, 32):
            for h in xrange_(0, 127, 15):
                s = pygame.Surface((w, h))
                self.assertEquals(s.get_width(), w)
                self.assertEquals(s.get_height(), h)
                self.assertEquals(s.get_size(), (w, h))
    def test_get_view(self):
        """get_view() returns a BufferProxy for every array kind the pixel
        depth supports, and raises ValueError otherwise.  Kinds: '0'/'1'
        (flat), '2' (2D), '3' (3D RGB), 'r'/'g'/'b'/'a' (single plane)."""
        # Check that BufferProxys are returned when array depth is supported,
        # ValueErrors returned otherwise.
        Error = ValueError
        s = pygame.Surface((5, 7), 0, 8)
        self.assertRaises(Error, s.get_view, '0')
        self.assertRaises(Error, s.get_view, '1')
        v = s.get_view('2')
        self.assert_(isinstance(v, BufferProxy))
        self.assertRaises(Error, s.get_view, '3')
        s = pygame.Surface((8, 7), 0, 8)
        length = s.get_bytesize() * s.get_width() * s.get_height()
        v = s.get_view('0')
        self.assert_(isinstance(v, BufferProxy))
        self.assertEqual(v.length, length)
        v = s.get_view('1')
        self.assert_(isinstance(v, BufferProxy))
        self.assertEqual(v.length, length)
        s = pygame.Surface((5, 7), 0, 16)
        self.assertRaises(Error, s.get_view, '0')
        self.assertRaises(Error, s.get_view, '1')
        v = s.get_view('2')
        self.assert_(isinstance(v, BufferProxy))
        self.assertRaises(Error, s.get_view, '3')
        s = pygame.Surface((8, 7), 0, 16)
        length = s.get_bytesize() * s.get_width() * s.get_height()
        v = s.get_view('0')
        self.assert_(isinstance(v, BufferProxy))
        self.assertEqual(v.length, length)
        v = s.get_view('1')
        self.assert_(isinstance(v, BufferProxy))
        self.assertEqual(v.length, length)
        s = pygame.Surface((5, 7), pygame.SRCALPHA, 16)
        v = s.get_view('2')
        self.assert_(isinstance(v, BufferProxy))
        self.assertRaises(Error, s.get_view, '3')
        s = pygame.Surface((5, 7), 0, 24)
        self.assertRaises(Error, s.get_view, '0')
        self.assertRaises(Error, s.get_view, '1')
        v = s.get_view('2')
        self.assertTrue(isinstance(v, BufferProxy))
        v = s.get_view('3')
        self.assert_(isinstance(v, BufferProxy))
        s = pygame.Surface((8, 7), 0, 24)
        length = s.get_bytesize() * s.get_width() * s.get_height()
        v = s.get_view('0')
        self.assert_(isinstance(v, BufferProxy))
        self.assertEqual(v.length, length)
        v = s.get_view('1')
        self.assertTrue(isinstance(v, BufferProxy))
        self.assertEqual(v.length, length)
        s = pygame.Surface((5, 7), 0, 32)
        length = s.get_bytesize() * s.get_width() * s.get_height()
        v = s.get_view('0')
        self.assert_(isinstance(v, BufferProxy))
        self.assertEqual(v.length, length)
        v = s.get_view('1')
        self.assert_(isinstance(v, BufferProxy))
        self.assertEqual(v.length, length)
        v = s.get_view('2')
        self.assert_(isinstance(v, BufferProxy))
        v = s.get_view('3')
        self.assert_(isinstance(v, BufferProxy))
        # A non-contiguous subsurface cannot expose flat ('0'/'1') views.
        s2 = s.subsurface((0, 0, 4, 7))
        self.assertRaises(Error, s2.get_view, '0')
        self.assertRaises(Error, s2.get_view, '1')
        s2 = None
        s = pygame.Surface((5, 7), pygame.SRCALPHA, 32)
        v = s.get_view('2')
        self.assert_(isinstance(v, BufferProxy))
        v = s.get_view('3')
        self.assert_(isinstance(v, BufferProxy))
        v = s.get_view('a')
        self.assert_(isinstance(v, BufferProxy))
        v = s.get_view('A')
        self.assert_(isinstance(v, BufferProxy))
        v = s.get_view('r')
        self.assert_(isinstance(v, BufferProxy))
        v = s.get_view('G')
        self.assert_(isinstance(v, BufferProxy))
        v = s.get_view('g')
        self.assert_(isinstance(v, BufferProxy))
        v = s.get_view('B')
        self.assert_(isinstance(v, BufferProxy))
        v = s.get_view('b')
        # Check default argument value: '2'
        s = pygame.Surface((2, 4), 0, 32)
        v = s.get_view()
        ai = ArrayInterface(v)
        self.assertEqual(ai.nd, 2)
        # Check locking.
        s = pygame.Surface((2, 4), 0, 32)
        self.assert_(not s.get_locked())
        v = s.get_view('2')
        self.assert_(not s.get_locked())
        # Requesting the array interface locks the surface for the lifetime
        # of the view object, not of the interface dict itself.
        c = v.__array_interface__
        self.assert_(s.get_locked())
        c = None
        gc.collect()
        self.assert_(s.get_locked())
        v = None
        gc.collect()
        self.assert_(not s.get_locked())
        # Check invalid view kind values.
        s = pygame.Surface((2, 4), pygame.SRCALPHA, 32)
        self.assertRaises(TypeError, s.get_view, '')
        self.assertRaises(TypeError, s.get_view, '9')
        self.assertRaises(TypeError, s.get_view, 'RGBA')
        self.assertRaises(TypeError, s.get_view, 2)
        # Both unicode and bytes strings are allowed for kind.
        s = pygame.Surface((2, 4), 0, 32)
        s.get_view(as_unicode('2'))
        s.get_view(as_bytes('2'))
        # Garbage collection
        s = pygame.Surface((2, 4), 0, 32)
        weak_s = weakref.ref(s)
        v = s.get_view('3')
        weak_v = weakref.ref(v)
        gc.collect()
        self.assertTrue(weak_s() is s)
        self.assertTrue(weak_v() is v)
        del v
        gc.collect()
        self.assertTrue(weak_s() is s)
        self.assertTrue(weak_v() is None)
        del s
        gc.collect()
        self.assertTrue(weak_s() is None)
    def test_get_buffer(self):
        # Check that get_buffer works for all pixel sizes and for a subsurface.

        # Check for all pixel sizes
        for bitsize in [8, 16, 24, 32]:
            s = pygame.Surface((5, 7), 0, bitsize)
            length = s.get_pitch() * s.get_height()
            v = s.get_buffer()
            self.assert_(isinstance(v, BufferProxy))
            self.assertEqual(v.length, length)
            self.assertEqual(repr(v), "<BufferProxy(" + str(length) + ")>")

        # Check for a subsurface (not contiguous)
        s = pygame.Surface((7, 10), 0, 32)
        s2 = s.subsurface((1, 2, 5, 7))
        length = s2.get_pitch() * s2.get_height()
        v = s2.get_buffer()
        self.assert_(isinstance(v, BufferProxy))
        self.assertEqual(v.length, length)

        # Check locking.
        # The surface stays locked while a buffer proxy is alive and is
        # unlocked once the proxy is garbage collected.
        s = pygame.Surface((2, 4), 0, 32)
        v = s.get_buffer()
        self.assertTrue(s.get_locked())
        v = None
        gc.collect()
        self.assertFalse(s.get_locked())
    # The old Python 2 buffer-protocol helpers are only present in some
    # builds; register the oldbuf tests only when pygame.bufferproxy
    # exposes get_segcount.
    try:
        pygame.bufferproxy.get_segcount
    except AttributeError:
        pass
    else:
        def test_get_buffer_oldbuf(self):
            self.OLDBUF_get_buffer_oldbuf()

        def test_get_view_oldbuf(self):
            self.OLDBUF_get_view_oldbuf()

    def OLDBUF_get_buffer_oldbuf(self):
        # get_buffer() exposes one contiguous segment spanning the whole
        # pixel memory, starting at the surface's pixel address.
        from pygame.bufferproxy import get_segcount, get_write_buffer

        s = pygame.Surface((2, 4), pygame.SRCALPHA, 32)
        v = s.get_buffer()
        segcount, buflen = get_segcount(v)
        self.assertEqual(segcount, 1)
        self.assertEqual(buflen, s.get_pitch() * s.get_height())
        seglen, segaddr = get_write_buffer(v, 0)
        self.assertEqual(segaddr, s._pixels_address)
        self.assertEqual(seglen, buflen)

    def OLDBUF_get_view_oldbuf(self):
        # A '1' (flat) view exposes one segment per pixel.
        from pygame.bufferproxy import get_segcount, get_write_buffer

        s = pygame.Surface((2, 4), pygame.SRCALPHA, 32)
        v = s.get_view('1')
        segcount, buflen = get_segcount(v)
        self.assertEqual(segcount, 8)
        self.assertEqual(buflen, s.get_pitch() * s.get_height())
        seglen, segaddr = get_write_buffer(v, 7)
        self.assertEqual(segaddr, s._pixels_address + s.get_bytesize() * 7)
        self.assertEqual(seglen, s.get_bytesize())
    def test_set_colorkey(self):
        # __doc__ (as of 2008-06-25) for pygame.surface.Surface.set_colorkey:

          # Surface.set_colorkey(Color, flags=0): return None
          # Surface.set_colorkey(None): return None
          # Set the transparent colorkey

        s = pygame.Surface((16,16), pygame.SRCALPHA, 32)

        # set_colorkey/get_colorkey must round-trip for several colors.
        colorkeys = ((20,189,20, 255),(128,50,50,255), (23, 21, 255,255))

        for colorkey in colorkeys:
            s.set_colorkey(colorkey)

            for t in range(4): s.set_colorkey(s.get_colorkey())

            self.assertEquals(s.get_colorkey(), colorkey)
    def test_set_masks(self):
        # Swapping the red/blue masks must be observable via get_masks().
        s = pygame.Surface((32,32))
        r,g,b,a = s.get_masks()
        s.set_masks((b,g,r,a))
        r2,g2,b2,a2 = s.get_masks()
        self.assertEqual((r,g,b,a), (b2,g2,r2,a2))
    def test_set_shifts(self):
        # Swapping the red/blue shifts must be observable via get_shifts().
        s = pygame.Surface((32,32))
        r,g,b,a = s.get_shifts()
        s.set_shifts((b,g,r,a))
        r2,g2,b2,a2 = s.get_shifts()
        self.assertEqual((r,g,b,a), (b2,g2,r2,a2))
    def test_blit_keyword_args(self):
        # blit() must accept source/dest/area/special_flags as keywords;
        # with BLEND_ADD only the destination area changes.
        color = (1, 2, 3, 255)
        s1 = pygame.Surface((4, 4), 0, 32)
        s2 = pygame.Surface((2, 2), 0, 32)
        s2.fill((1, 2, 3))
        s1.blit(special_flags=BLEND_ADD, source=s2,
                dest=(1, 1), area=s2.get_rect())
        self.assertEqual(s1.get_at((0, 0)), (0, 0, 0, 255))
        self.assertEqual(s1.get_at((1, 1)), color)
def todo_test_blit(self):
# __doc__ (as of 2008-08-02) for pygame.surface.Surface.blit:
# Surface.blit(source, dest, area=None, special_flags = 0): return Rect
# draw one image onto another
#
# Draws a source Surface onto this Surface. The draw can be positioned
# with the dest argument. Dest can either be pair of coordinates
# representing the upper left corner of the source. A Rect can also be
# passed as the destination and the topleft corner of the rectangle
# will be used as the position for the blit. The size of the
# destination rectangle does not effect the blit.
#
# An optional area rectangle can be passed as well. This represents a
# smaller portion of the source Surface to draw.
#
# An optional special flags is for passing in new in 1.8.0: BLEND_ADD,
# BLEND_SUB, BLEND_MULT, BLEND_MIN, BLEND_MAX new in 1.8.1:
# BLEND_RGBA_ADD, BLEND_RGBA_SUB, BLEND_RGBA_MULT, BLEND_RGBA_MIN,
# BLEND_RGBA_MAX BLEND_RGB_ADD, BLEND_RGB_SUB, BLEND_RGB_MULT,
# BLEND_RGB_MIN, BLEND_RGB_MAX With other special blitting flags
# perhaps added in the future.
#
# The return rectangle is the area of the affected pixels, excluding
# any pixels outside the destination Surface, or outside the clipping
# area.
#
# Pixel alphas will be ignored when blitting to an 8 bit Surface.
# special_flags new in pygame 1.8.
self.fail()
    def test_blit__SRCALPHA_opaque_source(self):
        """Blitting a fully opaque (alpha=255) SRCALPHA source must copy its
        color channels over the destination regardless of dest alpha."""
        src = pygame.Surface( (256,256), SRCALPHA ,32)
        dst = src.copy()

        # dst gets varying alpha, src is opaque everywhere.
        for i, j in test_utils.rect_area_pts(src.get_rect()):
            dst.set_at( (i,j), (i,0,0,j) )
            src.set_at( (i,j), (0,i,0,255) )

        dst.blit(src, (0,0))

        for pt in test_utils.rect_area_pts(src.get_rect()):
            self.assertEquals ( dst.get_at(pt)[1], src.get_at(pt)[1] )
    def todo_test_blit__blit_to_self(self): #TODO
        # Blitting a surface onto itself; currently only checks that it
        # does not crash — no assertions yet, hence todo_.
        src = pygame.Surface( (256,256), SRCALPHA, 32)
        rect = src.get_rect()

        for pt, color in test_utils.gradient(rect.width, rect.height):
            src.set_at(pt, color)

        src.blit(src, (0, 0))
    def todo_test_blit__SRCALPHA_to_SRCALPHA_non_zero(self): #TODO
        # " There is no unit test for blitting a SRCALPHA source with non-zero
        #   alpha to a SRCALPHA destination with non-zero alpha " LL
        w,h = size = 32,32

        s = pygame.Surface(size, pygame.SRCALPHA, 32)
        s2 = s.copy()

        s.fill((32,32,32,111))
        s2.fill((32,32,32,31))
        s.blit(s2, (0,0))

        # TODO:
        # what is the correct behaviour ?? should it blend? what algorithm?
        # The expected value below is a placeholder until that is decided.
        self.assertEquals(s.get_at((0,0)), (32,32,32,31))
    def test_blit__SRCALPHA32_to_8(self):
        # Bug: fatal
        # SDL_DisplayConvert segfaults when video is uninitialized.
        # Regression guard: the blit itself is the test; no assertion needed.
        target = pygame.Surface((11, 8), 0, 8)
        color = target.get_palette_at(2)
        source = pygame.Surface((1, 1), pygame.SRCALPHA, 32)
        source.set_at((0, 0), color)
        target.blit(source, (0, 0))
    def test_image_convert_bug_131(self):
        # Bitbucket bug #131: Unable to Surface.convert(32) some 1-bit images.
        # https://bitbucket.org/pygame/pygame/issue/131/unable-to-surfaceconvert-32-some-1-bit

        # Skip test_image_convert_bug_131 for headless tests.
        if os.environ.get('SDL_VIDEODRIVER') == 'dummy':
            return

        pygame.display.init()
        pygame.display.set_mode((640,480))

        im = pygame.image.load(example_path(os.path.join("data", "city.png")))
        im2 = pygame.image.load(example_path(os.path.join("data", "brick.png")))

        self.assertEquals( im.get_palette(), ((0, 0, 0, 255), (255, 255, 255, 255)) )
        self.assertEquals( im2.get_palette(), ((0, 0, 0, 255), (0, 0, 0, 255)) )

        # Converting a 1-bit paletted image to 32 bits must succeed.
        self.assertEqual(repr(im.convert(32)), '<Surface(24x24x32 SW)>')
        self.assertEqual(repr(im2.convert(32)), '<Surface(469x137x32 SW)>')

        # Ensure a palette format to palette format works.
        im3 = im.convert(8)
        self.assertEqual(repr(im3), '<Surface(24x24x8 SW)>')
        self.assertEqual(im3.get_palette(), im.get_palette())

        # It is still an error when the target format really does have
        # an empty palette (all the entries are black).
        self.assertRaises(pygame.error, im2.convert, 8)
        self.assertEqual(pygame.get_error(), "Empty destination palette")
def todo_test_convert(self):
# __doc__ (as of 2008-08-02) for pygame.surface.Surface.convert:
# Surface.convert(Surface): return Surface
# Surface.convert(depth, flags=0): return Surface
# Surface.convert(masks, flags=0): return Surface
# Surface.convert(): return Surface
# change the pixel format of an image
#
# Creates a new copy of the Surface with the pixel format changed. The
# new pixel format can be determined from another existing Surface.
# Otherwise depth, flags, and masks arguments can be used, similar to
# the pygame.Surface() call.
#
# If no arguments are passed the new Surface will have the same pixel
# format as the display Surface. This is always the fastest format for
# blitting. It is a good idea to convert all Surfaces before they are
# blitted many times.
#
# The converted Surface will have no pixel alphas. They will be
# stripped if the original had them. See Surface.convert_alpha() for
# preserving or creating per-pixel alphas.
#
self.fail()
def todo_test_convert_alpha(self):
# __doc__ (as of 2008-08-02) for pygame.surface.Surface.convert_alpha:
# Surface.convert_alpha(Surface): return Surface
# Surface.convert_alpha(): return Surface
# change the pixel format of an image including per pixel alphas
#
# Creates a new copy of the surface with the desired pixel format. The
# new surface will be in a format suited for quick blitting to the
# given format with per pixel alpha. If no surface is given, the new
# surface will be optimized for blitting to the current display.
#
# Unlike the Surface.convert() method, the pixel format for the new
# image will not be exactly the same as the requested source, but it
# will be optimized for fast alpha blitting to the destination.
#
self.fail()
def todo_test_get_abs_offset(self):
# __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_abs_offset:
# Surface.get_abs_offset(): return (x, y)
# find the absolute position of a child subsurface inside its top level parent
#
# Get the offset position of a child subsurface inside of its top
# level parent Surface. If the Surface is not a subsurface this will
# return (0, 0).
#
self.fail()
def todo_test_get_abs_parent(self):
# __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_abs_parent:
# Surface.get_abs_parent(): return Surface
# find the top level parent of a subsurface
#
# Returns the parent Surface of a subsurface. If this is not a
# subsurface then this surface will be returned.
#
self.fail()
    def test_get_at(self):
        """get_at() returns the pixel set by set_at() as a pygame.Color and
        raises IndexError for out-of-bounds coordinates."""
        surf = pygame.Surface((2, 2), 0, 24)
        c00 = pygame.Color(1, 2, 3)
        c01 = pygame.Color(5, 10, 15)
        c10 = pygame.Color(100, 50, 0)
        c11 = pygame.Color(4, 5, 6)
        surf.set_at((0, 0), c00)
        surf.set_at((0, 1), c01)
        surf.set_at((1, 0), c10)
        surf.set_at((1, 1), c11)
        c = surf.get_at((0, 0))
        self.failUnless(isinstance(c, pygame.Color))
        self.failUnlessEqual(c, c00)
        self.failUnlessEqual(surf.get_at((0, 1)), c01)
        self.failUnlessEqual(surf.get_at((1, 0)), c10)
        self.failUnlessEqual(surf.get_at((1, 1)), c11)
        # Out-of-range coordinates (negative or >= size) raise IndexError.
        for p in [(-1, 0), (0, -1), (2, 0), (0, 2)]:
            self.failUnlessRaises(IndexError, surf.get_at, p)
    def test_get_at_mapped(self):
        # get_at_mapped() must agree with map_rgb() at every pixel depth.
        color = pygame.Color(10, 20, 30)
        for bitsize in [8, 16, 24, 32]:
            surf = pygame.Surface((2, 2), 0, bitsize)
            surf.fill(color)
            pixel = surf.get_at_mapped((0, 0))
            self.failUnlessEqual(pixel, surf.map_rgb(color),
                                 "%i != %i, bitsize: %i" %
                                 (pixel, surf.map_rgb(color), bitsize))
def todo_test_get_bitsize(self):
# __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_bitsize:
# Surface.get_bitsize(): return int
# get the bit depth of the Surface pixel format
#
# Returns the number of bits used to represent each pixel. This value
# may not exactly fill the number of bytes used per pixel. For example
# a 15 bit Surface still requires a full 2 bytes.
#
self.fail()
def todo_test_get_clip(self):
# __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_clip:
# Surface.get_clip(): return Rect
# get the current clipping area of the Surface
#
# Return a rectangle of the current clipping area. The Surface will
# always return a valid rectangle that will never be outside the
# bounds of the image. If the Surface has had None set for the
# clipping area, the Surface will return a rectangle with the full
# area of the Surface.
#
self.fail()
def todo_test_get_colorkey(self):
surf = pygame.surface((2, 2), 0, 24)
self.failUnless(surf.get_colorykey() is None)
colorkey = pygame.Color(20, 40, 60)
surf.set_colorkey(colorkey)
ck = surf.get_colorkey()
self.failUnless(isinstance(ck, pygame.Color))
self.failUnlessEqual(ck, colorkey)
def todo_test_get_height(self):
# __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_height:
# Surface.get_height(): return height
# get the height of the Surface
#
# Return the height of the Surface in pixels.
self.fail()
def todo_test_get_locked(self):
# __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_locked:
# Surface.get_locked(): return bool
# test if the Surface is current locked
#
# Returns True when the Surface is locked. It doesn't matter how many
# times the Surface is locked.
#
self.fail()
def todo_test_get_locks(self):
# __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_locks:
# Surface.get_locks(): return tuple
# Gets the locks for the Surface
#
# Returns the currently existing locks for the Surface.
self.fail()
def todo_test_get_losses(self):
# __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_losses:
# Surface.get_losses(): return (R, G, B, A)
# the significant bits used to convert between a color and a mapped integer
#
# Return the least significant number of bits stripped from each color
# in a mapped integer.
#
# This value is not needed for normal Pygame usage.
self.fail()
def todo_test_get_masks(self):
# __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_masks:
# Surface.get_masks(): return (R, G, B, A)
# the bitmasks needed to convert between a color and a mapped integer
#
# Returns the bitmasks used to isolate each color in a mapped integer.
# This value is not needed for normal Pygame usage.
self.fail()
def todo_test_get_offset(self):
# __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_offset:
# Surface.get_offset(): return (x, y)
# find the position of a child subsurface inside a parent
#
# Get the offset position of a child subsurface inside of a parent. If
# the Surface is not a subsurface this will return (0, 0).
#
self.fail()
    def test_get_palette(self):
        """set_palette/get_palette round-trip a full 256-entry palette on an
        8-bit surface; every returned entry is a pygame.Color."""
        pygame.init()
        try:
            palette = [Color(i, i, i) for i in range(256)]
            # An 8-bit surface needs an initialized display for palettes.
            pygame.display.set_mode((100, 50))
            surf = pygame.Surface((2, 2), 0, 8)
            surf.set_palette(palette)
            palette2 = surf.get_palette()
            r,g,b = palette2[0]
            self.failUnlessEqual(len(palette2), len(palette))
            for c2, c in zip(palette2, palette):
                self.failUnlessEqual(c2, c)
            for c in palette2:
                self.failUnless(isinstance(c, pygame.Color))
        finally:
            pygame.quit()
def test_get_palette_at(self):
    """get_palette_at() returns the single Color stored by set_palette_at()
    and raises IndexError for out-of-range palette indices."""
    # See also test_get_palette
    pygame.init()
    try:
        pygame.display.set_mode((100, 50))
        surf = pygame.Surface((2, 2), 0, 8)
        color = pygame.Color(1, 2, 3, 255)
        surf.set_palette_at(0, color)
        color2 = surf.get_palette_at(0)
        self.assertTrue(isinstance(color2, pygame.Color))
        self.assertEqual(color2, color)
        # Valid palette indices are 0..255 inclusive.
        self.assertRaises(IndexError, surf.get_palette_at, -1)
        self.assertRaises(IndexError, surf.get_palette_at, 256)
    finally:
        pygame.quit()
def todo_test_get_pitch(self):
    # Placeholder test (intentionally failing) for Surface.get_pitch.
    # __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_pitch:

    # Surface.get_pitch(): return int
    # get the number of bytes used per Surface row
    #
    # Return the number of bytes separating each row in the Surface.
    # Surfaces in video memory are not always linearly packed. Subsurfaces
    # will also have a larger pitch than their real width.
    #
    # This value is not needed for normal Pygame usage.

    self.fail()
def todo_test_get_shifts(self):
    # Placeholder test (intentionally failing) for Surface.get_shifts.
    # __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_shifts:

    # Surface.get_shifts(): return (R, G, B, A)
    # the bit shifts needed to convert between a color and a mapped integer
    #
    # Returns the pixel shifts need to convert between each color and a
    # mapped integer.
    #
    # This value is not needed for normal Pygame usage.

    self.fail()
def todo_test_get_size(self):
    # Placeholder test (intentionally failing) for Surface.get_size.
    # __doc__ (as of 2008-08-02) for pygame.surface.Surface.get_size:

    # Surface.get_size(): return (width, height)
    # get the dimensions of the Surface
    #
    # Return the width and height of the Surface in pixels.

    self.fail()
def todo_test_lock(self):
    # Placeholder test (intentionally failing) for Surface.lock.
    # __doc__ (as of 2008-08-02) for pygame.surface.Surface.lock:

    # Surface.lock(): return None
    # lock the Surface memory for pixel access
    #
    # Lock the pixel data of a Surface for access. On accelerated
    # Surfaces, the pixel data may be stored in volatile video memory or
    # nonlinear compressed forms. When a Surface is locked the pixel
    # memory becomes available to access by regular software. Code that
    # reads or writes pixel values will need the Surface to be locked.
    #
    # Surfaces should not remain locked for more than necessary. A locked
    # Surface can often not be displayed or managed by Pygame.
    #
    # Not all Surfaces require locking. The Surface.mustlock() method can
    # determine if it is actually required. There is no performance
    # penalty for locking and unlocking a Surface that does not need it.
    #
    # All pygame functions will automatically lock and unlock the Surface
    # data as needed. If a section of code is going to make calls that
    # will repeatedly lock and unlock the Surface many times, it can be
    # helpful to wrap the block inside a lock and unlock pair.
    #
    # It is safe to nest locking and unlocking calls. The surface will
    # only be unlocked after the final lock is released.
    #

    self.fail()
def test_map_rgb(self):
    """map_rgb()/unmap_rgb() round-trip a color on a 32-bit SRCALPHA
    surface, and the mapped integer works with fill() and set_at()."""
    color = Color(0, 128, 255, 64)
    surf = pygame.Surface((5, 5), SRCALPHA, 32)
    c = surf.map_rgb(color)
    self.assertEqual(surf.unmap_rgb(c), color)

    # A fresh SRCALPHA surface starts fully transparent black.
    self.assertEqual(surf.get_at((0, 0)), (0, 0, 0, 0))
    surf.fill(c)
    self.assertEqual(surf.get_at((0, 0)), color)

    surf.fill((0, 0, 0, 0))
    self.assertEqual(surf.get_at((0, 0)), (0, 0, 0, 0))
    surf.set_at((0, 0), c)
    self.assertEqual(surf.get_at((0, 0)), color)
def todo_test_mustlock(self):
    # Placeholder test (intentionally failing) for Surface.mustlock.
    # __doc__ (as of 2008-08-02) for pygame.surface.Surface.mustlock:

    # Surface.mustlock(): return bool
    # test if the Surface requires locking
    #
    # Returns True if the Surface is required to be locked to access pixel
    # data. Usually pure software Surfaces do not require locking. This
    # method is rarely needed, since it is safe and quickest to just lock
    # all Surfaces as needed.
    #
    # All pygame functions will automatically lock and unlock the Surface
    # data as needed. If a section of code is going to make calls that
    # will repeatedly lock and unlock the Surface many times, it can be
    # helpful to wrap the block inside a lock and unlock pair.
    #

    self.fail()
def todo_test_set_alpha(self):
    # Placeholder test for Surface.set_alpha; still flagged with
    # self.fail() because coverage is incomplete.
    # __doc__ (as of 2008-08-02) for pygame.surface.Surface.set_alpha:

    # Surface.set_alpha(value, flags=0): return None
    # Surface.set_alpha(None): return None
    # set the alpha value for the full Surface image
    #
    # Set the current alpha value for the Surface. When blitting this
    # Surface onto a destination, the pixels will be drawn slightly
    # transparent. The alpha value is an integer from 0 to 255, 0 is fully
    # transparent and 255 is fully opaque. If None is passed for the alpha
    # value, then the Surface alpha will be disabled.
    #
    # This value is different than the per pixel Surface alpha. If the
    # Surface format contains per pixel alphas, then this alpha value will
    # be ignored. If the Surface contains per pixel alphas, setting the
    # alpha value to None will disable the per pixel transparency.
    #
    # The optional flags argument can be set to pygame.RLEACCEL to provide
    # better performance on non accelerated displays. An RLEACCEL Surface
    # will be slower to modify, but quicker to blit as a source.
    #
    # Fixed: the flag constant was misspelled SRCALHPA, which would have
    # raised NameError had this test ever been enabled.
    s = pygame.Surface((1,1), SRCALPHA, 32)
    s.fill((1, 2, 3, 4))
    s.set_alpha(None)
    self.assertEqual(s.get_at((0, 0)), (1, 2, 3, 255))
    self.fail()
def test_set_palette(self):
    """set_palette() accepts Colors, 4-tuples and 3-tuples, supports
    partial-palette updates, and rejects non-opaque/non-sequence args."""
    palette = [pygame.Color(i, i, i) for i in range(256)]
    palette[10] = tuple(palette[10])          # 4 element tuple
    palette[11] = tuple(palette[11])[0:3]     # 3 element tuple

    surf = pygame.Surface((2, 2), 0, 8)
    pygame.init()
    try:
        pygame.display.set_mode((100, 50))
        surf.set_palette(palette)
        # Every entry maps to its own index and fills correctly.
        for i in range(256):
            self.assertEqual(surf.map_rgb(palette[i]), i,
                             "palette color %i" % (i,))
            c = palette[i]
            surf.fill(c)
            self.assertEqual(surf.get_at((0, 0)), c,
                             "palette color %i" % (i,))
        # A shorter palette replaces only the leading entries.
        for i in range(10):
            palette[i] = pygame.Color(255 - i, 0, 0)
        surf.set_palette(palette[0:10])
        for i in range(256):
            self.assertEqual(surf.map_rgb(palette[i]), i,
                             "palette color %i" % (i,))
            c = palette[i]
            surf.fill(c)
            self.assertEqual(surf.get_at((0, 0)), c,
                             "palette color %i" % (i,))
        # Colors with a non-opaque alpha are rejected, as are
        # non-sequence-of-colors arguments.
        self.assertRaises(ValueError, surf.set_palette,
                          [Color(1, 2, 3, 254)])
        self.assertRaises(ValueError, surf.set_palette,
                          (1, 2, 3, 254))
    finally:
        pygame.quit()
def test_set_palette_at(self):
    """set_palette_at() replaces one palette entry (Color, 4-tuple or
    3-tuple) and raises IndexError for indices outside 0..255."""
    pygame.init()
    try:
        pygame.display.set_mode((100, 50))
        surf = pygame.Surface((2, 2), 0, 8)
        original = surf.get_palette_at(10)
        replacement = Color(1, 1, 1, 255)
        if replacement == original:
            replacement = Color(2, 2, 2, 255)
        surf.set_palette_at(10, replacement)
        self.assertEqual(surf.get_palette_at(10), replacement)
        # Plain tuples are accepted too (renamed local from `next`,
        # which shadowed the builtin).
        new_color = tuple(original)
        surf.set_palette_at(10, new_color)
        self.assertEqual(surf.get_palette_at(10), new_color)
        new_color = tuple(original)[0:3]
        surf.set_palette_at(10, new_color)
        self.assertEqual(surf.get_palette_at(10), new_color)
        # Out-of-range palette indices raise IndexError.
        self.assertRaises(IndexError,
                          surf.set_palette_at,
                          256, replacement)
        self.assertRaises(IndexError,
                          surf.set_palette_at,
                          -1, replacement)
    finally:
        pygame.quit()
def test_subsurface(self):
    """subsurface() accepts a rect or 4 ints, shares the parent's pixel
    format, and fails cleanly on bad args or an uninitialized Surface."""
    # __doc__ (as of 2008-08-02) for pygame.surface.Surface.subsurface:

    # Surface.subsurface(Rect): return Surface
    # create a new surface that references its parent
    #
    # Returns a new Surface that shares its pixels with its new parent.
    # The new Surface is considered a child of the original. Modifications
    # to either Surface pixels will effect each other. Surface information
    # like clipping area and color keys are unique to each Surface.
    #
    # The new Surface will inherit the palette, color key, and alpha
    # settings from its parent.
    #
    # See the Surface.get_offset(), Surface.get_parent() to learn more
    # about the state of a subsurface.
    #

    surf = pygame.Surface((16, 16))
    # Both calling conventions are accepted.
    s = surf.subsurface(0,0,1,1)
    s = surf.subsurface((0,0,1,1))

    #s = surf.subsurface((0,0,1,1), 1)
    # This form is not acceptable.
    #s = surf.subsurface(0,0,10,10, 1)

    self.assertRaises(ValueError, surf.subsurface, (0,0,1,1,666))

    # The child shares the parent's pixel format exactly.
    self.assertEqual(s.get_shifts(), surf.get_shifts())
    self.assertEqual(s.get_masks(), surf.get_masks())
    self.assertEqual(s.get_losses(), surf.get_losses())

    # Issue 2 at Bitbucket.org/pygame/pygame
    # A Surface created via __new__ (no C-level init) must raise, not crash.
    surf = pygame.Surface.__new__(pygame.Surface)
    self.assertRaises(pygame.error, surf.subsurface, (0, 0, 0, 0))
def todo_test_unlock(self):
    # Placeholder test (intentionally failing) for Surface.unlock.
    # __doc__ (as of 2008-08-02) for pygame.surface.Surface.unlock:

    # Surface.unlock(): return None
    # unlock the Surface memory from pixel access
    #
    # Unlock the Surface pixel data after it has been locked. The unlocked
    # Surface can once again be drawn and managed by Pygame. See the
    # Surface.lock() documentation for more details.
    #
    # All pygame functions will automatically lock and unlock the Surface
    # data as needed. If a section of code is going to make calls that
    # will repeatedly lock and unlock the Surface many times, it can be
    # helpful to wrap the block inside a lock and unlock pair.
    #
    # It is safe to nest locking and unlocking calls. The surface will
    # only be unlocked after the final lock is released.
    #

    self.fail()
def test_unmap_rgb(self):
    """unmap_rgb() inverts map_rgb() for all common pixel formats and
    always returns a pygame.Color instance."""
    # Special case, 8 bit-per-pixel surface (has a palette).
    surf = pygame.Surface((2, 2), 0, 8)
    c = (1, 1, 1)  # Unlikely to be in a default palette.
    i = 67
    pygame.init()
    try:
        pygame.display.set_mode((100, 50))
        surf.set_palette_at(i, c)
        unmapped_c = surf.unmap_rgb(i)
        self.assertEqual(unmapped_c, c)
        # Confirm it is a Color instance
        self.assertTrue(isinstance(unmapped_c, pygame.Color))
    finally:
        pygame.quit()

    # Remaining, non-palette, cases.
    c = (128, 64, 12, 255)
    formats = [(0, 16), (0, 24), (0, 32),
               (SRCALPHA, 16), (SRCALPHA, 32)]
    for flags, bitsize in formats:
        surf = pygame.Surface((2, 2), flags, bitsize)
        # Compare the round-tripped color with what fill() actually
        # stored, which accounts for any precision loss (e.g. 16 bpp).
        unmapped_c = surf.unmap_rgb(surf.map_rgb(c))
        surf.fill(c)
        comparison_c = surf.get_at((0, 0))
        self.assertEqual(unmapped_c, comparison_c,
                         "%s != %s, flags: %i, bitsize: %i" %
                         (unmapped_c, comparison_c, flags, bitsize))
        # Confirm it is a Color instance
        self.assertTrue(isinstance(unmapped_c, pygame.Color))
def test_scroll(self):
    """scroll() shifts pixels by (dx, dy), matching an equivalent
    self-blit; it honors the clip rect and keyword arguments."""
    scrolls = [(8, 2, 3),
               (16, 2, 3),
               (24, 2, 3),
               (32, 2, 3),
               (32, -1, -3),
               (32, 0, 0),
               (32, 11, 0),
               (32, 0, 11),
               (32, -11, 0),
               (32, 0, -11),
               (32, -11, 2),
               (32, 2, -11)]
    for bitsize, dx, dy in scrolls:
        surf = pygame.Surface((10, 10), 0, bitsize)
        surf.fill((255, 0, 0))
        surf.fill((0, 255, 0), (2, 2, 2, 2,))
        # A blit of the surface onto itself at (dx, dy) is the reference
        # result that scroll(dx, dy) must reproduce pixel-for-pixel.
        comp = surf.copy()
        comp.blit(surf, (dx, dy))
        surf.scroll(dx, dy)
        w, h = surf.get_size()
        for x in range(w):
            for y in range(h):
                self.assertEqual(surf.get_at((x, y)),
                                 comp.get_at((x, y)),
                                 "%s != %s, bpp:, %i, x: %i, y: %i" %
                                 (surf.get_at((x, y)),
                                  comp.get_at((x, y)),
                                  bitsize, dx, dy))
    # Confirm clip rect containment
    surf = pygame.Surface((20, 13), 0, 32)
    surf.fill((255, 0, 0))
    surf.fill((0, 255, 0), (7, 1, 6, 6))
    comp = surf.copy()
    clip = Rect(3, 1, 8, 14)
    surf.set_clip(clip)
    comp.set_clip(clip)
    comp.blit(surf, (clip.x + 2, clip.y + 3), surf.get_clip())
    surf.scroll(2, 3)
    w, h = surf.get_size()
    for x in range(w):
        for y in range(h):
            self.assertEqual(surf.get_at((x, y)),
                             comp.get_at((x, y)))
    # Confirm keyword arguments and per-pixel alpha
    spot_color = (0, 255, 0, 128)
    surf = pygame.Surface((4, 4), pygame.SRCALPHA, 32)
    surf.fill((255, 0, 0, 255))
    surf.set_at((1, 1), spot_color)
    surf.scroll(dx=1)
    self.assertEqual(surf.get_at((2, 1)), spot_color)
    surf.scroll(dy=1)
    self.assertEqual(surf.get_at((2, 2)), spot_color)
    surf.scroll(dy=1, dx=1)
    self.assertEqual(surf.get_at((3, 3)), spot_color)
    surf.scroll(dx=-3, dy=-3)
    self.assertEqual(surf.get_at((0, 0)), spot_color)
class SurfaceSubtypeTest (unittest.TestCase):
    """Issue #280: Methods that return a new Surface preserve subclasses"""

    class MySurface(pygame.Surface):
        # Minimal Surface subclass; the extra instance attribute lets the
        # tests confirm that per-instance state is NOT carried over to
        # surfaces derived via copy()/convert()/subsurface().
        def __init__(self, *args, **kwds):
            super(SurfaceSubtypeTest.MySurface, self).__init__(*args, **kwds)
            self.an_attribute = True

    def test_copy(self):
        """Ensure method copy() preserves the surface's class

        When Surface is subclassed, the inherited copy() method will return
        instances of the subclass. Non Surface fields are uncopied, however.
        This includes instance attributes.
        """
        ms1 = self.MySurface((32, 32), pygame.SRCALPHA, 32)
        ms2 = ms1.copy()
        self.assertTrue(isinstance(ms2, self.MySurface))
        self.assertTrue(ms1.an_attribute)
        # The copy is a MySurface but did not receive the attribute.
        self.assertRaises(AttributeError, getattr, ms2, "an_attribute")

    def test_convert(self):
        """Ensure method convert() preserves the surface's class

        When Surface is subclassed, the inherited convert() method will return
        instances of the subclass. Non Surface fields are omitted, however.
        This includes instance attributes.
        """
        pygame.display.init()
        try:
            ms1 = self.MySurface((32, 32), 0, 24)
            ms2 = ms1.convert(24)
            self.assertTrue(ms2 is not ms1)
            self.assertTrue(isinstance(ms2, self.MySurface))
            self.assertTrue(ms1.an_attribute)
            self.assertRaises(AttributeError, getattr, ms2, "an_attribute")
        finally:
            pygame.display.quit()

    def test_convert_alpha(self):
        """Ensure method convert_alpha() preserves the surface's class

        When Surface is subclassed, the inherited convert_alpha() method will
        return instances of the subclass. Non Surface fields are omitted,
        however. This includes instance attributes.
        """
        pygame.display.init()
        try:
            # convert_alpha() requires a display mode to be set.
            pygame.display.set_mode((40, 40))
            s = pygame.Surface((32, 32), pygame.SRCALPHA, 16)
            ms1 = self.MySurface((32, 32), pygame.SRCALPHA, 32)
            ms2 = ms1.convert_alpha(s)
            self.assertTrue(ms2 is not ms1)
            self.assertTrue(isinstance(ms2, self.MySurface))
            self.assertTrue(ms1.an_attribute)
            self.assertRaises(AttributeError, getattr, ms2, "an_attribute")
        finally:
            pygame.display.quit()

    def test_subsurface(self):
        """Ensure method subsurface() preserves the surface's class

        When Surface is subclassed, the inherited subsurface() method will
        return instances of the subclass. Non Surface fields are uncopied,
        however. This includes instance attributes.
        """
        ms1 = self.MySurface((32, 32), pygame.SRCALPHA, 32)
        ms2 = ms1.subsurface((4, 5, 10, 12))
        self.assertTrue(isinstance(ms2, self.MySurface))
        self.assertTrue(ms1.an_attribute)
        self.assertRaises(AttributeError, getattr, ms2, "an_attribute")
class SurfaceGetBufferTest (unittest.TestCase):
    # Validates the C-level array interface (get_view) and the new buffer
    # protocol (get_buffer / PyBUF_* flags) exposed by pygame surfaces.

    # These tests requires ctypes. They are disabled if ctypes
    # is not installed.
    #
    try:
        ArrayInterface
    except NameError:
        __tags__ = ('ignore', 'subprocess_ignore')

    # True when SDL stores pixels least-significant-byte first.
    lilendian = pygame.get_sdl_byteorder () == pygame.LIL_ENDIAN

    def _check_interface_2D(self, s):
        # Validate the array-interface struct of a 2D pixel view against
        # the surface's own geometry (one item per pixel).
        s_w, s_h = s.get_size()
        s_bytesize = s.get_bytesize();
        s_pitch = s.get_pitch()
        s_pixels = s._pixels_address

        # check the array interface structure fields.
        v = s.get_view('2')
        inter = ArrayInterface(v)
        flags = PAI_ALIGNED | PAI_NOTSWAPPED | PAI_WRITEABLE
        if (s.get_pitch() == s_w * s_bytesize):
            # Rows are tightly packed, so the view is Fortran contiguous.
            flags |= PAI_FORTRAN
        self.assertEqual(inter.two, 2)
        self.assertEqual(inter.nd, 2)
        self.assertEqual(inter.typekind, 'u')
        self.assertEqual(inter.itemsize, s_bytesize)
        self.assertEqual(inter.shape[0], s_w)
        self.assertEqual(inter.shape[1], s_h)
        self.assertEqual(inter.strides[0], s_bytesize)
        self.assertEqual(inter.strides[1], s_pitch)
        self.assertEqual(inter.flags, flags)
        self.assertEqual(inter.data, s_pixels);

    def _check_interface_3D(self, s):
        # Validate the array-interface struct of a 3D (w, h, color) view.
        # The expected byte offset of the red channel and the per-channel
        # step are derived from the surface's shifts and the host
        # byte order.
        s_w, s_h = s.get_size()
        s_bytesize = s.get_bytesize();
        s_pitch = s.get_pitch()
        s_pixels = s._pixels_address
        s_shifts = list(s.get_shifts())

        # Check for RGB or BGR surface.
        if s_shifts[0:3] == [0, 8, 16]:
            if self.lilendian:
                # RGB
                offset = 0
                step = 1
            else:
                # BGR
                offset = s_bytesize - 1
                step = -1
        elif s_shifts[0:3] == [8, 16, 24]:
            if self.lilendian:
                # xRGB
                offset = 1
                step = 1
            else:
                # BGRx
                offset = s_bytesize - 2
                step = -1
        elif s_shifts[0:3] == [16, 8, 0]:
            if self.lilendian:
                # BGR
                offset = 2
                step = -1
            else:
                # RGB
                offset = s_bytesize - 3
                step = 1
        elif s_shifts[0:3] == [24, 16, 8]:
            if self.lilendian:
                # BGRx
                offset = 2
                step = -1
            else:
                # RGBx
                # NOTE(review): step = -1 under an "RGBx" label is
                # inconsistent with the forward-step RGB branches above —
                # confirm against pygame's buffer implementation.
                offset = s_bytesize - 4
                step = -1
        else:
            # Unrecognized channel layout; nothing to check.
            return

        # check the array interface structure fields.
        v = s.get_view('3')
        inter = ArrayInterface(v)
        flags = PAI_ALIGNED | PAI_NOTSWAPPED | PAI_WRITEABLE
        self.assertEqual(inter.two, 2)
        self.assertEqual(inter.nd, 3)
        self.assertEqual(inter.typekind, 'u')
        self.assertEqual(inter.itemsize, 1)
        self.assertEqual(inter.shape[0], s_w)
        self.assertEqual(inter.shape[1], s_h)
        self.assertEqual(inter.shape[2], 3)
        self.assertEqual(inter.strides[0], s_bytesize)
        self.assertEqual(inter.strides[1], s_pitch)
        self.assertEqual(inter.strides[2], step)
        self.assertEqual(inter.flags, flags)
        self.assertEqual(inter.data, s_pixels + offset);

    def _check_interface_rgba(self, s, plane):
        # Validate the array-interface struct of a single color-plane
        # view ('r', 'g', 'b' or 'a'); plane is the 0-3 channel index.
        s_w, s_h = s.get_size()
        s_bytesize = s.get_bytesize();
        s_pitch = s.get_pitch()
        s_pixels = s._pixels_address
        s_shifts = s.get_shifts()
        s_masks = s.get_masks()

        # Find the color plane position within the pixel.
        if not s_masks[plane]:
            # Plane absent from this pixel format; nothing to check.
            return
        alpha_shift = s_shifts[plane]
        offset = alpha_shift // 8
        if not self.lilendian:
            # Big-endian stores the most significant byte first.
            offset = s_bytesize - offset - 1

        # check the array interface structure fields.
        v = s.get_view('rgba'[plane])
        inter = ArrayInterface(v)
        flags = PAI_ALIGNED | PAI_NOTSWAPPED | PAI_WRITEABLE
        self.assertEqual(inter.two, 2)
        self.assertEqual(inter.nd, 2)
        self.assertEqual(inter.typekind, 'u')
        self.assertEqual(inter.itemsize, 1)
        self.assertEqual(inter.shape[0], s_w)
        self.assertEqual(inter.shape[1], s_h)
        self.assertEqual(inter.strides[0], s_bytesize)
        self.assertEqual(inter.strides[1], s_pitch)
        self.assertEqual(inter.flags, flags)
        self.assertEqual(inter.data, s_pixels + offset);

    def test_array_interface(self):
        # Exercise 2D and 3D views over the common pixel formats.
        self._check_interface_2D(pygame.Surface((5, 7), 0, 8))
        self._check_interface_2D(pygame.Surface((5, 7), 0, 16))
        self._check_interface_2D(pygame.Surface((5, 7), pygame.SRCALPHA, 16))
        self._check_interface_3D(pygame.Surface((5, 7), 0, 24))
        self._check_interface_3D(pygame.Surface((8, 4), 0, 24)) # No gaps
        self._check_interface_2D(pygame.Surface((5, 7), 0, 32))
        self._check_interface_3D(pygame.Surface((5, 7), 0, 32))
        self._check_interface_2D(pygame.Surface((5, 7), pygame.SRCALPHA, 32))
        self._check_interface_3D(pygame.Surface((5, 7), pygame.SRCALPHA, 32))

    def test_array_interface_masks(self):
        """Test non-default color byte orders on 3D views"""

        sz = (5, 7)
        # Reversed RGB byte order
        s = pygame.Surface(sz, 0, 32)
        s_masks = list(s.get_masks())
        masks = [0xff, 0xff00, 0xff0000]
        if s_masks[0:3] == masks or s_masks[0:3] == masks[::-1]:
            # Swap the RGB mask order and retest with the reversed layout.
            masks = s_masks[2::-1] + s_masks[3:4]
            self._check_interface_3D(pygame.Surface(sz, 0, 32, masks))
        s = pygame.Surface(sz, 0, 24)
        s_masks = list(s.get_masks())
        masks = [0xff, 0xff00, 0xff0000]
        if s_masks[0:3] == masks or s_masks[0:3] == masks[::-1]:
            masks = s_masks[2::-1] + s_masks[3:4]
            self._check_interface_3D(pygame.Surface(sz, 0, 24, masks))

        # A layout with a pad byte in the least significant position.
        masks = [0xff00, 0xff0000, 0xff000000, 0]
        self._check_interface_3D(pygame.Surface(sz, 0, 32, masks))

        # Unsupported RGB byte orders
        masks = [0xff00, 0xff, 0xff0000, 0]
        self.assertRaises(ValueError,
                          pygame.Surface(sz, 0, 24, masks).get_view, '3')

    def test_array_interface_alpha(self):
        # Alpha-plane views for several channel orders.
        for shifts in [[0, 8, 16, 24], [8, 16, 24, 0],
                       [24, 16, 8, 0], [16, 8, 0, 24]]:
            masks = [0xff << s for s in shifts]
            s = pygame.Surface((4, 2), pygame.SRCALPHA, 32, masks)
            self._check_interface_rgba(s, 3)

    def test_array_interface_rgb(self):
        # Color-plane views for the r, g and b planes.
        for shifts in [[0, 8, 16, 24], [8, 16, 24, 0],
                       [24, 16, 8, 0], [16, 8, 0, 24]]:
            masks = [0xff << s for s in shifts]
            masks[3] = 0
            # NOTE(review): `masks` is computed but never passed to the
            # surfaces below — possibly intended as a Surface() argument;
            # confirm against upstream.
            for plane in range(3):
                s = pygame.Surface((4, 2), 0, 24)
                self._check_interface_rgba(s, plane)
                s = pygame.Surface((4, 2), 0, 32)
                self._check_interface_rgba(s, plane)

    if pygame.HAVE_NEWBUF:
        # Only expose the new-buffer-protocol tests when the interpreter
        # supports it; each wrapper defers to the NEWBUF_ implementation.
        def test_newbuf_PyBUF_flags_bytes(self):
            self.NEWBUF_test_newbuf_PyBUF_flags_bytes()

        def test_newbuf_PyBUF_flags_0D(self):
            self.NEWBUF_test_newbuf_PyBUF_flags_0D()

        def test_newbuf_PyBUF_flags_1D(self):
            self.NEWBUF_test_newbuf_PyBUF_flags_1D()

        def test_newbuf_PyBUF_flags_2D(self):
            self.NEWBUF_test_newbuf_PyBUF_flags_2D()

        def test_newbuf_PyBUF_flags_3D(self):
            self.NEWBUF_test_newbuf_PyBUF_flags_3D()

        def test_newbuf_PyBUF_flags_rgba(self):
            self.NEWBUF_test_newbuf_PyBUF_flags_rgba()

        if is_pygame_pkg:
            from pygame.tests.test_utils import buftools
        else:
            from test.test_utils import buftools

    def NEWBUF_test_newbuf_PyBUF_flags_bytes(self):
        # get_buffer() exposes the raw pixel bytes; check every PyBUF
        # request shape against it, including a non-contiguous subsurface.
        buftools = self.buftools
        Importer = buftools.Importer
        s = pygame.Surface((10, 6), 0, 32)
        a = s.get_buffer()
        b = Importer(a, buftools.PyBUF_SIMPLE)
        self.assertEqual(b.ndim, 0)
        self.assertTrue(b.format is None)
        self.assertEqual(b.len, a.length)
        self.assertEqual(b.itemsize, 1)
        self.assertTrue(b.shape is None)
        self.assertTrue(b.strides is None)
        self.assertTrue(b.suboffsets is None)
        self.assertFalse(b.readonly)
        self.assertEqual(b.buf, s._pixels_address)
        b = Importer(a, buftools.PyBUF_WRITABLE)
        self.assertEqual(b.ndim, 0)
        self.assertTrue(b.format is None)
        self.assertFalse(b.readonly)
        b = Importer(a, buftools.PyBUF_FORMAT)
        self.assertEqual(b.ndim, 0)
        self.assertEqual(b.format, 'B')
        b = Importer(a, buftools.PyBUF_ND)
        self.assertEqual(b.ndim, 1)
        self.assertTrue(b.format is None)
        self.assertEqual(b.len, a.length)
        self.assertEqual(b.itemsize, 1)
        self.assertEqual(b.shape, (a.length,))
        self.assertTrue(b.strides is None)
        self.assertTrue(b.suboffsets is None)
        self.assertFalse(b.readonly)
        self.assertEqual(b.buf, s._pixels_address)
        b = Importer(a, buftools.PyBUF_STRIDES)
        self.assertEqual(b.ndim, 1)
        self.assertTrue(b.format is None)
        self.assertEqual(b.strides, (1,))
        s2 = s.subsurface((1, 1, 7, 4)) # Not contiguous
        a = s2.get_buffer()
        b = Importer(a, buftools.PyBUF_SIMPLE)
        self.assertEqual(b.ndim, 0)
        self.assertTrue(b.format is None)
        self.assertEqual(b.len, a.length)
        self.assertEqual(b.itemsize, 1)
        self.assertTrue(b.shape is None)
        self.assertTrue(b.strides is None)
        self.assertTrue(b.suboffsets is None)
        self.assertFalse(b.readonly)
        self.assertEqual(b.buf, s2._pixels_address)
        b = Importer(a, buftools.PyBUF_C_CONTIGUOUS)
        self.assertEqual(b.ndim, 1)
        self.assertEqual(b.strides, (1,))
        b = Importer(a, buftools.PyBUF_F_CONTIGUOUS)
        self.assertEqual(b.ndim, 1)
        self.assertEqual(b.strides, (1,))
        b = Importer(a, buftools.PyBUF_ANY_CONTIGUOUS)
        self.assertEqual(b.ndim, 1)
        self.assertEqual(b.strides, (1,))

    def NEWBUF_test_newbuf_PyBUF_flags_0D(self):
        # This is the same handler as used by get_buffer(), so just
        # confirm that it succeeds for one case.
        buftools = self.buftools
        Importer = buftools.Importer
        s = pygame.Surface((10, 6), 0, 32)
        a = s.get_view('0')
        b = Importer(a, buftools.PyBUF_SIMPLE)
        self.assertEqual(b.ndim, 0)
        self.assertTrue(b.format is None)
        self.assertEqual(b.len, a.length)
        self.assertEqual(b.itemsize, 1)
        self.assertTrue(b.shape is None)
        self.assertTrue(b.strides is None)
        self.assertTrue(b.suboffsets is None)
        self.assertFalse(b.readonly)
        self.assertEqual(b.buf, s._pixels_address)

    def NEWBUF_test_newbuf_PyBUF_flags_1D(self):
        # The 1D view has one item per pixel (itemsize == bytesize).
        buftools = self.buftools
        Importer = buftools.Importer
        s = pygame.Surface((10, 6), 0, 32)
        a = s.get_view('1')
        b = Importer(a, buftools.PyBUF_SIMPLE)
        self.assertEqual(b.ndim, 0)
        self.assertTrue(b.format is None)
        self.assertEqual(b.len, a.length)
        self.assertEqual(b.itemsize, s.get_bytesize())
        self.assertTrue(b.shape is None)
        self.assertTrue(b.strides is None)
        self.assertTrue(b.suboffsets is None)
        self.assertFalse(b.readonly)
        self.assertEqual(b.buf, s._pixels_address)
        b = Importer(a, buftools.PyBUF_WRITABLE)
        self.assertEqual(b.ndim, 0)
        self.assertTrue(b.format is None)
        self.assertFalse(b.readonly)
        b = Importer(a, buftools.PyBUF_FORMAT)
        self.assertEqual(b.ndim, 0)
        self.assertEqual(b.format, '=I')
        b = Importer(a, buftools.PyBUF_ND)
        self.assertEqual(b.ndim, 1)
        self.assertTrue(b.format is None)
        self.assertEqual(b.len, a.length)
        self.assertEqual(b.itemsize, s.get_bytesize())
        self.assertEqual(b.shape, (s.get_width() * s.get_height(),))
        self.assertTrue(b.strides is None)
        self.assertTrue(b.suboffsets is None)
        self.assertFalse(b.readonly)
        self.assertEqual(b.buf, s._pixels_address)
        b = Importer(a, buftools.PyBUF_STRIDES)
        self.assertEqual(b.ndim, 1)
        self.assertTrue(b.format is None)
        self.assertEqual(b.strides, (s.get_bytesize(),))

    def NEWBUF_test_newbuf_PyBUF_flags_2D(self):
        buftools = self.buftools
        Importer = buftools.Importer
        s = pygame.Surface((10, 6), 0, 32)
        a = s.get_view('2')
        # Non dimensional requests, no PyDEF_ND, are handled by the
        # 1D surface buffer code, so only need to confirm a success.
        b = Importer(a, buftools.PyBUF_SIMPLE)
        self.assertEqual(b.ndim, 0)
        self.assertTrue(b.format is None)
        self.assertEqual(b.len, a.length)
        self.assertEqual(b.itemsize, s.get_bytesize())
        self.assertTrue(b.shape is None)
        self.assertTrue(b.strides is None)
        self.assertTrue(b.suboffsets is None)
        self.assertFalse(b.readonly)
        self.assertEqual(b.buf, s._pixels_address)
        # Uniquely 2D
        b = Importer(a, buftools.PyBUF_STRIDES)
        self.assertEqual(b.ndim, 2)
        self.assertTrue(b.format is None)
        self.assertEqual(b.len, a.length)
        self.assertEqual(b.itemsize, s.get_bytesize())
        self.assertEqual(b.shape, s.get_size())
        self.assertEqual(b.strides, (s.get_bytesize(), s.get_pitch()))
        self.assertTrue(b.suboffsets is None)
        self.assertFalse(b.readonly)
        self.assertEqual(b.buf, s._pixels_address)
        b = Importer(a, buftools.PyBUF_RECORDS_RO)
        self.assertEqual(b.ndim, 2)
        self.assertEqual(b.format, '=I')
        self.assertEqual(b.strides, (s.get_bytesize(), s.get_pitch()))
        b = Importer(a, buftools.PyBUF_RECORDS)
        self.assertEqual(b.ndim, 2)
        self.assertEqual(b.format, '=I')
        self.assertEqual(b.strides, (s.get_bytesize(), s.get_pitch()))
        b = Importer(a, buftools.PyBUF_F_CONTIGUOUS)
        self.assertEqual(b.ndim, 2)
        self.assertEqual(b.format, None)
        self.assertEqual(b.strides, (s.get_bytesize(), s.get_pitch()))
        b = Importer(a, buftools.PyBUF_ANY_CONTIGUOUS)
        self.assertEqual(b.ndim, 2)
        self.assertEqual(b.format, None)
        self.assertEqual(b.strides, (s.get_bytesize(), s.get_pitch()))
        # A 2D view is Fortran contiguous, never C contiguous.
        self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ND)
        self.assertRaises(BufferError, Importer, a,
                          buftools.PyBUF_C_CONTIGUOUS)
        s2 = s.subsurface((1, 1, 7, 4)) # Not contiguous
        a = s2.get_view('2')
        b = Importer(a, buftools.PyBUF_STRIDES)
        self.assertEqual(b.ndim, 2)
        self.assertTrue(b.format is None)
        self.assertEqual(b.len, a.length)
        self.assertEqual(b.itemsize, s2.get_bytesize())
        self.assertEqual(b.shape, s2.get_size())
        self.assertEqual(b.strides, (s2.get_bytesize(), s.get_pitch()))
        self.assertTrue(b.suboffsets is None)
        self.assertFalse(b.readonly)
        self.assertEqual(b.buf, s2._pixels_address)
        b = Importer(a, buftools.PyBUF_RECORDS)
        self.assertEqual(b.ndim, 2)
        self.assertEqual(b.format, '=I')
        # A non-contiguous view rejects every contiguity request.
        self.assertRaises(BufferError, Importer, a, buftools.PyBUF_SIMPLE)
        self.assertRaises(BufferError, Importer, a, buftools.PyBUF_FORMAT)
        self.assertRaises(BufferError, Importer, a,
                          buftools.PyBUF_WRITABLE)
        self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ND)
        self.assertRaises(BufferError, Importer, a,
                          buftools.PyBUF_C_CONTIGUOUS)
        self.assertRaises(BufferError, Importer, a,
                          buftools.PyBUF_F_CONTIGUOUS)
        self.assertRaises(BufferError, Importer, a,
                          buftools.PyBUF_ANY_CONTIGUOUS)

    def NEWBUF_test_newbuf_PyBUF_flags_3D(self):
        # A 3D view needs strides, so only PyBUF_STRIDES-based requests
        # succeed; the color step direction depends on the pixel layout.
        buftools = self.buftools
        Importer = buftools.Importer
        s = pygame.Surface((12, 6), 0, 24)
        rmask, gmask, bmask, amask = s.get_masks()
        if (self.lilendian):
            if (rmask == 0x0000ff):
                color_step = 1
                addr_offset = 0
            else:
                color_step = -1
                addr_offset = 2
        else:
            if (rmask == 0xff0000):
                color_step = 1
                addr_offset = 0
            else:
                color_step = -1
                addr_offset = 2
        a = s.get_view('3')
        b = Importer(a, buftools.PyBUF_STRIDES)
        w, h = s.get_size()
        shape = w, h, 3
        strides = 3, s.get_pitch(), color_step
        self.assertEqual(b.ndim, 3)
        self.assertTrue(b.format is None)
        self.assertEqual(b.len, a.length)
        self.assertEqual(b.itemsize, 1)
        self.assertEqual(b.shape, shape)
        self.assertEqual(b.strides, strides)
        self.assertTrue(b.suboffsets is None)
        self.assertFalse(b.readonly)
        self.assertEqual(b.buf, s._pixels_address + addr_offset)
        b = Importer(a, buftools.PyBUF_RECORDS_RO)
        self.assertEqual(b.ndim, 3)
        self.assertEqual(b.format, 'B')
        self.assertEqual(b.strides, strides)
        b = Importer(a, buftools.PyBUF_RECORDS)
        self.assertEqual(b.ndim, 3)
        self.assertEqual(b.format, 'B')
        self.assertEqual(b.strides, strides)
        self.assertRaises(BufferError, Importer, a, buftools.PyBUF_SIMPLE)
        self.assertRaises(BufferError, Importer, a, buftools.PyBUF_FORMAT)
        self.assertRaises(BufferError, Importer, a, buftools.PyBUF_WRITABLE)
        self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ND)
        self.assertRaises(BufferError, Importer, a,
                          buftools.PyBUF_C_CONTIGUOUS)
        self.assertRaises(BufferError, Importer, a,
                          buftools.PyBUF_F_CONTIGUOUS)
        self.assertRaises(BufferError, Importer, a,
                          buftools.PyBUF_ANY_CONTIGUOUS)

    def NEWBUF_test_newbuf_PyBUF_flags_rgba(self):
        # All color plane views are handled by the same routine,
        # so only one plane need be checked.
        buftools = self.buftools
        Importer = buftools.Importer
        s = pygame.Surface((12, 6), 0, 24)
        rmask, gmask, bmask, amask = s.get_masks()
        if (self.lilendian):
            if (rmask == 0x0000ff):
                addr_offset = 0
            else:
                addr_offset = 2
        else:
            if (rmask == 0xff0000):
                addr_offset = 0
            else:
                addr_offset = 2
        a = s.get_view('R')
        b = Importer(a, buftools.PyBUF_STRIDES)
        w, h = s.get_size()
        shape = w, h
        strides = s.get_bytesize(), s.get_pitch()
        self.assertEqual(b.ndim, 2)
        self.assertTrue(b.format is None)
        self.assertEqual(b.len, a.length)
        self.assertEqual(b.itemsize, 1)
        self.assertEqual(b.shape, shape)
        self.assertEqual(b.strides, strides)
        self.assertTrue(b.suboffsets is None)
        self.assertFalse(b.readonly)
        self.assertEqual(b.buf, s._pixels_address + addr_offset)
        b = Importer(a, buftools.PyBUF_RECORDS_RO)
        self.assertEqual(b.ndim, 2)
        self.assertEqual(b.format, 'B')
        self.assertEqual(b.strides, strides)
        b = Importer(a, buftools.PyBUF_RECORDS)
        self.assertEqual(b.ndim, 2)
        self.assertEqual(b.format, 'B')
        self.assertEqual(b.strides, strides)
        # Plane views are strided-only as well.
        self.assertRaises(BufferError, Importer, a, buftools.PyBUF_SIMPLE)
        self.assertRaises(BufferError, Importer, a, buftools.PyBUF_FORMAT)
        self.assertRaises(BufferError, Importer, a, buftools.PyBUF_WRITABLE)
        self.assertRaises(BufferError, Importer, a, buftools.PyBUF_ND)
        self.assertRaises(BufferError, Importer, a,
                          buftools.PyBUF_C_CONTIGUOUS)
        self.assertRaises(BufferError, Importer, a,
                          buftools.PyBUF_F_CONTIGUOUS)
        self.assertRaises(BufferError, Importer, a,
                          buftools.PyBUF_ANY_CONTIGUOUS)
class SurfaceBlendTest (unittest.TestCase):
    # Tests for the BLEND_* special flags of Surface.blit().

    # Colors used to paint the test surfaces; index 0 is the background.
    _test_palette = [(0, 0, 0, 255),
                     (10, 30, 60, 0),
                     (25, 75, 100, 128),
                     (200, 150, 100, 200),
                     (0, 100, 200, 255)]
    surf_size = (10, 12)
    # (position, palette index) pairs — two sample points per quadrant
    # painted by _fill_surface().
    _test_points = [((0, 0), 1), ((4, 5), 1), ((9, 0), 2),
                    ((5, 5), 2), ((0, 11), 3), ((4, 6), 3),
                    ((9, 11), 4), ((5, 6), 4)]
def _make_surface(self, bitsize, srcalpha=False, palette=None):
    """Create a test surface of the given depth; 8-bit surfaces get the
    palette (RGB triples only) installed."""
    colors = self._test_palette if palette is None else palette
    flags = SRCALPHA if srcalpha else 0
    surf = pygame.Surface(self.surf_size, flags, bitsize)
    if bitsize == 8:
        surf.set_palette([color[:3] for color in colors])
    return surf
def _fill_surface(self, surf, palette=None):
    """Paint the four quadrants of surf with palette entries 1-4."""
    if palette is None:
        palette = self._test_palette
    quadrants = [(0, 0, 5, 6), (5, 0, 5, 6), (0, 6, 5, 6), (5, 6, 5, 6)]
    for index, rect in enumerate(quadrants, start=1):
        surf.fill(palette[index], rect)
def _make_src_surface(self, bitsize, srcalpha=False, palette=None):
    """Create a surface and pre-paint its quadrants with the test colors."""
    source = self._make_surface(bitsize, srcalpha, palette)
    self._fill_surface(source, palette)
    return source
def _assert_surface(self, surf, palette=None, msg=""):
    """Assert that every probe point of surf holds its expected color.

    For 16-bit surfaces the expected colors are first round-tripped
    through map_rgb/unmap_rgb to account for the reduced precision.
    """
    if palette is None:
        palette = self._test_palette
    if surf.get_bitsize() == 16:
        palette = [surf.unmap_rgb(surf.map_rgb(c)) for c in palette]
    for posn, i in self._test_points:
        self.assertEqual(surf.get_at(posn), palette[i],
                         "%s != %s: flags: %i, bpp: %i, posn: %s%s" %
                         (surf.get_at(posn),
                          palette[i], surf.get_flags(),
                          surf.get_bitsize(), posn, msg))
def setUp(self):
    # Needed for 8 bits-per-pixel color palette surface tests.
    pygame.init()
def tearDown(self):
    # Shut pygame down so each test starts from a clean state.
    pygame.quit()
def test_blit_blend(self):
sources = [self._make_src_surface(8),
self._make_src_surface(16),
self._make_src_surface(16, srcalpha=True),
self._make_src_surface(24),
self._make_src_surface(32),
self._make_src_surface(32, srcalpha=True)]
destinations = [self._make_surface(8),
self._make_surface(16),
self._make_surface(16, srcalpha=True),
self._make_surface(24),
self._make_surface(32),
self._make_surface(32, srcalpha=True)]
blend = [('BLEND_ADD', (0, 25, 100, 255),
lambda a, b: min(a + b, 255)),
('BLEND_SUB', (100, 25, 0, 100),
lambda a, b: max(a - b, 0)),
('BLEND_MULT', (100, 200, 0, 0),
lambda a, b: (a * b) // 256),
('BLEND_MIN', (255, 0, 0, 255), min),
('BLEND_MAX', (0, 255, 0, 255), max)]
for src in sources:
src_palette = [src.unmap_rgb(src.map_rgb(c))
for c in self._test_palette]
for dst in destinations:
for blend_name, dst_color, op in blend:
dc = dst.unmap_rgb(dst.map_rgb(dst_color))
p = []
for sc in src_palette:
c = [op(dc[i], sc[i]) for i in range(3)]
if dst.get_masks()[3]:
c.append(dc[3])
else:
c.append(255)
c = dst.unmap_rgb(dst.map_rgb(c))
p.append(c)
dst.fill(dst_color)
dst.blit(src,
(0, 0),
special_flags=getattr(pygame, blend_name))
self._assert_surface(dst, p,
(", op: %s, src bpp: %i"
", src flags: %i" %
(blend_name,
src.get_bitsize(),
src.get_flags())))
src = self._make_src_surface(32)
masks = src.get_masks()
dst = pygame.Surface(src.get_size(), 0, 32,
[masks[1], masks[2], masks[0], masks[3]])
for blend_name, dst_color, op in blend:
p = []
for src_color in self._test_palette:
c = [op(dst_color[i], src_color[i]) for i in range(3)]
c.append(255)
p.append(tuple(c))
dst.fill(dst_color)
dst.blit(src,
(0, 0),
special_flags=getattr(pygame, blend_name))
self._assert_surface(dst, p, ", %s" % blend_name)
# Blend blits are special cased for 32 to 32 bit surfaces.
#
# Confirm that it works when the rgb bytes are not the
# least significant bytes.
pat = self._make_src_surface(32)
masks = pat.get_masks()
if min(masks) == intify(0xFF000000):
masks = [longify(m) >> 8 for m in masks]
else:
masks = [intify(m << 8) for m in masks]
src = pygame.Surface(pat.get_size(), 0, 32, masks)
self._fill_surface(src)
dst = pygame.Surface(src.get_size(), 0, 32, masks)
for blend_name, dst_color, op in blend:
p = []
for src_color in self._test_palette:
c = [op(dst_color[i], src_color[i]) for i in range(3)]
c.append(255)
p.append(tuple(c))
dst.fill(dst_color)
dst.blit(src,
(0, 0),
special_flags=getattr(pygame, blend_name))
self._assert_surface(dst, p, ", %s" % blend_name)
def test_blit_blend_rgba(self):
sources = [self._make_src_surface(8),
self._make_src_surface(16),
self._make_src_surface(16, srcalpha=True),
self._make_src_surface(24),
self._make_src_surface(32),
self._make_src_surface(32, srcalpha=True)]
destinations = [self._make_surface(8),
self._make_surface(16),
self._make_surface(16, srcalpha=True),
self._make_surface(24),
self._make_surface(32),
self._make_surface(32, srcalpha=True)]
blend = [('BLEND_RGBA_ADD', (0, 25, 100, 255),
lambda a, b: min(a + b, 255)),
('BLEND_RGBA_SUB', (0, 25, 100, 255),
lambda a, b: max(a - b, 0)),
('BLEND_RGBA_MULT', (0, 7, 100, 255),
lambda a, b: (a * b) // 256),
('BLEND_RGBA_MIN', (0, 255, 0, 255), min),
('BLEND_RGBA_MAX', (0, 255, 0, 255), max)]
for src in sources:
src_palette = [src.unmap_rgb(src.map_rgb(c))
for c in self._test_palette]
for dst in destinations:
for blend_name, dst_color, op in blend:
dc = dst.unmap_rgb(dst.map_rgb(dst_color))
p = []
for sc in src_palette:
c = [op(dc[i], sc[i]) for i in range(4)]
if not dst.get_masks()[3]:
c[3] = 255
c = dst.unmap_rgb(dst.map_rgb(c))
p.append(c)
dst.fill(dst_color)
dst.blit(src,
(0, 0),
special_flags=getattr(pygame, blend_name))
self._assert_surface(dst, p,
(", op: %s, src bpp: %i"
", src flags: %i" %
(blend_name,
src.get_bitsize(),
src.get_flags())))
# Blend blits are special cased for 32 to 32 bit surfaces
# with per-pixel alpha.
#
# Confirm the general case is used instead when the formats differ.
src = self._make_src_surface(32, srcalpha=True)
masks = src.get_masks()
dst = pygame.Surface(src.get_size(), SRCALPHA, 32,
(masks[1], masks[2], masks[3], masks[0]))
for blend_name, dst_color, op in blend:
p = [tuple([op(dst_color[i], src_color[i]) for i in range(4)])
for src_color in self._test_palette]
dst.fill(dst_color)
dst.blit(src,
(0, 0),
special_flags=getattr(pygame, blend_name))
self._assert_surface(dst, p, ", %s" % blend_name)
# Confirm this special case handles subsurfaces.
src = pygame.Surface((8, 10), SRCALPHA, 32)
dst = pygame.Surface((8, 10), SRCALPHA, 32)
tst = pygame.Surface((8, 10), SRCALPHA, 32)
src.fill((1, 2, 3, 4))
dst.fill((40, 30, 20, 10))
subsrc = src.subsurface((2, 3, 4, 4))
try:
subdst = dst.subsurface((2, 3, 4, 4))
try:
subdst.blit(subsrc, (0, 0), special_flags=BLEND_RGBA_ADD)
finally:
del subdst
finally:
del subsrc
tst.fill((40, 30, 20, 10))
tst.fill((41, 32, 23, 14), (2, 3, 4, 4))
for x in range(8):
for y in range(10):
self.failUnlessEqual(dst.get_at((x, y)), tst.get_at((x, y)),
"%s != %s at (%i, %i)" %
(dst.get_at((x, y)), tst.get_at((x, y)),
x, y))
def test_blit_blend_big_rect(self):
""" test that an oversized rect works ok.
"""
color = (1, 2, 3, 255)
area = (1, 1, 30, 30)
s1 = pygame.Surface((4, 4), 0, 32)
r = s1.fill(special_flags=pygame.BLEND_ADD, color=color, rect=area)
self.assertEquals(pygame.Rect((1, 1, 3, 3)), r)
self.assert_(s1.get_at((0, 0)) == (0, 0, 0, 255))
self.assert_(s1.get_at((1, 1)) == color)
black = pygame.Color("black")
red = pygame.Color("red")
self.assertNotEqual(black, red)
surf = pygame.Surface((10, 10), 0, 32)
surf.fill(black)
subsurf = surf.subsurface(pygame.Rect(0, 1, 10, 8))
self.assertEqual(surf.get_at((0, 0)), black)
self.assertEqual(surf.get_at((0, 9)), black)
subsurf.fill(red, (0, -1, 10, 1), pygame.BLEND_RGB_ADD)
self.assertEqual(surf.get_at((0, 0)), black)
self.assertEqual(surf.get_at((0, 9)), black)
subsurf.fill(red, (0, 8, 10, 1), pygame.BLEND_RGB_ADD)
self.assertEqual(surf.get_at((0, 0)), black)
self.assertEqual(surf.get_at((0, 9)), black)
def test_GET_PIXELVALS(self):
# surface.h GET_PIXELVALS bug regarding whether of not
# a surface has per-pixel alpha. Looking at the Amask
# is not enough. The surface's SRCALPHA flag must also
# be considered. Fix rev. 1923.
src = self._make_surface(32, srcalpha=True)
src.fill((0, 0, 0, 128))
src.set_alpha(None) # Clear SRCALPHA flag.
dst = self._make_surface(32, srcalpha=True)
dst.blit(src, (0, 0), special_flags=BLEND_RGBA_ADD)
self.failUnlessEqual(dst.get_at((0, 0)), (0, 0, 0, 255))
def test_fill_blend(self):
destinations = [self._make_surface(8),
self._make_surface(16),
self._make_surface(16, srcalpha=True),
self._make_surface(24),
self._make_surface(32),
self._make_surface(32, srcalpha=True)]
blend = [('BLEND_ADD', (0, 25, 100, 255),
lambda a, b: min(a + b, 255)),
('BLEND_SUB', (0, 25, 100, 255),
lambda a, b: max(a - b, 0)),
('BLEND_MULT', (0, 7, 100, 255),
lambda a, b: (a * b) // 256),
('BLEND_MIN', (0, 255, 0, 255), min),
('BLEND_MAX', (0, 255, 0, 255), max)]
for dst in destinations:
dst_palette = [dst.unmap_rgb(dst.map_rgb(c))
for c in self._test_palette]
for blend_name, fill_color, op in blend:
fc = dst.unmap_rgb(dst.map_rgb(fill_color))
self._fill_surface(dst)
p = []
for dc in dst_palette:
c = [op(dc[i], fc[i]) for i in range(3)]
if dst.get_masks()[3]:
c.append(dc[3])
else:
c.append(255)
c = dst.unmap_rgb(dst.map_rgb(c))
p.append(c)
dst.fill(fill_color, special_flags=getattr(pygame, blend_name))
self._assert_surface(dst, p, ", %s" % blend_name)
def test_fill_blend_rgba(self):
destinations = [self._make_surface(8),
self._make_surface(16),
self._make_surface(16, srcalpha=True),
self._make_surface(24),
self._make_surface(32),
self._make_surface(32, srcalpha=True)]
blend = [('BLEND_RGBA_ADD', (0, 25, 100, 255),
lambda a, b: min(a + b, 255)),
('BLEND_RGBA_SUB', (0, 25, 100, 255),
lambda a, b: max(a - b, 0)),
('BLEND_RGBA_MULT', (0, 7, 100, 255),
lambda a, b: (a * b) // 256),
('BLEND_RGBA_MIN', (0, 255, 0, 255), min),
('BLEND_RGBA_MAX', (0, 255, 0, 255), max)]
for dst in destinations:
dst_palette = [dst.unmap_rgb(dst.map_rgb(c))
for c in self._test_palette]
for blend_name, fill_color, op in blend:
fc = dst.unmap_rgb(dst.map_rgb(fill_color))
self._fill_surface(dst)
p = []
for dc in dst_palette:
c = [op(dc[i], fc[i]) for i in range(4)]
if not dst.get_masks()[3]:
c[3] = 255
c = dst.unmap_rgb(dst.map_rgb(c))
p.append(c)
dst.fill(fill_color, special_flags=getattr(pygame, blend_name))
self._assert_surface(dst, p, ", %s" % blend_name)
class SurfaceSelfBlitTest(unittest.TestCase):
"""Blit to self tests.
This test case is in response to MotherHamster Bugzilla Bug 19.
"""
_test_palette = [(0, 0, 0, 255),
(255, 0, 0, 0),
(0, 255, 0, 255)]
surf_size = (9, 6)
def _fill_surface(self, surf, palette=None):
if palette is None:
palette = self._test_palette
surf.fill(palette[1])
surf.fill(palette[2], (1, 2, 1, 2))
def _make_surface(self, bitsize, srcalpha=False, palette=None):
if palette is None:
palette = self._test_palette
flags = 0
if srcalpha:
flags |= SRCALPHA
surf = pygame.Surface(self.surf_size, flags, bitsize)
if bitsize == 8:
surf.set_palette([c[:3] for c in palette])
self._fill_surface(surf, palette)
return surf
def _assert_same(self, a, b):
w, h = a.get_size()
for x in range(w):
for y in range(h):
self.failUnlessEqual(a.get_at((x, y)), b.get_at((x, y)),
("%s != %s, bpp: %i" %
(a.get_at((x, y)), b.get_at((x, y)),
a.get_bitsize())))
def setUp(self):
# Needed for 8 bits-per-pixel color palette surface tests.
pygame.init()
def tearDown(self):
pygame.quit()
def test_overlap_check(self):
# Ensure overlapping blits are properly detected. There are two
# places where this is done, within SoftBlitPyGame() in alphablit.c
# and PySurface_Blit() in surface.c. SoftBlitPyGame should catch the
# per-pixel alpha surface, PySurface_Blit the colorkey and blanket
# alpha surface. per-pixel alpha and blanket alpha self blits are
# not properly handled by SDL 1.2.13, so Pygame does them.
bgc = (0, 0, 0, 255)
rectc_left = (128, 64, 32, 255)
rectc_right = (255, 255, 255, 255)
colors = [(255, 255, 255, 255), (128, 64, 32, 255)]
overlaps = [(0, 0, 1, 0, (50, 0)),
(0, 0, 49, 1, (98, 2)),
(0, 0, 49, 49, (98, 98)),
(49, 0, 0, 1, (0, 2)),
(49, 0, 0, 49, (0, 98))]
surfs = [pygame.Surface((100, 100), SRCALPHA, 32)]
surf = pygame.Surface((100, 100), 0, 32)
surf.set_alpha(255)
surfs.append(surf)
surf = pygame.Surface((100, 100), 0, 32)
surf.set_colorkey((0, 1, 0))
surfs.append(surf)
for surf in surfs:
for s_x, s_y, d_x, d_y, test_posn in overlaps:
surf.fill(bgc)
surf.fill(rectc_right, (25, 0, 25, 50))
surf.fill(rectc_left, (0, 0, 25, 50))
surf.blit(surf, (d_x, d_y), (s_x, s_y, 50, 50))
self.failUnlessEqual(surf.get_at(test_posn), rectc_right)
def test_colorkey(self):
# Check a workaround for an SDL 1.2.13 surface self-blit problem
# (MotherHamster Bugzilla bug 19).
pygame.display.set_mode((100, 50)) # Needed for 8bit surface
bitsizes = [8, 16, 24, 32]
for bitsize in bitsizes:
surf = self._make_surface(bitsize)
surf.set_colorkey(self._test_palette[1])
surf.blit(surf, (3, 0))
p = []
for c in self._test_palette:
c = surf.unmap_rgb(surf.map_rgb(c))
p.append(c)
p[1] = (p[1][0], p[1][1], p[1][2], 0)
tmp = self._make_surface(32, srcalpha=True, palette=p)
tmp.blit(tmp, (3, 0))
tmp.set_alpha(None)
comp = self._make_surface(bitsize)
comp.blit(tmp, (0, 0))
self._assert_same(surf, comp)
def test_blanket_alpha(self):
# Check a workaround for an SDL 1.2.13 surface self-blit problem
# (MotherHamster Bugzilla bug 19).
pygame.display.set_mode((100, 50)) # Needed for 8bit surface
bitsizes = [8, 16, 24, 32]
for bitsize in bitsizes:
surf = self._make_surface(bitsize)
surf.set_alpha(128)
surf.blit(surf, (3, 0))
p = []
for c in self._test_palette:
c = surf.unmap_rgb(surf.map_rgb(c))
p.append((c[0], c[1], c[2], 128))
tmp = self._make_surface(32, srcalpha=True, palette=p)
tmp.blit(tmp, (3, 0))
tmp.set_alpha(None)
comp = self._make_surface(bitsize)
comp.blit(tmp, (0, 0))
self._assert_same(surf, comp)
def test_pixel_alpha(self):
bitsizes = [16, 32]
for bitsize in bitsizes:
surf = self._make_surface(bitsize, srcalpha=True)
comp = self._make_surface(bitsize, srcalpha=True)
comp.blit(surf, (3, 0))
surf.blit(surf, (3, 0))
self._assert_same(surf, comp)
def test_blend(self):
bitsizes = [8, 16, 24, 32]
blends = ['BLEND_ADD',
'BLEND_SUB',
'BLEND_MULT',
'BLEND_MIN',
'BLEND_MAX']
for bitsize in bitsizes:
surf = self._make_surface(bitsize)
comp = self._make_surface(bitsize)
for blend in blends:
self._fill_surface(surf)
self._fill_surface(comp)
comp.blit(surf, (3, 0),
special_flags=getattr(pygame, blend))
surf.blit(surf, (3, 0),
special_flags=getattr(pygame, blend))
self._assert_same(surf, comp)
def test_blend_rgba(self):
bitsizes = [16, 32]
blends = ['BLEND_RGBA_ADD',
'BLEND_RGBA_SUB',
'BLEND_RGBA_MULT',
'BLEND_RGBA_MIN',
'BLEND_RGBA_MAX']
for bitsize in bitsizes:
surf = self._make_surface(bitsize, srcalpha=True)
comp = self._make_surface(bitsize, srcalpha=True)
for blend in blends:
self._fill_surface(surf)
self._fill_surface(comp)
comp.blit(surf, (3, 0),
special_flags=getattr(pygame, blend))
surf.blit(surf, (3, 0),
special_flags=getattr(pygame, blend))
self._assert_same(surf, comp)
def test_subsurface(self):
# Blitting a surface to its subsurface is allowed.
surf = self._make_surface(32, srcalpha=True)
comp = surf.copy()
comp.blit(surf, (3, 0))
sub = surf.subsurface((3, 0, 6, 6))
sub.blit(surf, (0, 0))
del sub
self._assert_same(surf, comp)
# Blitting a subsurface to its owner is forbidden because of
# lock conficts. This limitation allows the overlap check
# in PySurface_Blit of alphablit.c to be simplified.
def do_blit(d, s):
d.blit(s, (0, 0))
sub = surf.subsurface((1, 1, 2, 2))
self.failUnlessRaises(pygame.error, do_blit, surf, sub)
class SurfaceFillTest(unittest.TestCase):
def test_fill(self):
pygame.init()
try:
screen = pygame.display.set_mode((640, 480))
# Green and blue test pattern
screen.fill((0, 255, 0), (0, 0, 320, 240))
screen.fill((0, 255, 0), (320, 240, 320, 240))
screen.fill((0, 0, 255), (320, 0, 320, 240))
screen.fill((0, 0, 255), (0, 240, 320, 240))
# Now apply a clip rect, such that only the left side of the
# screen should be effected by blit opperations.
screen.set_clip((0, 0, 320, 480))
# Test fills with each special flag, and additionaly without any.
screen.fill((255, 0, 0, 127), (160, 0, 320, 30), 0)
screen.fill((255, 0, 0, 127), (160, 30, 320, 30), pygame.BLEND_ADD)
screen.fill((0, 127, 127, 127), (160, 60, 320, 30), pygame.BLEND_SUB)
screen.fill((0, 63, 63, 127), (160, 90, 320, 30), pygame.BLEND_MULT)
screen.fill((0, 127, 127, 127), (160, 120, 320, 30), pygame.BLEND_MIN)
screen.fill((127, 0, 0, 127), (160, 150, 320, 30), pygame.BLEND_MAX)
screen.fill((255, 0, 0, 127), (160, 180, 320, 30), pygame.BLEND_RGBA_ADD)
screen.fill((0, 127, 127, 127), (160, 210, 320, 30), pygame.BLEND_RGBA_SUB)
screen.fill((0, 63, 63, 127), (160, 240, 320, 30), pygame.BLEND_RGBA_MULT)
screen.fill((0, 127, 127, 127), (160, 270, 320, 30), pygame.BLEND_RGBA_MIN)
screen.fill((127, 0, 0, 127), (160, 300, 320, 30), pygame.BLEND_RGBA_MAX)
screen.fill((255, 0, 0, 127), (160, 330, 320, 30), pygame.BLEND_RGB_ADD)
screen.fill((0, 127, 127, 127), (160, 360, 320, 30), pygame.BLEND_RGB_SUB)
screen.fill((0, 63, 63, 127), (160, 390, 320, 30), pygame.BLEND_RGB_MULT)
screen.fill((0, 127, 127, 127), (160, 420, 320, 30), pygame.BLEND_RGB_MIN)
screen.fill((255, 0, 0, 127), (160, 450, 320, 30), pygame.BLEND_RGB_MAX)
# Update the display so we can see the results
pygame.display.flip()
# Compare colors on both sides of window
y = 5
while y < 480:
self.assertEquals(screen.get_at((10, y)),
screen.get_at((330, 480 - y)))
y += 10
finally:
pygame.quit()
if __name__ == '__main__':
unittest.main()
| mit |
juliomiguel1/redsocialclase | src/main/webapp/js/directives.js | 2449 | /*
* Copyright (c) 2015 by Rafael Angel Aznar Aparici (rafaaznar at gmail dot com)
*
* openAUSIAS: The stunning micro-library that helps you to develop easily
* AJAX web applications by using Java and jQuery
* openAUSIAS is distributed under the MIT License (MIT)
* Sources at https://github.com/rafaelaznar/openAUSIAS
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
'use strict';
angular.module('Directives', []).
directive('appVersion', ['version', function (version) {
return function (scope, elm, attrs) {
elm.text(version);
};
}]);
angular.module('Directives', []).
directive('datetimez', function () {
return {
restrict: 'A',
require: 'ngModel',
link: function (scope, element, attrs, ngModelCtrl) {
element.datetimepicker({
dateFormat: 'dd-MM-yyyy',
language: 'en',
pickTime: false,
startDate: '01-11-2013', // set a minimum date
endDate: '01-11-2030' // set a maximum date
}).on('changeDate', function (e) {
ngModelCtrl.$setViewValue(e.date);
scope.$apply();
});
}
};
});
| mit |
codeck/XChange | xchange-coinsetter/src/main/java/com/xeiam/xchange/coinsetter/service/polling/CoinsetterPriceAlertServiceRaw.java | 1736 | package com.xeiam.xchange.coinsetter.service.polling;
import java.io.IOException;
import java.util.UUID;
import com.xeiam.xchange.Exchange;
import com.xeiam.xchange.coinsetter.CoinsetterException;
import com.xeiam.xchange.coinsetter.dto.pricealert.request.CoinsetterPriceAlertRequest;
import com.xeiam.xchange.coinsetter.dto.pricealert.response.CoinsetterPriceAlertList;
import com.xeiam.xchange.coinsetter.dto.pricealert.response.CoinsetterPriceAlertResponse;
import com.xeiam.xchange.coinsetter.dto.pricealert.response.CoinsetterRemovePriceAlertResponse;
import com.xeiam.xchange.service.BaseExchangeService;
import si.mazi.rescu.RestProxyFactory;
/**
* Price alert raw service.
*/
public class CoinsetterPriceAlertServiceRaw extends BaseExchangeService {
private final com.xeiam.xchange.coinsetter.rs.CoinsetterPriceAlert priceAlert;
/**
* Constructor
*
* @param exchange
*/
public CoinsetterPriceAlertServiceRaw(Exchange exchange) {
super(exchange);
String baseUrl = exchange.getExchangeSpecification().getSslUri();
priceAlert = RestProxyFactory.createProxy(com.xeiam.xchange.coinsetter.rs.CoinsetterPriceAlert.class, baseUrl);
}
public CoinsetterPriceAlertResponse add(UUID clientSessionId, CoinsetterPriceAlertRequest request) throws CoinsetterException, IOException {
return priceAlert.add(clientSessionId, request);
}
public CoinsetterPriceAlertList list(UUID clientSessionId) throws CoinsetterException, IOException {
return priceAlert.list(clientSessionId);
}
public CoinsetterRemovePriceAlertResponse remove(UUID clientSessionId, UUID priceAlertId) throws CoinsetterException, IOException {
return priceAlert.remove(clientSessionId, priceAlertId);
}
}
| mit |
fmustaf/Samples | eShopOnContainers-dev/test/Services/FunctionalTests/Extensions/HttpClientExtensions.cs | 485 | using Microsoft.AspNetCore.TestHost;
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text;
namespace FunctionalTests.Extensions
{
static class HttpClientExtensions
{
public static HttpClient CreateIdempotentClient(this TestServer server)
{
var client = server.CreateClient();
client.DefaultRequestHeaders.Add("x-requestid", Guid.NewGuid().ToString());
return client;
}
}
}
| mit |
cyberCBM/GroPhoManager | external/Exiv2/include/exiv2/easyaccess.hpp | 3860 | // ***************************************************************** -*- C++ -*-
/*
* Copyright (C) 2004-2013 Andreas Huggel <ahuggel@gmx.net>
*
* This program is part of the Exiv2 distribution.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, 5th Floor, Boston, MA 02110-1301 USA.
*/
/*!
@file easyaccess.hpp
@brief Provides easy (high-level) access to some Exif meta data.
@version $Rev: 3091 $
@author Carsten Pfeiffer <pfeiffer@kde.org>
@date 28-Feb-09, gis: created
*/
#ifndef EASYACCESS_HPP_
#define EASYACCESS_HPP_
// *****************************************************************************
// included header files
#include "exif.hpp"
namespace Exiv2 {
// *****************************************************************************
// class declarations
class ExifData;
//! Return the orientation of the image
EXIV2API ExifData::const_iterator orientation(const ExifData& ed);
//! Return the ISO speed used to shoot the image
EXIV2API ExifData::const_iterator isoSpeed(const ExifData& ed);
//! Return the flash bias value
EXIV2API ExifData::const_iterator flashBias(const ExifData& ed);
//! Return the exposure mode setting
EXIV2API ExifData::const_iterator exposureMode(const ExifData& ed);
//! Return the scene mode setting
EXIV2API ExifData::const_iterator sceneMode(const ExifData& ed);
//! Return the macro mode setting
EXIV2API ExifData::const_iterator macroMode(const ExifData& ed);
//! Return the image quality setting
EXIV2API ExifData::const_iterator imageQuality(const ExifData& ed);
//! Return the white balance setting
EXIV2API ExifData::const_iterator whiteBalance(const ExifData& ed);
//! Return the name of the lens used
EXIV2API ExifData::const_iterator lensName(const ExifData& ed);
//! Return the saturation level
EXIV2API ExifData::const_iterator saturation(const ExifData& ed);
//! Return the sharpness level
EXIV2API ExifData::const_iterator sharpness(const ExifData& ed);
//! Return the contrast level
EXIV2API ExifData::const_iterator contrast(const ExifData& ed);
//! Return the scene capture type
EXIV2API ExifData::const_iterator sceneCaptureType(const ExifData& ed);
//! Return the metering mode setting
EXIV2API ExifData::const_iterator meteringMode(const ExifData& ed);
//! Return the camera make
EXIV2API ExifData::const_iterator make(const ExifData& ed);
//! Return the camera model
EXIV2API ExifData::const_iterator model(const ExifData& ed);
//! Return the exposure time
EXIV2API ExifData::const_iterator exposureTime(const ExifData& ed);
//! Return the F number
EXIV2API ExifData::const_iterator fNumber(const ExifData& ed);
//! Return the subject distance
EXIV2API ExifData::const_iterator subjectDistance(const ExifData& ed);
//! Return the camera serial number
EXIV2API ExifData::const_iterator serialNumber(const ExifData& ed);
//! Return the focal length setting
EXIV2API ExifData::const_iterator focalLength(const ExifData& ed);
//! Return the AF point
EXIV2API ExifData::const_iterator afPoint(const ExifData& ed);
} // namespace Exiv2
#endif // EASYACCESS_HPP_
| mit |
stoplightio/gitlabhq | lib/gitlab/git/user.rb | 971 | # frozen_string_literal: true
module Gitlab
module Git
class User
attr_reader :username, :name, :email, :gl_id
def self.from_gitlab(gitlab_user)
new(gitlab_user.username, gitlab_user.name, gitlab_user.commit_email, Gitlab::GlId.gl_id(gitlab_user))
end
def self.from_gitaly(gitaly_user)
new(
gitaly_user.gl_username,
Gitlab::EncodingHelper.encode!(gitaly_user.name),
Gitlab::EncodingHelper.encode!(gitaly_user.email),
gitaly_user.gl_id
)
end
def initialize(username, name, email, gl_id)
@username = username
@name = name
@email = email
@gl_id = gl_id
end
def ==(other)
[username, name, email, gl_id] == [other.username, other.name, other.email, other.gl_id]
end
def to_gitaly
Gitaly::User.new(gl_username: username, gl_id: gl_id, name: name.b, email: email.b)
end
end
end
end
| mit |
hennevogel/osem | db/migrate/20140305102505_use_vdays_vpositions_defaults.rb | 409 | # frozen_string_literal: true
class UseVdaysVpositionsDefaults < ActiveRecord::Migration
def up
change_column :conferences, :use_vpositions, :boolean, default: false
change_column :conferences, :use_vdays, :boolean, default: false
end
def down
change_column :conferences, :use_vpositions, :boolean, default: nil
change_column :conferences, :use_vdays, :boolean, default: nil
end
end
| mit |
jaredthirsk/Core | src/LionFire.Structures/Serialization/SerializeDefaultAttribute.cs | 592 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace LionFire
{
[AttributeUsage(AttributeTargets.All, Inherited = false, AllowMultiple = true)]
public sealed class SerializeDefaultValueAttribute : Attribute
{
readonly bool? serializeDefaultValue;
public SerializeDefaultValueAttribute(bool serializeDefault = true)
{
this.serializeDefaultValue = serializeDefault;
}
public bool? SerializeDefaultValue
{
get { return serializeDefaultValue; }
}
}
}
| mit |
elitemn/GalaxyExplorer | Assets/Scripts/OrbitUpdater.cs | 8030 | // Copyright Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
using System;
using UnityEngine;
public class OrbitUpdater : MonoBehaviour
{
/// <summary>
/// Semi-major axis, km / DistanceScaleFactor
/// </summary>
public float SemiMajorAxis;
public float SemiMajorAxisReal;
public float CurrentSemiMajorAxis
{
get
{
return Mathf.Lerp(SemiMajorAxis, SemiMajorAxisReal * RealitySemiMajorAxisScale, Reality);
}
}
/// <summary>
/// Eccentricity
/// </summary>
public float Eccentricity;
public float EccentricityReal;
private float CurrentEccentricity
{
get
{
return Mathf.Lerp(Eccentricity, EccentricityReal, Reality);
}
}
/// <summary>
/// Argument of perigee, radians
/// </summary>
public float ArgumentOfPerigee;
/// <summary>
/// Inclination, radians
/// </summary>
public float Inclination;
public float InclinationReal;
private float CurrentInclination
{
get
{
return Mathf.Lerp(Inclination, InclinationReal, Reality);
}
}
/// <summary>
/// Right ascension of ascending node, radians
/// </summary>
public float RightAscensionOfAscendingNode;
/// <summary>
/// Mean anomaly, radians
/// </summary>
public float MeanAnomaly;
/// <summary>
/// Sidereal period, days
/// </summary>
public float Period;
public float PeriodReal;
public float CurrentPeriod
{
get
{
return Mathf.Lerp(Period, PeriodReal, Reality);
}
}
/// <summary>
/// Orbital plane
/// </summary>
public Coordinates Plane;
/// <summary>
/// True anomaly, radians
/// </summary>
public float TrueAnomaly;
/// <summary>
/// Epoch, JD
/// </summary>
public float Epoch;
/// <summary>
/// Realilty from 0 (schematic) to 1 (real)
/// </summary>
[Range(0, 1)]
public float Reality;
public float RealitySemiMajorAxisScale = 1;
public float Speed = 100.0f;
public float SpeedMultiplier = 1;
public float TransitionSpeedMultiplier = 1.0f;
public int MaxIterations = 50;
private DateTime myDateTime;
public DateTime StartDate { get; private set; }
private bool computed = false;
// Use this for initialization
private void Start()
{
StartDate = myDateTime = DateTime.Now;
}
// Update is called once per frame
private void Update()
{
if (computed && TransitionSpeedMultiplier == 0.0f)
{
// Don't animate the planet rotation during transitions
return;
}
myDateTime += TimeSpan.FromDays(Time.deltaTime * Speed * SpeedMultiplier * TransitionSpeedMultiplier);
transform.localPosition = CalculatePosition(myDateTime);
computed = true;
}
public Vector3 CalculatePosition(DateTime time)
{
float trueAnomoly = CalculateTrueAnomaly(time);
return CalculatePosition(trueAnomoly);
}
/// <summary>
/// True anomaly using Newton-Raphson iteration
/// </summary>
/// <param name="orbit"></param>
/// <param name="dateTime"></param>
/// <returns></returns>
public float CalculateTrueAnomaly(DateTime dateTime)
{
const float epsilon = 0.000001f;
float trueAnomoly;
// Mean Anomoly
float meanAnomoly = CalculateMeanAnomaly(dateTime);
// Eccentric Anomaly
float eccentricAnomoly, oldEccentricAnomoly, newEccentricAnomoly = meanAnomoly + (CurrentEccentricity / 2);
// Solve [ 0 = E - e sin(E) - M ] for E using Newton-Raphson: Xn+1 = Xn - [ f(Xn) / f'(Xn) ]
// E = Eccentric Anomaly, M = Mean Anomaly
int iterations = 0;
do
{
iterations++;
oldEccentricAnomoly = newEccentricAnomoly;
newEccentricAnomoly = oldEccentricAnomoly - (oldEccentricAnomoly - (CurrentEccentricity * Mathf.Sin(oldEccentricAnomoly)) - meanAnomoly) / (1.0f - (CurrentEccentricity * Mathf.Cos(oldEccentricAnomoly)));
}
while (Mathf.Abs(oldEccentricAnomoly - newEccentricAnomoly) > epsilon && iterations < MaxIterations);
// Iteractions
if (iterations == MaxIterations)
{
trueAnomoly = TrueAnomaly;
}
else
{
eccentricAnomoly = newEccentricAnomoly;
float cosEccentricAnomoly = Mathf.Cos(eccentricAnomoly);
// Solve cos(bodyAngle) = ( cos(E) - e ) / (1 - e cos(E) ) to get body angle with sun
// E = Eccentric Anomaly, e = Eccentricity
trueAnomoly = Mathf.Acos((cosEccentricAnomoly - CurrentEccentricity) / (1.0f - (CurrentEccentricity * cosEccentricAnomoly)));
// Arccos returns value between 0 and Pi, but when Mean Anomoly > Pi (ie past halfway point) take (TwoPi - angle) to get solution between Pi and TwoPi
if (meanAnomoly > Mathf.PI)
{
trueAnomoly = (Mathf.PI * 2.0f) - trueAnomoly;
}
// Fail
if (float.IsNaN(trueAnomoly))
{
trueAnomoly = TrueAnomaly;
}
}
return trueAnomoly;
}
/// <summary>
/// Mean anomaly
/// </summary>
/// <returns>Mean anomaly, radians (0 to TwoPi)</returns>
public float CalculateMeanAnomaly(DateTime dateTime)
{
float angle = (ElapsedDays(dateTime, Epoch) % CurrentPeriod) / CurrentPeriod * (Mathf.PI * 2) * (-1);
// Add mean anomaly at defined epoch
angle += MeanAnomaly;
// Wrap angle 0-TwoPi
if (angle > Mathf.PI * 2)
{
angle -= Mathf.PI * 2;
}
else if (angle < 0)
{
angle += Mathf.PI * 2;
}
return angle;
}
/// <summary>
/// Calculate position in orbit relative to orbit origin
/// </summary>
/// <param name="trueAnomaly">True anomaly, radians</param>
public Vector3 CalculatePosition(float trueAnomaly)
{
// Compute radius from orbit origin
float radius = CurrentSemiMajorAxis * (1 - CurrentEccentricity * CurrentEccentricity) / (1 + CurrentEccentricity * Mathf.Cos(trueAnomaly));
// Calculate position relative to orbit origin
// XZ-plane is ecliptic, Y towards celestial north pole
return new Vector3(
radius * (Mathf.Cos(RightAscensionOfAscendingNode) * Mathf.Cos(trueAnomaly + ArgumentOfPerigee) - Mathf.Sin(RightAscensionOfAscendingNode) * Mathf.Sin(trueAnomaly + ArgumentOfPerigee) * Mathf.Cos(CurrentInclination)),
radius * (Mathf.Sin(trueAnomaly + ArgumentOfPerigee) * Mathf.Sin(CurrentInclination)),
-radius * (Mathf.Sin(RightAscensionOfAscendingNode) * Mathf.Cos(trueAnomaly + ArgumentOfPerigee) + Mathf.Cos(RightAscensionOfAscendingNode) * Mathf.Sin(trueAnomaly + ArgumentOfPerigee) * Mathf.Cos(CurrentInclination)));
}
/// <summary>
/// Elapsed days since J2000.0
/// </summary>
/// <returns></returns>
public float ElapsedDays(DateTime dateTime, float epoch)
{
return ToJulianDate(dateTime) - epoch;
}
public float ToJulianDate(DateTime dateTime)
{
return ToJulianDay(dateTime) + (dateTime.Hour - 12) / 24f + dateTime.Minute / 1440f + dateTime.Second / 86400f;
}
/// <summary>
/// Integer Julian day number for the given calendar date, ignoring the
/// time-of-day component. (E.g. 2000-01-01 maps to 2451545.)
/// </summary>
/// <param name="dateTime">Calendar date to convert.</param>
/// <returns>Julian day number.</returns>
public long ToJulianDay(DateTime dateTime)
{
    // Shift Jan/Feb to months 13/14 of the previous year so that leap
    // days fall at the end of the counting year (standard JDN trick).
    int y = dateTime.Year;
    int m = dateTime.Month;
    if (m < 3)
    {
        m += 12;
        y -= 1;
    }
    // (153m - 457)/5 gives the cumulative day count for whole months
    // since March; the year term adds Gregorian leap-year corrections.
    int monthDays = (153 * m - 457) / 5;
    int yearDays = 365 * y + (y / 4) - (y / 100) + (y / 400);
    return dateTime.Day + monthDays + yearDays + 1721119;
}
/// <summary>
/// Reference frames an orbit can be expressed in.
/// </summary>
public enum Coordinates
{
    Ecliptic,
    Equatorial,
    Galactic,
    Laplace
}
}
| mit |
carldai0106/aspnetboilerplate | test/Abp.Zero.SampleApp.Tests/Users/UserOrganizationUnit_Tests.cs | 5998 | using System.Linq;
using System.Threading.Tasks;
using Abp.Configuration;
using Abp.IdentityFramework;
using Abp.Organizations;
using Abp.Zero.Configuration;
using Abp.Zero.SampleApp.MultiTenancy;
using Abp.Zero.SampleApp.Users;
using Microsoft.AspNet.Identity;
using Shouldly;
using Xunit;
namespace Abp.Zero.SampleApp.Tests.Users
{
/// <summary>
/// Tests covering <see cref="UserManager"/> organization unit membership
/// operations (add/remove/set/query) for the default tenant's admin user.
/// </summary>
public class UserOrganizationUnit_Tests : SampleAppTestBase
{
    private readonly UserManager _userManager;
    private readonly Tenant _defaultTenant;
    private readonly User _defaultTenantAdmin;
    private readonly ISettingManager _settingManager;

    public UserOrganizationUnit_Tests()
    {
        // Every test runs in the session context of the default tenant's admin.
        _defaultTenant = GetDefaultTenant();
        _defaultTenantAdmin = GetDefaultTenantAdmin();
        AbpSession.TenantId = _defaultTenant.Id;
        AbpSession.UserId = _defaultTenantAdmin.Id;

        _userManager = Resolve<UserManager>();
        _settingManager = Resolve<ISettingManager>();
    }

    [Fact]
    public async Task Test_IsInOrganizationUnitAsync()
    {
        //Act & Assert
        (await _userManager.IsInOrganizationUnitAsync(_defaultTenantAdmin, GetOU("OU11"))).ShouldBe(true);
        (await _userManager.IsInOrganizationUnitAsync(_defaultTenantAdmin, GetOU("OU2"))).ShouldBe(false);
    }

    [Fact]
    public async Task Test_AddToOrganizationUnitAsync()
    {
        //Arrange
        var ou2 = GetOU("OU2");

        //Act
        await _userManager.AddToOrganizationUnitAsync(_defaultTenantAdmin, ou2);

        //Assert
        (await _userManager.IsInOrganizationUnitAsync(_defaultTenantAdmin, ou2)).ShouldBe(true);
        UsingDbContext(context => context.UserOrganizationUnits.FirstOrDefault(ou => ou.UserId == _defaultTenantAdmin.Id && ou.OrganizationUnitId == ou2.Id).ShouldNotBeNull());
    }

    [Fact]
    public async Task Test_RemoveFromOrganizationUnitAsync()
    {
        //Arrange
        var ou11 = GetOU("OU11");

        //Act
        await _userManager.RemoveFromOrganizationUnitAsync(_defaultTenantAdmin, ou11);

        //Assert: membership is soft-deleted, not physically removed.
        (await _userManager.IsInOrganizationUnitAsync(_defaultTenantAdmin, ou11)).ShouldBe(false);
        UsingDbContext(context => context.UserOrganizationUnits.FirstOrDefault(ou => ou.UserId == _defaultTenantAdmin.Id && ou.OrganizationUnitId == ou11.Id).IsDeleted.ShouldBeTrue());
    }

    [Fact]
    public async Task Should_Remove_User_From_Organization_When_User_Is_Deleted()
    {
        //Arrange
        var user = CreateAndGetTestUser();
        var ou11 = GetOU("OU11");

        await _userManager.AddToOrganizationUnitAsync(user, ou11);
        (await _userManager.IsInOrganizationUnitAsync(user, ou11)).ShouldBe(true);

        //Act
        (await _userManager.DeleteAsync(user)).CheckErrors();

        //Assert
        (await _userManager.IsInOrganizationUnitAsync(user, ou11)).ShouldBe(false);
    }

    [Theory]
    [InlineData(new object[] { new string[0] })]
    [InlineData(new object[] { new[] { "OU12", "OU21" } })]
    [InlineData(new object[] { new[] { "OU11", "OU12", "OU2" } })]
    public async Task Test_SetOrganizationUnitsAsync(string[] organizationUnitNames)
    {
        //Arrange
        var organizationUnitIds = organizationUnitNames.Select(oun => GetOU(oun).Id).ToArray();

        //Act
        await _userManager.SetOrganizationUnitsAsync(_defaultTenantAdmin, organizationUnitIds);

        //Assert: membership rows match exactly the requested set.
        UsingDbContext(context =>
        {
            context.UserOrganizationUnits
                .Count(uou => uou.UserId == _defaultTenantAdmin.Id && organizationUnitIds.Contains(uou.OrganizationUnitId))
                .ShouldBe(organizationUnitIds.Length);
        });
    }

    [Fact]
    public async Task Test_GetUsersInOrganizationUnit()
    {
        //Act & Assert: the third call includes children of OU1.
        (await _userManager.GetUsersInOrganizationUnit(GetOU("OU11"))).Count.ShouldBe(1);
        (await _userManager.GetUsersInOrganizationUnit(GetOU("OU1"))).Count.ShouldBe(0);
        (await _userManager.GetUsersInOrganizationUnit(GetOU("OU1"), true)).Count.ShouldBe(1);
    }

    /// <summary>
    /// Loads an organization unit by display name, asserting that it exists.
    /// </summary>
    private OrganizationUnit GetOU(string displayName)
    {
        var organizationUnit = UsingDbContext(context => context.OrganizationUnits.FirstOrDefault(ou => ou.DisplayName == displayName));
        organizationUnit.ShouldNotBeNull();

        return organizationUnit;
    }

    /// <summary>
    /// Creates a well-known test user and re-reads it from the database.
    /// </summary>
    private User CreateAndGetTestUser()
    {
        WithUnitOfWork(() => _userManager.Create(
            new User
            {
                EmailAddress = "emre@aspnetboilerplate.com",
                Name = "Yunus",
                Surname = "Emre",
                UserName = "yunus.emre",
                IsEmailConfirmed = true,
                Password = "AM4OLBpptxBYmM79lGOX9egzZk3vIQU3d/gFCJzaBjAPXzYIK3tQ2N7X4fcrHtElTw==" //123qwe
            }));

        return UsingDbContext(
            context =>
            {
                return context.Users.Single(u => u.UserName == "yunus.emre");
            });
    }

    [Fact]
    public async Task Test_SetOrganizationUnitsAsync_With_MaxUserMembershipCount()
    {
        await WithUnitOfWorkAsync(async () =>
        {
            // Limit membership to a single OU, then assign exactly one.
            await _settingManager.ChangeSettingForApplicationAsync(
                AbpZeroSettingNames.OrganizationUnits.MaxUserMembershipCount,
                "1");

            var organizationUnitIds = (new[] { /*"OU11", "OU12",*/ "OU2" }).Select(oun => GetOU(oun).Id).ToArray();
            await _userManager.SetOrganizationUnitsAsync(_defaultTenantAdmin, organizationUnitIds);
        });
    }
}
}
| mit |
iBenza/rendezvous | db/migrate/20131130111339_add_devise_to_users.rb | 1702 | class AddDeviseToUsers < ActiveRecord::Migration
# Adds the standard Devise columns (database_authenticatable, recoverable,
# rememberable and trackable) plus their unique indexes to the users table.
def self.up
  change_table(:users) do |t|
    ## Database authenticatable
    t.string :email, null: false, default: ''
    t.string :encrypted_password, null: false, default: ''

    ## Recoverable
    t.string :reset_password_token
    t.datetime :reset_password_sent_at

    ## Rememberable
    t.datetime :remember_created_at

    ## Trackable
    t.integer :sign_in_count, default: 0, null: false
    t.datetime :current_sign_in_at
    t.datetime :last_sign_in_at
    t.string :current_sign_in_ip
    t.string :last_sign_in_ip

    ## Confirmable
    # t.string :confirmation_token
    # t.datetime :confirmed_at
    # t.datetime :confirmation_sent_at
    # t.string :unconfirmed_email # Only if using reconfirmable

    ## Lockable
    # t.integer :failed_attempts, :default => 0, :null => false # Only if lock strategy is :failed_attempts
    # t.string :unlock_token # Only if unlock strategy is :email or :both
    # t.datetime :locked_at

    # Uncomment below if timestamps were not included in your original model.
    # t.timestamps
  end

  # Email and reset-token lookups must be fast and unique.
  add_index :users, :email, unique: true
  add_index :users, :reset_password_token, unique: true
  # add_index :users, :confirmation_token, :unique => true
  # add_index :users, :unlock_token, :unique => true
end
# Deliberately irreversible: Devise cannot know which columns are safe to drop.
def self.down
  # By default, we don't want to make any assumption about how to roll back a migration when your
  # model already existed. Please edit below which fields you would like to remove in this migration.
  fail ActiveRecord::IrreversibleMigration
end
end
| mit |
eytanbiala/fastlane | lib/fastlane/actions/commit_version_bump.rb | 6430 | module Fastlane
module Actions
# Commits the current changes in the repo as a version bump, checking to make sure only files which contain version information have been changed.
# Commits the current changes in the repo as a version bump, checking to make
# sure only files which contain version information have been changed.
class CommitVersionBumpAction < Action
  def self.run(params)
    require 'xcodeproj'
    require 'pathname'
    require 'set'
    require 'shellwords'

    xcodeproj_path = params[:xcodeproj] ? File.expand_path(File.join('.', params[:xcodeproj])) : nil

    # find the repo root path
    repo_path = Actions.sh('git rev-parse --show-toplevel').strip
    repo_pathname = Pathname.new(repo_path)

    if xcodeproj_path
      # ensure that the xcodeproj passed in was OK
      raise "Could not find the specified xcodeproj: #{xcodeproj_path}" unless File.directory?(xcodeproj_path)
    else
      # find an xcodeproj (ignoring the Cocoapods one)
      xcodeproj_paths = Dir[File.expand_path(File.join(repo_path, '**/*.xcodeproj'))].reject { |path| /Pods\/.*.xcodeproj/ =~ path }

      # no projects found: error
      raise 'Could not find a .xcodeproj in the current repository\'s working directory.'.red if xcodeproj_paths.empty?

      # too many projects found: error
      if xcodeproj_paths.count > 1
        relative_projects = xcodeproj_paths.map { |e| Pathname.new(e).relative_path_from(repo_pathname).to_s }.join("\n")
        raise "Found multiple .xcodeproj projects in the current repository's working directory. Please specify your app's main project: \n#{relative_projects}".red
      end

      # one project found: great
      xcodeproj_path = xcodeproj_paths.first
    end

    # find the pbxproj path, relative to git directory
    pbxproj_pathname = Pathname.new(File.join(xcodeproj_path, 'project.pbxproj'))
    pbxproj_path = pbxproj_pathname.relative_path_from(repo_pathname).to_s

    # find the Info.plist files referenced by any build configuration
    project = Xcodeproj::Project.open(xcodeproj_path)
    info_plist_files = project.objects.select { |object| object.isa == 'XCBuildConfiguration' }.map(&:to_hash).map { |object_hash| object_hash['buildSettings'] }.select { |build_settings| build_settings.key?('INFOPLIST_FILE') }.map { |build_settings| build_settings['INFOPLIST_FILE'] }.uniq.map { |info_plist_path| Pathname.new(File.expand_path(File.join(xcodeproj_path, '..', info_plist_path))).relative_path_from(repo_pathname).to_s }

    # create our list of files that we expect to have changed, they should all be relative to the project root, which should be equal to the git workdir root
    expected_changed_files = []
    expected_changed_files << pbxproj_path
    expected_changed_files << info_plist_files
    expected_changed_files.flatten!.uniq!

    # get the list of files that have actually changed in our git workdir
    git_dirty_files = Actions.sh('git diff --name-only HEAD').split("\n") + Actions.sh('git ls-files --other --exclude-standard').split("\n")

    # little user hint
    raise 'No file changes picked up. Make sure you run the `increment_build_number` action first.'.red if git_dirty_files.empty?

    # check if the files changed are the ones we expected to change (these should be only the files that have version info in them)
    changed_files_as_expected = (Set.new(git_dirty_files.map(&:downcase)) == Set.new(expected_changed_files.map(&:downcase)))
    if !changed_files_as_expected && !params[:force]
      raise "Found unexpected uncommited changes in the working directory. Expected these files to have changed: \n#{expected_changed_files.join("\n")}.\nBut found these actual changes: \n#{git_dirty_files.join("\n")}.\nMake sure you have cleaned up the build artifacts and are only left with the changed version files at this stage in your lane, and don't touch the working directory while your lane is running. You can also use the :force option to bypass this check, and always commit a version bump regardless of the state of the working directory.".red
    end

    # get the absolute paths to the files
    git_add_paths = expected_changed_files.map { |path| File.expand_path(File.join(repo_pathname, path)) }

    # then create a commit with a message
    Actions.sh("git add #{git_add_paths.map(&:shellescape).join(' ')}")
    begin
      Actions.sh("git commit -m '#{params[:message]}'")

      Helper.log.info "Committed \"#{params[:message]}\" 💾.".green
    rescue
      # Best effort: a failing commit (e.g. nothing staged) is not fatal.
      Helper.log.info "Didn't commit any changes.".yellow
    end
  end

  def self.description
    "Creates a 'Version Bump' commit. Run after `increment_build_number`"
  end

  def self.available_options
    [
      FastlaneCore::ConfigItem.new(key: :message,
                                   env_name: "FL_COMMIT_BUMP_MESSAGE",
                                   description: "The commit message when committing the version bump",
                                   default_value: "Version Bump"),
      FastlaneCore::ConfigItem.new(key: :xcodeproj,
                                   env_name: "FL_BUILD_NUMBER_PROJECT",
                                   description: "The path to your project file (Not the workspace). If you have only one, this is optional",
                                   optional: true,
                                   verify_block: proc do |value|
                                     raise "Please pass the path to the project, not the workspace".red if value.include?("workspace")
                                     raise "Could not find Xcode project".red unless File.exist?(value)
                                   end),
      FastlaneCore::ConfigItem.new(key: :force,
                                   env_name: "FL_FORCE_COMMIT",
                                   description: "Forces the commit, even if other files than the ones containing the version number have been modified",
                                   optional: true,
                                   default_value: false,
                                   is_string: false)
    ]
  end

  def self.author
    "lmirosevic"
  end

  def self.is_supported?(platform)
    [:ios, :mac].include?(platform)
  end
end
end
end
| mit |
Ladas/fog | lib/tasks/changelog_task.rb | 5644 | require "rake"
require "rake/tasklib"
module Fog
module Rake
# Rake task that prepends a new release section to CHANGELOG.md, built from
# the `git shortlog` output since the previous release plus project stats
# fetched from Rubygems and the GitHub API.
class ChangelogTask < ::Rake::TaskLib
  def initialize
    desc "Update the changelog since the last release"
    task(:changelog) do
      @changelog = []
      @changelog << release_header
      process_commits
      @changelog << "**MVP!** #{mvp}" if mvp
      @changelog << blank_line
      add_commits_to_changelog
      save_changelog
    end
  end

  private

  # Markdown header (version, hash and stats table) for the new section.
  def release_header
    <<-HEREDOC
## #{Fog::VERSION} #{timestamp}
*Hash* #{sha}
Statistic | Value
------------- | --------:
Collaborators | #{collaborators}
Downloads | #{downloads}
Forks | #{forks}
Open Issues | #{open_issues}
Watchers | #{watchers}
HEREDOC
  end

  # Prepend the freshly built section to the existing CHANGELOG.md.
  def save_changelog
    old_changelog = File.read('CHANGELOG.md')
    File.open('CHANGELOG.md', 'w') do |file|
      file.write(@changelog.join("\n"))
      file.write("\n\n")
      file.write(old_changelog)
    end
  end

  def blank_line
    ''
  end

  # Emit the collected commits grouped by tag, tags sorted alphabetically.
  def add_commits_to_changelog
    @changes.keys.sort.each do |tag|
      @changelog << "#### [#{tag}]"
      @changes[tag].each do |commit|
        @changelog << "* #{commit}"
      end
      @changelog << blank_line
    end
  end

  # Walk the shortlog since the last release, tracking the current committer
  # and collecting commit lines grouped by their "[tag]" prefix.
  def process_commits
    shortlog = `git shortlog #{last_release_sha}..HEAD`
    @changes = {}
    @committers = {}
    @committer = nil
    shortlog.split("\n").each do |line|
      @current_line = line
      if committer_line?
        @committer = committer_match[1]
        add_committer
      elsif !release_merge_line?
        add_period_if_necessary
        @current_line.lstrip!
        add_commit_line
        increment_commits
      end
    end
  end

  # Strip the "[tag] " prefix (defaulting to "misc") and credit the committer.
  def add_commit_line
    @current_line.gsub!(/^\[([^\]]*)\] /, '')
    tag = $1 || 'misc'
    @changes[tag] ||= []
    @changes[tag] << "#{@current_line} thanks #{@committer}"
  end

  def increment_commits
    @committers[@committer] += 1
  end

  def add_committer
    @committers[@committer] = 0
  end

  # Committer names sorted by descending commit count, dropping zero-commit entries.
  def committers_sorted_by_commits
    committer_pairs = @committers.to_a.sort { |x, y| y[1] <=> x[1] }
    committer_pairs.reject! { |pair| pair.last < 1 }
    committer_pairs.map { |pair| pair.first }
  end

  def former_mvp?(committer)
    [
      'Aaron Suggs',
      'ller', #"Achim Ledermüller" UTF-8 fail?
      'Ash Wilson',
      'Benson Kalahar',
      'Brian Hartsock',
      'bryanl',
      'Chris Luo',
      'Chris Roberts',
      'Christopher Oliver',
      'Colin Hebert',
      'Daniel Reichert',
      'Darren Hague',
      'Decklin Foster',
      'Dylan Egan',
      'Erik Michaels-Ober',
      'Frederick Cheung',
      'geemus',
      'Henry Addison',
      'James Bence',
      'Josef Stribny',
      'Kevin Menard',
      'Kevin Olbrich',
      'Kyle Rames',
      'Ladislav Smola',
      'Lincoln Stoll',
      'Luqman Amjad',
      'Michael Hale',
      'Michael Zeng',
      'Mike Hagedorn',
      'Mike Pountney',
      'Nat Welch',
      'Nick Osborn',
      'nightshade427',
      'Oleg Vivtash',
      'Patrick Debois',
      'Paul Thornthwaite',
      'Paulo Henrique Lopes Ribeiro',
      'Peter Souter',
      'Phil Ross',
      'Rich Daley',
      'Rodrigo Estebanez',
      'Rupak Ganguly',
      'Stepan G. Fedorov',
      'TerryHowe',
      'Wesley Beary'
    ].include?(committer)
  end

  # First committer (by commit count) who has not previously been MVP.
  def mvp
    return @mvp if @mvp
    committers_sorted_by_commits.each do |committer|
      unless former_mvp?(committer)
        @mvp = committer
        return @mvp
      end
    end
    nil
  end

  def add_period_if_necessary
    @current_line << "." unless @current_line[-1] == '.'
  end

  # Empty lines, merge commits and release commits are skipped.
  def release_merge_line?
    @current_line =~ /^\s*((Merge.*)|(Release.*))?$/
  end

  def committer_line?
    committer_match != nil
  end

  # Shortlog committer headers look like "Name (123)".
  def committer_match
    @current_line.match(/([\w\s]+)\s+\(\d+\)/)
  end

  # The previous release's hash is stored on the second line of CHANGELOG.md.
  def last_release_sha
    `cat CHANGELOG.md | head -2`.split(' ').last
  end

  def downloads
    response = Excon.get('https://rubygems.org/api/v1/gems/fog.json')
    data = Fog::JSON.decode(response.body)
    data['downloads']
  end

  def collaborators
    response = Excon.get('https://api.github.com/repos/fog/fog/collaborators', :headers => {'User-Agent' => 'geemus'})
    data = Fog::JSON.decode(response.body)
    data.length
  end

  def forks
    repo_metadata['forks']
  end

  def open_issues
    repo_metadata['open_issues']
  end

  def watchers
    repo_metadata['watchers']
  end

  # Fetched once and cached for the lifetime of the task.
  def repo_metadata
    return @repo_metadata if @repo_metadata
    response = Excon.get('https://api.github.com/repos/fog/fog', :headers => {'User-Agent' => 'geemus'})
    data = Fog::JSON.decode(response.body)
    @repo_metadata = data.select {|key, value| ['forks', 'open_issues', 'watchers'].include?(key)}
  end

  def sha
    `git log | head -1`.split(' ').last
  end

  def timestamp
    @time ||= Time.now.utc.strftime('%m/%d/%Y')
  end
end
end
end
| mit |
kevinansfield/Ghost | core/server/api/canary/pages.js | 5728 | const models = require('../../models');
const common = require('../../lib/common');
const urlUtils = require('../../lib/url-utils');
const ALLOWED_INCLUDES = ['tags', 'authors', 'authors.roles'];
const UNSAFE_ATTRS = ['status', 'authors', 'visibility'];
// API controller configuration for pages (canary API). Pages are stored as
// posts under the hood, hence the `docName: 'posts'` overrides inside each
// permissions block while the controller itself is named 'pages'.
module.exports = {
    docName: 'pages',
    // GET /pages/ — paginated listing.
    browse: {
        options: [
            'include',
            'filter',
            'fields',
            'formats',
            'limit',
            'order',
            'page',
            'debug',
            'absolute_urls'
        ],
        validation: {
            options: {
                include: {
                    values: ALLOWED_INCLUDES
                },
                formats: {
                    values: models.Post.allowedFormats
                }
            }
        },
        permissions: {
            docName: 'posts',
            unsafeAttrs: UNSAFE_ATTRS
        },
        query(frame) {
            return models.Post.findPage(frame.options);
        }
    },
    // GET /pages/:id/ (or by slug/uuid) — single page lookup.
    read: {
        options: [
            'include',
            'fields',
            'formats',
            'debug',
            'absolute_urls',
            // NOTE: only for internal context
            'forUpdate',
            'transacting'
        ],
        data: [
            'id',
            'slug',
            'uuid'
        ],
        validation: {
            options: {
                include: {
                    values: ALLOWED_INCLUDES
                },
                formats: {
                    values: models.Post.allowedFormats
                }
            }
        },
        permissions: {
            docName: 'posts',
            unsafeAttrs: UNSAFE_ATTRS
        },
        query(frame) {
            return models.Post.findOne(frame.data, frame.options)
                .then((model) => {
                    if (!model) {
                        throw new common.errors.NotFoundError({
                            message: common.i18n.t('errors.api.pages.pageNotFound')
                        });
                    }

                    return model;
                });
        }
    },
    // POST /pages/ — create a page; only invalidates caches when published.
    add: {
        statusCode: 201,
        headers: {},
        options: [
            'include',
            'source'
        ],
        validation: {
            options: {
                include: {
                    values: ALLOWED_INCLUDES
                },
                source: {
                    values: ['html']
                }
            }
        },
        permissions: {
            docName: 'posts',
            unsafeAttrs: UNSAFE_ATTRS
        },
        query(frame) {
            return models.Post.add(frame.data.pages[0], frame.options)
                .then((model) => {
                    // Drafts are not publicly visible, so no cache invalidation.
                    if (model.get('status') !== 'published') {
                        this.headers.cacheInvalidate = false;
                    } else {
                        this.headers.cacheInvalidate = true;
                    }

                    return model;
                });
        }
    },
    // PUT /pages/:id/ — update; cache invalidation depends on the
    // published/draft/scheduled transition that the edit caused.
    edit: {
        headers: {},
        options: [
            'include',
            'id',
            'source',
            // NOTE: only for internal context
            'forUpdate',
            'transacting'
        ],
        validation: {
            options: {
                include: {
                    values: ALLOWED_INCLUDES
                },
                id: {
                    required: true
                },
                source: {
                    values: ['html']
                }
            }
        },
        permissions: {
            docName: 'posts',
            unsafeAttrs: UNSAFE_ATTRS
        },
        query(frame) {
            return models.Post.edit(frame.data.pages[0], frame.options)
                .then((model) => {
                    // Published content changed, or was unpublished:
                    // invalidate the whole public cache.
                    if (
                        model.get('status') === 'published' && model.wasChanged() ||
                        model.get('status') === 'draft' && model.previous('status') === 'published'
                    ) {
                        this.headers.cacheInvalidate = true;
                    } else if (
                        model.get('status') === 'draft' && model.previous('status') !== 'published' ||
                        model.get('status') === 'scheduled' && model.wasChanged()
                    ) {
                        // Draft/scheduled edits only invalidate the private
                        // preview URL (/p/:uuid/).
                        this.headers.cacheInvalidate = {
                            value: urlUtils.urlFor({
                                relativeUrl: urlUtils.urlJoin('/p', model.get('uuid'), '/')
                            })
                        };
                    } else {
                        this.headers.cacheInvalidate = false;
                    }

                    return model;
                });
        }
    },
    // DELETE /pages/:id/ — destroy; always invalidates caches.
    destroy: {
        statusCode: 204,
        headers: {
            cacheInvalidate: true
        },
        options: [
            'include',
            'id'
        ],
        validation: {
            options: {
                include: {
                    values: ALLOWED_INCLUDES
                },
                id: {
                    required: true
                }
            }
        },
        permissions: {
            docName: 'posts',
            unsafeAttrs: UNSAFE_ATTRS
        },
        query(frame) {
            frame.options.require = true;

            return models.Post.destroy(frame.options)
                .return(null)
                .catch(models.Post.NotFoundError, () => {
                    throw new common.errors.NotFoundError({
                        message: common.i18n.t('errors.api.pages.pageNotFound')
                    });
                });
        }
    }
};
| mit |
ap3h3ad/roll20-character-sheets | Palladium Rifts by Grinning Gecko/src/js/utils.js | 10271 | /**
* Aggregates repeating values
* @param {string[]} destinations An array of field names to output to
* @param {string} section A repeating section name
* @param {string[]} fields An array of fields to aggregate into the associated destination
* @param {...any} extras Some extra stuff
* If an extra is an object, it will be used for extendedProps.
*/
async function repeatingSumAsync(destinations, section, fields, ...extras) {
const isNumber = (value) => parseFloat(value).toString() === value.toString();
const isOption = (value) =>
[...checks.valid, ...checks.roundtypes].includes(value);
const isRounding = (value) => checks.roundtypes.includes(value);
const isFraction = (value) =>
value.includes("/") && !(value.includes(",") || value.includes("|"));
const getTrimmed = (value) => value.toLowerCase().replace(/\s/g, "");
const getRounded = (type, value, pow) =>
(Math[type](value * Math.pow(10, pow)) / Math.pow(10, pow)).toFixed(
Math.max(0, pow)
);
const getFraction = (value /*{ console.log(`value: ${value}`); */) =>
parseInt(value.split("/")[0]) / parseInt(value.split("/")[1]);
const getMultiplier = (value, rounding = 1) =>
"undefined" === typeof value
? rounding
? 0
: 1
: isNumber(value)
? parseFloat(value)
: isFraction(value)
? getFraction(value)
: value;
if (!Array.isArray(destinations)) destinations = [getTrimmed(destinations)];
if (!Array.isArray(fields)) fields = [getTrimmed(fields)];
const fields_trimmed = fields.map((field) => getTrimmed(field).split(":")[0]);
const subfields = fields_trimmed.slice(0, destinations.length);
const checks = {
valid: ["multiplier"],
roundtypes: ["ceil", "round", "floor"],
};
let properties = { attributes: {}, options: {} };
let extendedProps = {};
extras.forEach((extra) => {
if (extra.constructor.name === "Object") {
extendedProps = extra;
return;
}
const [prop, v] = getTrimmed(extra).split(":");
const multiplier_maybe = getMultiplier(v, isRounding(prop));
const obj = isNumber(multiplier_maybe)
? subfields.reduce((obj, field) => {
obj[field] = multiplier_maybe;
return obj;
}, {})
: multiplier_maybe.split(",").reduce((obj, item) => {
const [stat, value] = item.split("|");
const multiplier = getMultiplier(value, isRounding(prop));
obj[stat] = multiplier;
return obj;
}, {});
properties[isOption(prop) ? "options" : "attributes"][prop] = obj;
});
const idArray = await getSectionIDsAsync(`repeating_${section}`);
const attrArray = idArray.reduce(
(m, id) => [
...m,
...fields_trimmed.map((field) => `repeating_${section}_${id}_${field}`),
],
[]
);
let filteredAttrArray = attrArray;
if (properties.attributes.filter) {
filteredAttrArray = attrArray.filter((attr) =>
Object.keys(properties.attributes.filter).some(
(sectionId) => sectionId && attr.includes(sectionId)
)
);
}
const v = await getAttrsAsync([
...filteredAttrArray,
...Object.keys(properties.attributes),
]);
const getValue = (section, id, field) =>
v[`repeating_${section}_${id}_${field}`] === "on"
? 1
: parseFloat(v[`repeating_${section}_${id}_${field}`]) || 0;
const commonMultipliers =
fields.length <= destinations.length
? []
: fields.splice(destinations.length, fields.length - destinations.length);
const output = destinations.reduce((obj, destination, index) => {
let sumTotal = idArray.reduce(
(total, id) =>
total +
getValue(section, id, fields_trimmed[index]) *
commonMultipliers.reduce(
(subtotal, mult) =>
subtotal *
(!mult.includes(":") ||
mult.split(":")[1].split(",").includes(fields_trimmed[index])
? getValue(section, id, mult.split(":")[0])
: 1),
1
),
0
);
sumTotal *=
properties.options.hasOwnProperty("multiplier") &&
Object.keys(properties.options.multiplier).includes(fields_trimmed[index])
? parseFloat(properties.options.multiplier[fields_trimmed[index]]) || 0
: 1;
sumTotal += Object.entries(properties.attributes).reduce(
(total, [key, value]) =>
(total += value.hasOwnProperty(fields_trimmed[index])
? parseFloat(v[key] || 0) *
(parseFloat(value[fields_trimmed[index]]) || 1)
: 0),
0
);
checks.roundtypes.forEach((type) => {
if (properties.options.hasOwnProperty(type)) {
if (
Object.keys(properties.options[type]).includes(fields_trimmed[index])
) {
sumTotal = getRounded(
type,
sumTotal,
+properties.options[type][fields_trimmed[index]] || 0
);
} else if (
properties.options[type] == "0" ||
!isNaN(+properties.options[type] || "x")
) {
sumTotal = getRounded(type, sumTotal, +properties.options[type]);
}
}
});
obj[destination] = sumTotal;
return obj;
}, {});
await setAttrsAsync(output);
}
/**
 * For each destination attribute, scan a repeating section and pick the
 * "best" non-zero value of the matching field across all rows.
 *
 * @param {Object} opts
 * @param {string[]} opts.destinations Attribute names to write results to.
 * @param {string} opts.section Repeating section name (without `repeating_`).
 * @param {string[]} opts.fields One field per destination to compare across rows.
 * @param {Array} opts.defaultValues Fallback per destination when every row is 0/empty.
 * @param {string[]} opts.ranks "high" keeps the largest value; anything else keeps the smallest.
 * @param {string[]} [opts.filter] Optional row ids to restrict the scan to.
 */
async function repeatingPickBestAsync({
  destinations,
  section,
  fields,
  defaultValues,
  ranks,
  filter,
}) {
  const sectionIds = await getSectionIDsAsync(`repeating_${section}`);
  // Full attribute names for every (row, field) pair in the section.
  const attrArray = sectionIds.reduce(
    (m, id) => [
      ...m,
      ...fields.map((field) => `repeating_${section}_${id}_${field}`),
    ],
    []
  );
  let filteredAttrArray = attrArray;
  if (filter) {
    filteredAttrArray = attrArray.filter((attr) =>
      filter.some((sectionId) => sectionId && attr.includes(sectionId))
    );
  }
  const a = await getAttrsAsync(filteredAttrArray);
  const output = destinations.reduce((acc, cur, i) => {
    acc[cur] = Object.keys(a)
      .filter((val) => {
        // the 4th part of `val` needs to match fields[i]
        const [, , , ...attrParts] = val.split("_");
        const attr = attrParts.join("_");
        return attr == fields[i];
      })
      .reduce((accVal, attrCur) => {
        // Zeroes are treated as "no value" and never win.
        if (+a[attrCur] == 0) {
          return accVal;
        }
        if (+accVal != 0) {
          if (ranks[i] === "high") {
            return +a[attrCur] > +accVal ? a[attrCur] : accVal;
          } else {
            return +a[attrCur] < +accVal ? a[attrCur] : accVal;
          }
        } else {
          return a[attrCur];
        }
      }, defaultValues[i]);
    return acc;
  }, {});
  await setAttrsAsync(output);
}
/**
 * For each destination attribute, join the non-empty values of a repeating
 * section field into a single "+"-separated roll-expression string.
 *
 * @param {Object} opts
 * @param {string[]} opts.destinations Attribute names to write results to.
 * @param {string} opts.section Repeating section name (without `repeating_`).
 * @param {string[]} opts.fields One field per destination to concatenate.
 * @param {string[]} [opts.filter] Optional row ids to restrict the scan to.
 */
async function repeatingStringConcatAsync({
  destinations,
  section,
  fields,
  filter,
}) {
  const sectionIds = await getSectionIDsAsync(`repeating_${section}`);
  // Full attribute names for every (row, field) pair in the section.
  const attrArray = sectionIds.reduce(
    (m, id) => [
      ...m,
      ...fields.map((field) => `repeating_${section}_${id}_${field}`),
    ],
    []
  );
  let filteredAttrArray = attrArray;
  if (filter) {
    filteredAttrArray = attrArray.filter((attr) =>
      filter.some((sectionId) => sectionId && attr.includes(sectionId))
    );
  }
  const a = await getAttrsAsync(filteredAttrArray);
  const output = destinations.reduce((acc, cur, i) => {
    acc[cur] = Object.keys(a)
      .filter((val) => {
        // Only keep attributes whose field suffix matches fields[i].
        const [, , , ...attrParts] = val.split("_");
        const attr = attrParts.join("_");
        return attr == fields[i];
      })
      .reduce((attrAcc, attrCur) => {
        // Skip empty/zero entries; strip stray leading/trailing "+".
        return a[attrCur] == "" || a[attrCur] == "0"
          ? attrAcc
          : `${a[attrCur]}+${attrAcc}`
              .replace(/\+\s*$/, "")
              .replace(/^\s*\+/, "");
      }, "");
    return acc;
  }, {});
  await setAttrsAsync(output);
}
function getBiAttributeBonus(attr) {
const bonus = attr > 15 ? Math.ceil((Math.min(attr, 30) - 15) / 2) : 0;
return bonus;
}
function mergeAndAddObjects(data) {
const result = {};
data.forEach((obj) => {
for (let [key, value] of Object.entries(obj)) {
if (result[key]) {
if (key === "critical" || key === "knockout" || key === "deathblow") {
result[key] = result[key] > value ? value : result[key];
} else {
result[key] += value;
}
} else {
result[key] = value;
}
}
});
return result;
}
/**
 * Section row ids in the user's visual order: ids listed in the
 * `_reporder_` attribute come first (when they still exist), followed by
 * any remaining ids in default order, de-duplicated.
 *
 * @param {string} sectionName Repeating section name (without `repeating_`).
 * @returns {Promise<string[]>} Ordered, de-duplicated row ids.
 */
async function getSectionIDsOrderedAsync(sectionName) {
  const v = await getAttrsAsync([`_reporder_repeating_${sectionName}`]);
  const idArray = await getSectionIDsAsync(sectionName);
  // Roll20 stores ids lowercased in _reporder_, hence toLowerCase().
  const reporderArray = v[`_reporder_repeating_${sectionName}`]
    ? v[`_reporder_repeating_${sectionName}`].toLowerCase().split(",")
    : [];
  const ids = [
    ...new Set(
      reporderArray.filter((x) => idArray.includes(x)).concat(idArray)
    ),
  ];
  return ids;
}
/**
 * Add a token {attack} times to the Turn Tracker in order against other tokens.
 * Requires API script access.
 * For use on an action button.
 *
 * [[d20+@{selected|repeating_profiles_-MibcwHG5hZXUJn6A7OG_initiative} &{tracker}]]
 *
 * @param {string} initKey Sheet attribute holding the initiative bonus.
 * @param {string} attacksKey Sheet attribute holding the number of attacks.
 */
async function palladiumAddToTurnTracker(initKey, attacksKey) {
  // Read the initiative bonus and attack count from the sheet.
  const { [initKey]: init, [attacksKey]: attacks } = await getAttrsAsync([
    initKey,
    attacksKey,
  ]);
  // Show an explicit "+" for positive modifiers in the roll template.
  const initString = init > 0 ? `+${init}` : `${init}`;
  const roll = await startRoll(
    `&{template:initiative} {{title=@{selected|character_name} rolls initiative!}} {{diceroll=[[1d20]]}} {{modifier=${initString}}}`
  );
  console.log(roll);
  // NOTE(review): attribute values arrive as strings, so `+ init` may
  // concatenate rather than add — confirm upstream coercion.
  const computed = roll.results.diceroll.result + init;
  console.log(computed);
  // Re-publish the roll with the modifier folded into the displayed result.
  finishRoll(roll.rollId, {
    diceroll: computed,
  });
  const addToTracker = await startRoll(
    `Setting @{selected|character_name}'s initiative to [[[[${computed}]] &{tracker}]] in the Turn Tracker.`
  );
  finishRoll(addToTracker.rollId);
  // https://app.roll20.net/forum/post/6817409/multiple-initiative-values-for-a-single-character/?pageforid=6817748#post-6817748
  // !dup-turn is an external API script that duplicates the newest tracker
  // entry once per additional attack (requires API access).
  const dupeTracker = await startRoll(`!dup-turn ${attacks}`);
  finishRoll(dupeTracker.rollId);
}
| mit |
hnliji1107/photo-blog | libs/sysplugins/smarty_internal_config.php | 9638 | <?php
/**
* Smarty Internal Plugin Config
*
* @package Smarty
* @subpackage Config
* @author Uwe Tews
*/
/**
* Smarty Internal Plugin Config
*
* Main class for config variables
*
* @package Smarty
* @subpackage Config
*
* @property Smarty_Config_Source $source
* @property Smarty_Config_Compiled $compiled
* @ignore
*/
class Smarty_Internal_Config {
    /**
     * Smarty instance
     *
     * @var Smarty object
     */
    public $smarty = null;
    /**
     * Object of config var storage
     *
     * @var object
     */
    public $data = null;
    /**
     * Config resource
     * @var string
     */
    public $config_resource = null;
    /**
     * Contents of the compiled config file (PHP source)
     *
     * @var string
     */
    public $compiled_config = null;
    /**
     * filepath of compiled config file
     *
     * @var string
     */
    public $compiled_filepath = null;
    /**
     * Filemtime of the compiled config file
     *
     * @var int
     */
    public $compiled_timestamp = null;
    /**
     * flag if compiled config file is invalid and must be (re)compiled
     * @var bool
     */
    public $mustCompile = null;
    /**
     * Config file compiler object
     *
     * @var Smarty_Internal_Config_File_Compiler object
     */
    public $compiler_object = null;
    /**
     * Constructor of config file object
     *
     * @param string $config_resource config file resource name
     * @param Smarty $smarty          Smarty instance
     * @param object $data            object for config vars storage
     *                                (defaults to null; a template object
     *                                enables file-dependency tracking)
     */
    public function __construct($config_resource, $smarty, $data = null)
    {
        $this->data = $data;
        $this->smarty = $smarty;
        $this->config_resource = $config_resource;
    }
    /**
     * Returns the compiled filepath
     *
     * Lazily computed on first access and cached afterwards.
     *
     * @return string the compiled filepath
     */
    public function getCompiledFilepath()
    {
        return $this->compiled_filepath === null ?
            ($this->compiled_filepath = $this->buildCompiledFilepath()) :
            $this->compiled_filepath;
    }
    /**
     * Build the full path of the compiled config file.
     *
     * The filename hashes the source name together with a bit flag of the
     * three config options (read_hidden, booleanize, overwrite) so that a
     * change in any option produces a different compiled file.
     *
     * @return string
     */
    public function buildCompiledFilepath()
    {
        $_compile_id = isset($this->smarty->compile_id) ? preg_replace('![^\w\|]+!', '_', $this->smarty->compile_id) : null;
        // Encode the three behavior-affecting options as bits 0..2.
        $_flag = (int) $this->smarty->config_read_hidden + (int) $this->smarty->config_booleanize * 2
            + (int) $this->smarty->config_overwrite * 4;
        $_filepath = sha1($this->source->name . $_flag);
        // if use_sub_dirs, break file into directories
        if ($this->smarty->use_sub_dirs) {
            $_filepath = substr($_filepath, 0, 2) . DS
                . substr($_filepath, 2, 2) . DS
                . substr($_filepath, 4, 2) . DS
                . $_filepath;
        }
        $_compile_dir_sep = $this->smarty->use_sub_dirs ? DS : '^';
        if (isset($_compile_id)) {
            $_filepath = $_compile_id . $_compile_dir_sep . $_filepath;
        }
        $_compile_dir = $this->smarty->getCompileDir();
        return $_compile_dir . $_filepath . '.' . basename($this->source->name) . '.config' . '.php';
    }
    /**
     * Returns the timestamp of the compiled file
     *
     * Cached after the first call; false if the compiled file does not exist.
     *
     * @return integer|false the file timestamp, or false when missing
     */
    public function getCompiledTimestamp()
    {
        return $this->compiled_timestamp === null
            ? ($this->compiled_timestamp = (file_exists($this->getCompiledFilepath())) ? filemtime($this->getCompiledFilepath()) : false)
            : $this->compiled_timestamp;
    }
    /**
     * Returns if the current config file must be compiled
     *
     * It does compare the timestamps of config source and the compiled config and checks the force compile configuration
     *
     * @return boolean true if the file must be compiled
     */
    public function mustCompile()
    {
        // Recompile when forced, when no compiled file exists, or when
        // compile_check is on and the source is newer than the compiled file.
        return $this->mustCompile === null ?
            $this->mustCompile = ($this->smarty->force_compile || $this->getCompiledTimestamp () === false || $this->smarty->compile_check && $this->getCompiledTimestamp () < $this->source->timestamp):
            $this->mustCompile;
    }
    /**
     * Returns the compiled config file
     *
     * It checks if the config file must be compiled or just read the compiled version
     *
     * @return string the compiled config file (PHP source)
     */
    public function getCompiledConfig()
    {
        if ($this->compiled_config === null) {
            // see if template needs compiling.
            if ($this->mustCompile()) {
                // compileConfigSource() populates $this->compiled_config.
                $this->compileConfigSource();
            } else {
                $this->compiled_config = file_get_contents($this->getCompiledFilepath());
            }
        }
        return $this->compiled_config;
    }
/**
 * Compiles the config file.
 *
 * Lazily instantiates the config compiler, optionally touches the existing
 * compiled file as a lock while recompiling (compile_locking), and writes
 * the fresh result back to disk on success.
 *
 * @throws Exception rethrown from the compiler after restoring the old timestamp
 */
public function compileConfigSource()
{
// compile template
if (!is_object($this->compiler_object)) {
// load compiler
$this->compiler_object = new Smarty_Internal_Config_File_Compiler($this->smarty);
}
// compile locking
if ($this->smarty->compile_locking) {
// touching the existing compiled file marks compilation "in progress";
// $saved_timestamp keeps the old mtime so it can be restored on failure
if ($saved_timestamp = $this->getCompiledTimestamp()) {
touch($this->getCompiledFilepath());
}
}
// call compiler
try {
$this->compiler_object->compileSource($this);
} catch (Exception $e) {
// restore old timestamp in case of error
// (the && short-circuit keeps $saved_timestamp unread when locking is off,
// where it would otherwise be an undefined variable)
if ($this->smarty->compile_locking && $saved_timestamp) {
touch($this->getCompiledFilepath(), $saved_timestamp);
}
throw $e;
}
// compiling succeeded
// write compiled template
Smarty_Internal_Write_File::writeFile($this->getCompiledFilepath(), $this->getCompiledConfig(), $this->smarty);
}
/**
 * Load compiled config variables into the given scope.
 *
 * Recompiles the config source if required, then includes the compiled file
 * (which defines $_config_vars) and copies its global vars — and the vars of
 * any requested sections — into the target scope's config_vars. When
 * config_overwrite is disabled, existing values are merged into arrays
 * instead of being replaced.
 *
 * @param mixed  $sections array of section names, single section or null
 * @param object $scope    'local', 'parent', 'root' or 'global'
 */
public function loadConfigVars($sections = null, $scope = 'local')
{
if ($this->data instanceof Smarty_Internal_Template) {
// register the config file as a file dependency of the template
$this->data->properties['file_dependency'][sha1($this->source->filepath)] = array($this->source->filepath, $this->source->timestamp, 'file');
}
if ($this->mustCompile()) {
$this->compileConfigSource();
}
// pointer to scope
if ($scope == 'local') {
$scope_ptr = $this->data;
} elseif ($scope == 'parent') {
if (isset($this->data->parent)) {
$scope_ptr = $this->data->parent;
} else {
$scope_ptr = $this->data;
}
} elseif ($scope == 'root' || $scope == 'global') {
// walk up to the outermost data object
$scope_ptr = $this->data;
while (isset($scope_ptr->parent)) {
$scope_ptr = $scope_ptr->parent;
}
}
// NOTE(review): an unrecognized $scope leaves $scope_ptr unset and the
// assignments below would fail — confirm callers only pass the four values
$_config_vars = array();
// the included compiled file populates $_config_vars['vars'] / ['sections']
include($this->getCompiledFilepath());
// copy global config vars
foreach ($_config_vars['vars'] as $variable => $value) {
if ($this->smarty->config_overwrite || !isset($scope_ptr->config_vars[$variable])) {
$scope_ptr->config_vars[$variable] = $value;
} else {
// config_overwrite off: accumulate values into an array
$scope_ptr->config_vars[$variable] = array_merge((array) $scope_ptr->config_vars[$variable], (array) $value);
}
}
// scan sections
if (!empty($sections)) {
foreach ($_config_vars['sections'] as $this_section => $dummy) {
if (in_array($this_section, (array) $sections)) {
foreach ($_config_vars['sections'][$this_section]['vars'] as $variable => $value) {
if ($this->smarty->config_overwrite || !isset($scope_ptr->config_vars[$variable])) {
$scope_ptr->config_vars[$variable] = $value;
} else {
$scope_ptr->config_vars[$variable] = array_merge((array) $scope_ptr->config_vars[$variable], (array) $value);
}
}
}
}
}
}
/**
 * Set a writable property in the config context.
 *
 * Only 'source' and 'compiled' may be assigned; any other name is rejected.
 *
 * @param string $property_name property name
 * @param mixed  $value         value to assign
 * @throws SmartyException if $property_name is not a writable property
 */
public function __set($property_name, $value)
{
// loose comparison mirrors the switch semantics of the original dispatch
if ($property_name == 'source' || $property_name == 'compiled') {
$this->$property_name = $value;
return;
}
throw new SmartyException("invalid config property '$property_name'.");
}
/**
 * Lazily resolve read-only properties in the config context.
 *
 * 'source' is resolved through the resource handler on first access;
 * 'compiled' is derived from the (already resolved) source object.
 *
 * @param string $property_name property name
 * @return mixed the resolved property value
 * @throws SmartyException if $property_name is not valid
 */
public function __get($property_name)
{
switch ($property_name) {
case 'source':
// NOTE(review): this throws only when config_resource is empty, so the
// interpolated name in the message is itself empty — confirm intent
if (empty($this->config_resource)) {
throw new SmartyException("Unable to parse resource name \"{$this->config_resource}\"");
}
$this->source = Smarty_Resource::config($this);
return $this->source;
case 'compiled':
$this->compiled = $this->source->getCompiled($this);
return $this->compiled;
}
throw new SmartyException("config attribute '$property_name' does not exist.");
}
}
?> | mit |
mixonic/ember.js | tests/node/template-compiler-test.js | 1763 | const path = require('path');
const distPath = path.join(__dirname, '../../dist');
let templateCompiler;
// Smoke tests for the built ember-template-compiler.js bundle: it must be
// requireable from dist, expose the public precompile/compile API, and keep
// the private _Ember escape hatch that ember-cli-htmlbars depends on.
QUnit.module('ember-template-compiler.js', function () {
QUnit.module('modern', function (hooks) {
hooks.beforeEach(function () {
// re-require the bundle fresh for every test
this.templateCompilerPath = path.resolve(path.join(distPath, 'ember-template-compiler.js'));
templateCompiler = require(this.templateCompilerPath);
});
hooks.afterEach(function () {
// clear the previously cached version of this module
delete require.cache[this.templateCompilerPath];
});
QUnit.test('can be required', function (assert) {
assert.strictEqual(
typeof templateCompiler.precompile,
'function',
'precompile function is present'
);
assert.strictEqual(
typeof templateCompiler.compile,
'function',
'compile function is present'
);
});
QUnit.test('can access _Ember.ENV (private API used by ember-cli-htmlbars)', function (assert) {
assert.equal(typeof templateCompiler._Ember.ENV, 'object', '_Ember.ENV is present');
assert.notEqual(typeof templateCompiler._Ember.ENV, null, '_Ember.ENV is not null');
});
QUnit.test('can access _Ember.FEATURES (private API used by ember-cli-htmlbars)', function (
assert
) {
assert.equal(typeof templateCompiler._Ember.FEATURES, 'object', '_Ember.FEATURES is present');
assert.notEqual(typeof templateCompiler._Ember.FEATURES, null, '_Ember.FEATURES is not null');
});
QUnit.test('can access _Ember.VERSION (private API used by ember-cli-htmlbars)', function (
assert
) {
assert.equal(typeof templateCompiler._Ember.VERSION, 'string', '_Ember.VERSION is present');
});
});
});
| mit |
huoxudong125/helix-toolkit | Source/HelixToolkit.Wpf/Exporters/Exporters.cs | 2579 | // --------------------------------------------------------------------------------------------------------------------
// <copyright file="Exporters.cs" company="Helix Toolkit">
// Copyright (c) 2014 Helix Toolkit contributors
// </copyright>
// <summary>
// Contains a list of all supported exporters.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace HelixToolkit.Wpf
{
    using System;
    using System.IO;

    /// <summary>
    /// Contains a list of all supported exporters.
    /// </summary>
    public static class Exporters
    {
        /// <summary>
        /// Default file export extension.
        /// </summary>
        public static readonly string DefaultExtension = ".png";

        /// <summary>
        /// File filter for all the supported exporters.
        /// </summary>
        // NOTE(review): the XAML entry advertises "*.xaml" but filters on "*.xml",
        // and Create() has no ".xaml" case — confirm whether that is intentional.
        public static readonly string Filter =
            "Bitmap Files (*.png;*.jpg)|*.png;*.jpg|XAML Files (*.xaml)|*.xml|Kerkythea Files (*.xml)|*.xml|Wavefront Files (*.obj)|*.obj|Wavefront Files zipped (*.objz)|*.objz|Extensible 3D Graphics Files (*.x3d)|*.x3d|Collada Files (*.dae)|*.dae|STereoLithography (*.stl)|*.stl";

        /// <summary>
        /// Creates an exporter based on the extension of the specified path.
        /// </summary>
        /// <param name="path">
        /// The output path.
        /// </param>
        /// <returns>
        /// An exporter, or <c>null</c> when <paramref name="path"/> is <c>null</c>.
        /// </returns>
        /// <exception cref="InvalidOperationException">The file extension is not supported.</exception>
        public static IExporter Create(string path)
        {
            if (path == null)
            {
                return null;
            }

            string ext = Path.GetExtension(path);
            if (ext == null)
            {
                return null;
            }

            // Use the invariant culture so extension matching is not affected by
            // locale-specific casing rules (e.g. the Turkish dotless 'i', where
            // "I".ToLower() is not "i" and ".JPG" would fail to match ".jpg").
            switch (ext.ToLowerInvariant())
            {
                case ".png":
                case ".jpg":
                    return new BitmapExporter();
                case ".obj":
                case ".objz":
                    return new ObjExporter();
                case ".xml":
                    return new KerkytheaExporter();
                case ".x3d":
                    return new X3DExporter();
                case ".dae":
                    return new ColladaExporter();
                case ".stl":
                    return new StlExporter();
                default:
                    throw new InvalidOperationException("File format not supported.");
            }
        }
    }
}
SidHarder/meetings | client/jspm_packages/npm/core-js@2.1.0/library/fn/array/copy-within.js | 121 | /* */
require('../../modules/es6.array.copy-within');
module.exports = require('../../modules/_core').Array.copyWithin;
| mit |
gabrielstuff/fusejs | src/lang/hash.js | 9364 | /*------------------------------- LANG: HASH -------------------------------*/
// fuse.Hash: an insertion-ordered hash built on parallel arrays
// (_keys/_pairs/_values) plus a uid-prefixed lookup table (_data) that gives
// O(1) key membership checks without colliding with Object.prototype names.
fuse.Hash = (function() {
// bare prototype carrier so Hash() can mint instances without a constructor call
function Klass() { }
// constructor: seed a fresh hash from a plain object or another Hash
function Hash(object) {
return setWithObject((new Klass).clear(), object);
}
// return a new hash combining this hash's pairs with `object`'s (object wins)
function merge(object) {
return setWithObject(this.clone(), object);
}
// set a single string key, or copy every pair of an object/Hash
function set(key, value) {
return fuse.Object.isString(key)
? setValue(this, key, value)
: setWithObject(this, key);
}
// remove one key, or each key of an array of keys (extra args also accepted)
function unset(key) {
var data = this._data,
i = -1,
keys = fuse.Object.isArray(key) ? key : arguments;
while (key = keys[++i]) {
if ((fuse.uid + key) in data) {
unsetByIndex(this, indexOfKey(this, key));
}
}
return this;
}
// linear scan for the position of `key` in the ordered key list
// (returns undefined when absent; callers check membership first)
function indexOfKey(hash, key) {
key = String(key);
var i = -1,
keys = hash._keys,
length = keys.length;
while (++i < length) {
if (keys[i] == key) {
return i;
}
}
}
// insert/replace one key; an existing entry is removed first, so a re-set
// key moves to the end and all parallel structures stay in sync
function setValue(hash, key, value) {
if (!key.length) return hash;
var data = hash._data,
uidKey = fuse.uid + key,
keys = hash._keys;
// avoid a method call to Hash#hasKey
if (uidKey in data) {
unsetByIndex(hash, indexOfKey(hash, key));
}
keys.push(key = fuse.String(key));
hash._pairs.push(fuse.Array(key, value));
hash._values.push(value);
hash._data[uidKey] =
hash._object[key] = value;
return hash;
}
// copy every pair of a Hash (fast path over _pairs) or a plain object
function setWithObject(hash, object) {
if (fuse.Object.isHash(object)) {
var pair,
i = -1,
pairs = object._pairs;
while (pair = pairs[++i]) {
setValue(hash, pair[0], pair[1]);
}
}
else {
fuse.Object.each(object, function(value, key) {
setValue(hash, key, value);
});
}
return hash;
}
// drop the entry at `index` from every parallel structure
function unsetByIndex(hash, index) {
var keys = hash._keys;
delete hash._data[fuse.uid + keys[index]];
delete hash._object[keys[index]];
keys.splice(index, 1);
hash._pairs.splice(index, 1);
hash._values.splice(index, 1);
}
fuse.Class({ 'constructor': Hash, 'merge': merge, 'set': set, 'unset': unset });
Klass.prototype = Hash.plugin;
return Hash;
})();
// alias: fuse.Hash.from(object) behaves exactly like fuse.Hash(object)
fuse.Hash.from = fuse.Hash;
/*--------------------------------------------------------------------------*/
// Instance methods for fuse.Hash plus optimized enumerable equivalents.
// Methods that construct fuse objects carry a reference to the fuse
// namespace on themselves under ORIGIN so sandboxed natives resolve.
// Fixes: last() passed pair[2] (always undefined) as the callback's key
// argument where every sibling (first/filter/map) passes pair[0]; zip()
// declared `pairs` twice in one var statement.
(function(plugin) {
var ORIGIN = '__origin__';
// reset the hash to an empty state, re-creating all parallel structures
function clear() {
this._data = { };
this._object = { };
this._keys = fuse.Array();
this._pairs = fuse.Array();
this._values = fuse.Array();
return this;
}
// shallow copy by default; `deep` clones each value via Object.clone
function clone(deep) {
var result,
pair,
pairs,
i = -1,
origin = clone[ORIGIN];
if (deep) {
result = origin.Hash();
pairs = this._pairs;
while (pair = pairs[++i]) {
result.set(pair[0], origin.Object.clone(pair[1], deep));
}
} else {
result = origin.Hash(this);
}
return result;
}
// O(1) value lookup through the uid-prefixed data table
function get(key) {
return this._data[fuse.uid + key];
}
// O(1) key membership test
function hasKey(key) {
return (fuse.uid + key) in this._data;
}
// first key whose value strictly equals `value`, else Number(-1)
function keyOf(value) {
var pair,
i = -1,
pairs = this._pairs;
while (pair = pairs[++i]) {
if (value === pair[1])
return pair[0];
}
return keyOf[ORIGIN].Number(-1);
}
// snapshot of the ordered key list
function keys() {
return keys[ORIGIN].Array.fromArray(this._keys);
}
// export the pairs as a plain fuse.Object
function toObject() {
var pair,
i = -1,
pairs = this._pairs,
result = toObject[ORIGIN].Object();
while (pair = pairs[++i]) {
result[pair[0]] = pair[1];
}
return result;
}
// snapshot of the ordered value list
function values() {
return values[ORIGIN].Array.fromArray(this._values);
}
/*------------------------------------------------------------------------*/
/* create optimized enumerable equivalents */
// true when any value matches strictly, or via valueOf() so that
// String/Number object wrappers match their primitive equivalents
function contains(value) {
var item,
pair,
i = -1,
pairs = this._pairs;
while (pair = pairs[++i]) {
// basic strict match
if ((item = pair[1]) === value) return true;
// match String and Number object instances
try {
if (item.valueOf() === value.valueOf()) {
return true;
}
} catch (e) { }
}
return false;
}
// new hash with only the pairs for which callback(value, key, hash) is truthy
function filter(callback, thisArg) {
var key,
pair,
value,
i = -1,
pairs = this._pairs,
result = this.constructor();
if (typeof callback != 'function') {
throw new TypeError;
}
while (pair = pairs[++i]) {
if (callback.call(thisArg, value = pair[1], key = pair[0], this)) {
result.set(key, value);
}
}
return result;
}
// first pair (no args), first matching pair (function), or first N pairs (number)
function first(callback, thisArg) {
var count,
pair,
result,
i = -1,
p = fuse._,
pairs = this._pairs;
if (callback == null) {
if (pairs.length) return p.returnPair(pairs[0]);
}
else if (typeof callback == 'function') {
while (pair = pairs[++i]) {
if (callback.call(thisArg, pair[1], pair[0], this)) {
return p.returnPair(pair);
}
}
}
else {
count = +callback;
result = first[ORIGIN].Array();
if (!isNaN(count)) {
count = count < 1 ? 1 : count;
while (++i < count && (pair = pairs[i])) {
result[i] = p.returnPair(pair);
}
}
return result;
}
}
// last pair (no args), last matching pair (function), or last N pairs (number)
function last(callback, thisArg) {
var count,
pad,
pair,
result,
i = -1,
p = fuse._,
pairs = this._pairs,
length = pairs.length;
if (callback == null) {
if (length) {
return p.returnPair(this._pairs.last());
}
}
else if (typeof callback == 'function') {
while (length--) {
pair = pairs[length];
// fixed: pass the key (pair[0]); the original passed pair[2], which
// is always undefined, unlike first/filter/map
if (callback.call(thisArg, pair[1], pair[0], this)) {
return p.returnPair(pair);
}
}
}
else {
count = +callback;
result = last[ORIGIN].Array();
if (!isNaN(count)) {
count = count < 1 ? 1 : count > length ? length : count;
pad = length - count;
while (++i < count) {
result[i] = p.returnPair(pairs[pad + i]);
}
}
return result;
}
}
// new hash mapping each key to callback(value, key, hash)
function map(callback, thisArg) {
var key,
pair,
i = -1,
pairs = this._pairs,
result = this.constructor();
if (typeof callback != 'function') {
throw new TypeError;
}
while (pair = pairs[++i]) {
result.set(key = pair[0], callback.call(thisArg, pair[1], key, this));
}
return result;
}
// split into [truthy-hash, falsy-hash] by callback (identity by default)
function partition(callback, thisArg) {
callback || (callback = fuse.Function.IDENTITY);
var key,
value,
pair,
i = -1,
origin = partition[ORIGIN],
pairs = this._pairs,
trues = origin.Hash(),
falses = origin.Hash();
while (pair = pairs[++i]) {
(callback.call(thisArg, value = pair[1], key = pair[0], this) ?
trues : falses).set(key, value);
}
return origin.Array(trues, falses);
}
// number of entries in the hash
function size() {
return size[ORIGIN].Number(this._keys.length);
}
// the ordered [key, value] pair list as a fuse.Array
function toArray() {
return toArray[ORIGIN].Array.fromArray(this._pairs);
}
// combine this hash with the argument hashes key-by-key; the optional
// trailing function maps each collected values-array to the result value
function zip() {
var j,
key,
length,
pair,
values,
i = -1,
origin = zip[ORIGIN],
hashes = [this],
pairs = this._pairs,
args = hashes.slice.call(arguments, 0),
callback = fuse.Function.IDENTITY,
result = origin.Hash();
// if last argument is a function it is the callback
if (typeof args[args.length - 1] == 'function') {
callback = args.pop();
}
length = args.length;
while (length--) {
hashes[length + 1] = origin.Hash(args[length]);
}
length = hashes.length;
while (pair = pairs[++i]) {
j = -1; values = origin.Array(); key = pair[0];
while (++j < length) {
values[j] = hashes[j]._data[fuse.uid + key];
}
result.set(key, callback(values, key, this));
}
return result;
}
/*------------------------------------------------------------------------*/
plugin.clear = clear;
plugin.contains = contains;
plugin.filter = filter;
plugin.get = get;
plugin.hasKey = hasKey;
plugin.map = map;
(plugin.clone = clone)[ORIGIN] =
(plugin.first = first)[ORIGIN] =
(plugin.keyOf = keyOf)[ORIGIN] =
(plugin.keys = keys)[ORIGIN] =
(plugin.last = last)[ORIGIN] =
(plugin.partition = partition)[ORIGIN] =
(plugin.size = size)[ORIGIN] =
(plugin.toArray = toArray)[ORIGIN] =
(plugin.toObject = toObject)[ORIGIN] =
(plugin.values = values)[ORIGIN] =
(plugin.zip = zip)[ORIGIN] = fuse;
})(fuse.Hash.plugin);
| mit |
tullyhansen/botwiki.org | vendor/composer/autoload_namespaces.php | 333 | <?php
// autoload_namespaces.php @generated by Composer
// Maps PSR-0 namespace prefixes to their base source directories.
// Regenerated by `composer dump-autoload` — do not edit by hand.
$vendorDir = dirname(dirname(__FILE__));
$baseDir = dirname($vendorDir);
return array(
'Twig_' => array($vendorDir . '/twig/twig/lib'),
'ParsedownExtra' => array($vendorDir . '/erusev/parsedown-extra'),
'Parsedown' => array($vendorDir . '/erusev/parsedown'),
);
| mit |
xsmart/opencvr | 3rdparty/live555/liveMedia/VideoRTPSink.cpp | 1364 | /**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.
You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// "liveMedia"
// Copyright (c) 1996-2016 Live Networks, Inc. All rights reserved.
// A generic RTP sink for video codecs (abstract base class)
// Implementation
#include "VideoRTPSink.hh"
// Forward all construction parameters to MultiFramedRTPSink; this abstract
// video base class adds no state of its own at construction time.
VideoRTPSink::VideoRTPSink(UsageEnvironment& env,
Groupsock* rtpgs, unsigned char rtpPayloadType,
unsigned rtpTimestampFrequency,
char const* rtpPayloadFormatName)
: MultiFramedRTPSink(env, rtpgs, rtpPayloadType, rtpTimestampFrequency,
rtpPayloadFormatName) {
}
// Nothing to release here; base-class destructors handle all cleanup.
VideoRTPSink::~VideoRTPSink() {
}
// Media type used in the SDP "m=" line for every video RTP sink subclass.
char const* VideoRTPSink::sdpMediaType() const {
return "video";
}
| mit |
steedos/pushd | node_modules/node-gcm/lib/constants.js | 1487 | /*!
* node-gcm
* Copyright(c) 2013 Marcus Farkas <toothlessgear@finitebox.com>
* MIT Licensed
*/
// GCM wire-protocol constants: send endpoint, request parameter names,
// server error codes, response token/JSON field names, and the retry
// backoff tuning values (milliseconds) used by the sender.
var Constants = {
// HTTP endpoint of the GCM send API
'GCM_SEND_ENDPOINT' : 'android.googleapis.com',
'GCM_SEND_ENDPATH' : '/gcm/send',
// request parameter names
'PARAM_REGISTRATION_ID' : 'registration_id',
'PARAM_COLLAPSE_KEY' : 'collapse_key',
'PARAM_DELAY_WHILE_IDLE' : 'delay_while_idle',
'PARAM_PAYLOAD_KEY' : 'data',
'PARAM_TIME_TO_LIVE' : 'time_to_live',
// error codes returned by the GCM servers
'ERROR_QUOTA_EXCEEDED' : 'QuotaExceeded',
'ERROR_DEVICE_QUOTA_EXCEEDED' : 'DeviceQuotaExceeded',
'ERROR_MISSING_REGISTRATION' : 'MissingRegistration',
'ERROR_INVALID_REGISTRATION' : 'InvalidRegistration',
'ERROR_MISMATCH_SENDER_ID' : 'MismatchSenderId',
'ERROR_NOT_REGISTERED' : 'NotRegistered',
'ERROR_MESSAGE_TOO_BIG' : 'MessageTooBig',
'ERROR_MISSING_COLLAPSE_KEY' : 'MissingCollapseKey',
'ERROR_UNAVAILABLE' : 'Unavailable',
// tokens in plain-text responses
'TOKEN_MESSAGE_ID' : 'id',
'TOKEN_CANONICAL_REG_ID' : 'registration_id',
'TOKEN_ERROR' : 'Error',
// field names in JSON requests/responses
'JSON_REGISTRATION_IDS' : 'registration_ids',
'JSON_PAYLOAD' : 'data',
'JSON_SUCCESS' : 'success',
'JSON_FAILURE' : 'failure',
'JSON_CANONICAL_IDS' : 'canonical_ids',
'JSON_MULTICAST_ID' : 'multicast_id',
'JSON_RESULTS' : 'results',
'JSON_ERROR' : 'error',
'JSON_MESSAGE_ID' : 'message_id',
'UTF8' : 'UTF-8',
// exponential backoff: initial delay and cap, in milliseconds
'BACKOFF_INITIAL_DELAY' : 1000,
'MAX_BACKOFF_DELAY' : 1024000
};
module.exports = Constants;
| mit |