code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
/*
 * Copyright 2012 the original author or authors
 * @license MIT, see LICENSE.txt for details
 *
 * @author Scott Andrews
 */

(function (define) {
	'use strict';

	var undef;

	/**
	 * Aggregate messages into batches as they are received.
	 */
	define(function (require) {

		var msgs = require('..');

		/**
		 * Aggregates messages into batches as they are received. A batch is
		 * released either once it reaches an absolute size, or once a timeout
		 * has elapsed since the first message of the batch arrived. At least
		 * one of batch size or timeout must be configured.
		 *
		 * @param {string} [name] the name to register the aggregator as
		 * @param {number} [opts.batch=0] absolute size of a chunk. If <=0,
		 *   batch size is not a factor
		 * @param {number} [opts.timeout=0] number of milliseconds since the
		 *   first message arrived to queue the chunk. If <=0, timeout is not
		 *   a factor
		 * @param {string|Channel} [opts.output] the channel to post the
		 *   aggregated messages to
		 * @param {string|Channel} [opts.input] the channel to receive message
		 *   from
		 * @param {string|Channel} [opts.error] channel to receive errors
		 * @returns the aggregator
		 * @throws on invalid configuration, batch size or timeout is required
		 */
		msgs.prototype.batchingAggregator = msgs.utils.optionalName(function batchingAggregator(name, opts) {
			var pending = [];
			var timer;

			opts = opts || {};
			opts.batch = opts.batch || 0;
			opts.timeout = opts.timeout || 0;

			if (opts.batch <= 0 && opts.timeout <= 0) {
				throw new Error('Invalid configuration: batch size or timeout must be defined');
			}

			// hand the accumulated payloads to the release callback and
			// reset batch state, cancelling any pending timeout
			function flush(release) {
				release(pending);
				pending = [];
				clearTimeout(timer);
				timer = undef;
			}

			return this.aggregator(name, function (message, release) {
				pending.push(message.payload);
				if (opts.batch > 0 && pending.length >= opts.batch) {
					// size threshold reached: release immediately
					flush(release);
				}
				else if (!timer && opts.timeout > 0) {
					// first message of a new batch: arm the release timer
					timer = setTimeout(function () {
						flush(release);
					}, opts.timeout);
				}
			}, opts);
		});

		return msgs;

	});

}(
	typeof define === 'function' && define.amd ? define : function (factory) { module.exports = factory(require); }
	// Boilerplate for AMD and Node
));
timveil/iot-truck-streaming
storm-demo-webapp/src/main/webapp/assets/lib/msgs/aggregators/batching.js
JavaScript
apache-2.0
2,277
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.io.stream;

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.test.ESTestCase;

import java.io.ByteArrayInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.iterableWithSize;

/**
 * Round-trip tests for {@link StreamOutput}/{@link StreamInput} wire
 * serialization: primitives, optionals, zig-zag longs, collections,
 * arrays, {@link Writeable}s, and time types. Each test writes values to
 * a {@link BytesStreamOutput}, reads them back, and asserts both the raw
 * byte layout (where the format is fixed) and the decoded values.
 */
public class StreamTests extends ESTestCase {

    public void testBooleanSerialization() throws IOException {
        final BytesStreamOutput output = new BytesStreamOutput();
        output.writeBoolean(false);
        output.writeBoolean(true);

        // booleans are encoded as exactly one byte each: 0x00 / 0x01
        final BytesReference bytesReference = output.bytes();

        final BytesRef bytesRef = bytesReference.toBytesRef();
        assertThat(bytesRef.length, equalTo(2));
        final byte[] bytes = bytesRef.bytes;
        assertThat(bytes[0], equalTo((byte) 0));
        assertThat(bytes[1], equalTo((byte) 1));

        final StreamInput input = bytesReference.streamInput();
        assertFalse(input.readBoolean());
        assertTrue(input.readBoolean());

        // any byte other than 0x00/0x01 must be rejected on read
        final Set<Byte> set = IntStream.range(Byte.MIN_VALUE, Byte.MAX_VALUE).mapToObj(v -> (byte) v).collect(Collectors.toSet());
        set.remove((byte) 0);
        set.remove((byte) 1);
        final byte[] corruptBytes = new byte[]{randomFrom(set)};
        final BytesReference corrupt = new BytesArray(corruptBytes);
        final IllegalStateException e = expectThrows(IllegalStateException.class, () -> corrupt.streamInput().readBoolean());
        final String message = String.format(Locale.ROOT, "unexpected byte [0x%02x]", corruptBytes[0]);
        assertThat(e, hasToString(containsString(message)));
    }

    public void testOptionalBooleanSerialization() throws IOException {
        final BytesStreamOutput output = new BytesStreamOutput();
        output.writeOptionalBoolean(false);
        output.writeOptionalBoolean(true);
        output.writeOptionalBoolean(null);

        // optional booleans use a third marker byte (0x02) for null
        final BytesReference bytesReference = output.bytes();

        final BytesRef bytesRef = bytesReference.toBytesRef();
        assertThat(bytesRef.length, equalTo(3));
        final byte[] bytes = bytesRef.bytes;
        assertThat(bytes[0], equalTo((byte) 0));
        assertThat(bytes[1], equalTo((byte) 1));
        assertThat(bytes[2], equalTo((byte) 2));

        final StreamInput input = bytesReference.streamInput();
        final Boolean maybeFalse = input.readOptionalBoolean();
        assertNotNull(maybeFalse);
        assertFalse(maybeFalse);
        final Boolean maybeTrue = input.readOptionalBoolean();
        assertNotNull(maybeTrue);
        assertTrue(maybeTrue);
        assertNull(input.readOptionalBoolean());

        // any byte other than 0x00/0x01/0x02 must be rejected on read
        final Set<Byte> set = IntStream.range(Byte.MIN_VALUE, Byte.MAX_VALUE).mapToObj(v -> (byte) v).collect(Collectors.toSet());
        set.remove((byte) 0);
        set.remove((byte) 1);
        set.remove((byte) 2);
        final byte[] corruptBytes = new byte[]{randomFrom(set)};
        final BytesReference corrupt = new BytesArray(corruptBytes);
        final IllegalStateException e = expectThrows(IllegalStateException.class, () -> corrupt.streamInput().readOptionalBoolean());
        final String message = String.format(Locale.ROOT, "unexpected byte [0x%02x]", corruptBytes[0]);
        assertThat(e, hasToString(containsString(message)));
    }

    // random round-trip of zig-zag encoded longs
    public void testRandomVLongSerialization() throws IOException {
        for (int i = 0; i < 1024; i++) {
            long write = randomLong();
            BytesStreamOutput out = new BytesStreamOutput();
            out.writeZLong(write);
            long read = out.bytes().streamInput().readZLong();
            assertEquals(write, read);
        }
    }

    // pins the exact zig-zag byte layout for known values, including both extremes
    public void testSpecificVLongSerialization() throws IOException {
        List<Tuple<Long, byte[]>> values = Arrays.asList(
            new Tuple<>(0L, new byte[]{0}),
            new Tuple<>(-1L, new byte[]{1}),
            new Tuple<>(1L, new byte[]{2}),
            new Tuple<>(-2L, new byte[]{3}),
            new Tuple<>(2L, new byte[]{4}),
            new Tuple<>(Long.MIN_VALUE, new byte[]{-1, -1, -1, -1, -1, -1, -1, -1, -1, 1}),
            new Tuple<>(Long.MAX_VALUE, new byte[]{-2, -1, -1, -1, -1, -1, -1, -1, -1, 1})
        );
        for (Tuple<Long, byte[]> value : values) {
            BytesStreamOutput out = new BytesStreamOutput();
            out.writeZLong(value.v1());
            assertArrayEquals(Long.toString(value.v1()), value.v2(), BytesReference.toBytes(out.bytes()));
            BytesReference bytes = new BytesArray(value.v2());
            assertEquals(Arrays.toString(value.v2()), (long) value.v1(), bytes.streamInput().readZLong());
        }
    }

    // a LinkedHashMap written as a generic value must preserve its iteration
    // order (including access order, when enabled) through serialization
    public void testLinkedHashMap() throws IOException {
        int size = randomIntBetween(1, 1024);
        boolean accessOrder = randomBoolean();
        List<Tuple<String, Integer>> list = new ArrayList<>(size);
        LinkedHashMap<String, Integer> write = new LinkedHashMap<>(size, 0.75f, accessOrder);
        for (int i = 0; i < size; i++) {
            int value = randomInt();
            list.add(new Tuple<>(Integer.toString(i), value));
            write.put(Integer.toString(i), value);
        }
        if (accessOrder) {
            // randomize access order
            Collections.shuffle(list, random());
            for (Tuple<String, Integer> entry : list) {
                // touch the entries to set the access order
                write.get(entry.v1());
            }
        }
        BytesStreamOutput out = new BytesStreamOutput();
        out.writeGenericValue(write);
        LinkedHashMap<String, Integer> read = (LinkedHashMap<String, Integer>) out.bytes().streamInput().readGenericValue();
        assertEquals(size, read.size());
        int index = 0;
        for (Map.Entry<String, Integer> entry : read.entrySet()) {
            assertEquals(list.get(index).v1(), entry.getKey());
            assertEquals(list.get(index).v2(), entry.getValue());
            index++;
        }
    }

    public void testFilterStreamInputDelegatesAvailable() throws IOException {
        final int length = randomIntBetween(1, 1024);
        StreamInput delegate = StreamInput.wrap(new byte[length]);

        // anonymous subclass: FilterStreamInput must forward available() to the delegate
        FilterStreamInput filterInputStream = new FilterStreamInput(delegate) {
        };
        assertEquals(filterInputStream.available(), length);

        // read some bytes
        final int bytesToRead = randomIntBetween(1, length);
        filterInputStream.readBytes(new byte[bytesToRead], 0, bytesToRead);
        assertEquals(filterInputStream.available(), length - bytesToRead);
    }

    public void testInputStreamStreamInputDelegatesAvailable() throws IOException {
        final int length = randomIntBetween(1, 1024);
        ByteArrayInputStream is = new ByteArrayInputStream(new byte[length]);
        InputStreamStreamInput streamInput = new InputStreamStreamInput(is);
        assertEquals(streamInput.available(), length);

        // read some bytes
        final int bytesToRead = randomIntBetween(1, length);
        streamInput.readBytes(new byte[bytesToRead], 0, bytesToRead);
        assertEquals(streamInput.available(), length - bytesToRead);
    }

    // reading a byte array from a stream that is shorter than the declared
    // array size must fail with EOFException rather than return a short read
    public void testReadArraySize() throws IOException {
        BytesStreamOutput stream = new BytesStreamOutput();
        byte[] array = new byte[randomIntBetween(1, 10)];
        for (int i = 0; i < array.length; i++) {
            array[i] = randomByte();
        }
        stream.writeByteArray(array);
        InputStreamStreamInput streamInput = new InputStreamStreamInput(StreamInput.wrap(BytesReference.toBytes(stream.bytes())), array
            .length - 1);
        expectThrows(EOFException.class, streamInput::readByteArray);
        streamInput = new InputStreamStreamInput(StreamInput.wrap(BytesReference.toBytes(stream.bytes())), BytesReference.toBytes(stream
            .bytes()).length);
        assertArrayEquals(array, streamInput.readByteArray());
    }

    // round-trips arrays of Writeables, both the plain and optional (possibly
    // null) variants, chosen at random
    public void testWritableArrays() throws IOException {
        final String[] strings = generateRandomStringArray(10, 10, false, true);
        WriteableString[] sourceArray = Arrays.stream(strings).<WriteableString>map(WriteableString::new).toArray(WriteableString[]::new);
        WriteableString[] targetArray;
        BytesStreamOutput out = new BytesStreamOutput();

        if (randomBoolean()) {
            if (randomBoolean()) {
                sourceArray = null;
            }
            out.writeOptionalArray(sourceArray);
            targetArray = out.bytes().streamInput().readOptionalArray(WriteableString::new, WriteableString[]::new);
        } else {
            out.writeArray(sourceArray);
            targetArray = out.bytes().streamInput().readArray(WriteableString::new, WriteableString[]::new);
        }

        assertThat(targetArray, equalTo(sourceArray));
    }

    // round-trips arrays via explicit Writer/Reader functions rather than Writeable
    public void testArrays() throws IOException {
        final String[] strings;
        final String[] deserialized;
        Writeable.Writer<String> writer = StreamOutput::writeString;
        Writeable.Reader<String> reader = StreamInput::readString;
        BytesStreamOutput out = new BytesStreamOutput();
        if (randomBoolean()) {
            if (randomBoolean()) {
                strings = null;
            } else {
                strings = generateRandomStringArray(10, 10, false, true);
            }
            out.writeOptionalArray(writer, strings);
            deserialized = out.bytes().streamInput().readOptionalArray(reader, String[]::new);
        } else {
            strings = generateRandomStringArray(10, 10, false, true);
            out.writeArray(writer, strings);
            deserialized = out.bytes().streamInput().readArray(reader, String[]::new);
        }
        assertThat(deserialized, equalTo(strings));
    }

    public void testCollection() throws IOException {
        // minimal local Writeable used to exercise collection serialization
        class FooBar implements Writeable {

            private final int foo;
            private final int bar;

            private FooBar(final int foo, final int bar) {
                this.foo = foo;
                this.bar = bar;
            }

            private FooBar(final StreamInput in) throws IOException {
                this.foo = in.readInt();
                this.bar = in.readInt();
            }

            @Override
            public void writeTo(final StreamOutput out) throws IOException {
                out.writeInt(foo);
                out.writeInt(bar);
            }

            @Override
            public boolean equals(final Object o) {
                if (this == o) return true;
                if (o == null || getClass() != o.getClass()) return false;
                final FooBar that = (FooBar) o;
                return foo == that.foo && bar == that.bar;
            }

            @Override
            public int hashCode() {
                return Objects.hash(foo, bar);
            }

        }

        runWriteReadCollectionTest(
            () -> new FooBar(randomInt(), randomInt()), StreamOutput::writeCollection, in -> in.readList(FooBar::new));
    }

    public void testStringCollection() throws IOException {
        runWriteReadCollectionTest(() -> randomUnicodeOfLength(16), StreamOutput::writeStringCollection, StreamInput::readStringList);
    }

    /**
     * Shared driver: fills a collection from {@code supplier}, writes it with
     * {@code writer}, reads it back with {@code reader}, and asserts equality.
     * Also covers the empty-collection case (length may be zero).
     */
    private <T> void runWriteReadCollectionTest(
        final Supplier<T> supplier,
        final CheckedBiConsumer<StreamOutput, Collection<T>, IOException> writer,
        final CheckedFunction<StreamInput, Collection<T>, IOException> reader) throws IOException {
        final int length = randomIntBetween(0, 10);
        final Collection<T> collection = new ArrayList<>(length);
        for (int i = 0; i < length; i++) {
            collection.add(supplier.get());
        }
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            writer.accept(out, collection);
            try (StreamInput in = out.bytes().streamInput()) {
                assertThat(collection, equalTo(reader.apply(in)));
            }
        }
    }

    public void testSetOfLongs() throws IOException {
        final int size = randomIntBetween(0, 6);
        final Set<Long> sourceSet = new HashSet<>(size);
        for (int i = 0; i < size; i++) {
            // disjoint per-index ranges guarantee exactly `size` distinct values
            sourceSet.add(randomLongBetween(i * 1000, (i + 1) * 1000 - 1));
        }
        assertThat(sourceSet, iterableWithSize(size));

        final BytesStreamOutput out = new BytesStreamOutput();
        out.writeCollection(sourceSet, StreamOutput::writeLong);

        final Set<Long> targetSet = out.bytes().streamInput().readSet(StreamInput::readLong);
        assertThat(targetSet, equalTo(sourceSet));
    }

    public void testInstantSerialization() throws IOException {
        final Instant instant = Instant.now();
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.writeInstant(instant);
            try (StreamInput in = out.bytes().streamInput()) {
                final Instant serialized = in.readInstant();
                assertEquals(instant, serialized);
            }
        }
    }

    public void testOptionalInstantSerialization() throws IOException {
        final Instant instant = Instant.now();
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.writeOptionalInstant(instant);
            try (StreamInput in = out.bytes().streamInput()) {
                final Instant serialized = in.readOptionalInstant();
                assertEquals(instant, serialized);
            }
        }

        // the null case must round-trip to null as well
        final Instant missing = null;
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.writeOptionalInstant(missing);
            try (StreamInput in = out.bytes().streamInput()) {
                final Instant serialized = in.readOptionalInstant();
                assertEquals(missing, serialized);
            }
        }
    }

    /** Simple single-field {@link Writeable} wrapper used by the array tests. */
    static final class WriteableString implements Writeable {

        final String string;

        WriteableString(String string) {
            this.string = string;
        }

        WriteableString(StreamInput in) throws IOException {
            this(in.readString());
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }

            WriteableString that = (WriteableString) o;

            return string.equals(that.string);
        }

        @Override
        public int hashCode() {
            return string.hashCode();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeString(string);
        }
    }

}
strapdata/elassandra
server/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java
Java
apache-2.0
16,395
/*
 * Copyright 2012-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.session;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.session.SessionRepository;
import org.springframework.session.data.redis.RedisOperationsSessionRepository;
import org.springframework.session.data.redis.config.annotation.web.http.RedisHttpSessionConfiguration;

/**
 * Redis backed session configuration.
 * <p>
 * Only applies when Spring Session's Redis support is on the classpath, a
 * {@link RedisConnectionFactory} bean exists, and no user-defined
 * {@link SessionRepository} bean is present.
 *
 * @author Andy Wilkinson
 * @author Tommy Ludwig
 * @author Eddú Meléndez
 * @author Stephane Nicoll
 * @author Vedran Pavic
 */
@Configuration
@ConditionalOnClass({ RedisTemplate.class, RedisOperationsSessionRepository.class })
@ConditionalOnMissingBean(SessionRepository.class)
@ConditionalOnBean(RedisConnectionFactory.class)
@Conditional(SessionCondition.class)
@EnableConfigurationProperties(RedisSessionProperties.class)
class RedisSessionConfiguration {

	@Configuration
	public static class SpringBootRedisHttpSessionConfiguration
			extends RedisHttpSessionConfiguration {

		private SessionProperties sessionProperties;

		/**
		 * Applies Spring Boot's session and Redis-specific properties to the
		 * underlying {@link RedisHttpSessionConfiguration}.
		 * @param sessionProperties general session properties (timeout)
		 * @param redisSessionProperties Redis-specific properties (namespace,
		 * flush mode)
		 */
		@Autowired
		public void customize(SessionProperties sessionProperties,
				RedisSessionProperties redisSessionProperties) {
			this.sessionProperties = sessionProperties;
			Integer timeout = this.sessionProperties.getTimeout();
			// only override the default when a timeout was explicitly configured
			if (timeout != null) {
				setMaxInactiveIntervalInSeconds(timeout);
			}
			setRedisNamespace(redisSessionProperties.getNamespace());
			setRedisFlushMode(redisSessionProperties.getFlushMode());
		}

	}

}
bbrouwer/spring-boot
spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/session/RedisSessionConfiguration.java
Java
apache-2.0
2,669
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.operators.spec; import java.util.Collection; /** * Spec for stateful operators. */ public interface StatefulOperatorSpec { /** * Get the store descriptors for stores required by this operator. * * @return store descriptors for this operator's stores */ Collection<StoreDescriptor> getStoreDescriptors(); }
fredji97/samza
samza-core/src/main/java/org/apache/samza/operators/spec/StatefulOperatorSpec.java
Java
apache-2.0
1,163
# frozen_string_literal: true # # Cookbook:: postgresql # Recipe:: contrib # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # db_name = node['postgresql']['database_name'] # Install the PostgreSQL contrib package(s) from the distribution, # as specified by the node attributes. package node['postgresql']['contrib']['packages'] include_recipe 'postgresql::server' # Install PostgreSQL contrib extentions into the database, as specified by the # node attribute node['postgresql']['database_name']. if node['postgresql']['contrib'].attribute?('extensions') node['postgresql']['contrib']['extensions'].each do |pg_ext| postgresql_extension "#{db_name}/#{pg_ext}" end end
Coveros/starcanada2016
www-db/cookbooks/postgresql/recipes/contrib.rb
Ruby
apache-2.0
1,177
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.accumulo.core.client.lexicoder; /** * An encoder represents a typed object that can be encoded/decoded to/from a byte array. * * @since 1.6.0 */ public interface Encoder<T> { byte[] encode(T object); T decode(byte[] bytes) throws IllegalArgumentException; }
milleruntime/accumulo
core/src/main/java/org/apache/accumulo/core/client/lexicoder/Encoder.java
Java
apache-2.0
1,095
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.catalog.springboot;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;

import org.apache.camel.catalog.CamelCatalog;
import org.apache.camel.catalog.CatalogHelper;
import org.apache.camel.catalog.RuntimeProvider;

/**
 * A Spring Boot based {@link RuntimeProvider} which only includes the supported Camel components, data formats, and languages
 * which can be installed in Spring Boot using the starter dependencies.
 */
public class SpringBootRuntimeProvider implements RuntimeProvider {

    private static final String COMPONENT_DIR = "org/apache/camel/catalog/springboot/components";
    private static final String DATAFORMAT_DIR = "org/apache/camel/catalog/springboot/dataformats";
    private static final String LANGUAGE_DIR = "org/apache/camel/catalog/springboot/languages";
    private static final String OTHER_DIR = "org/apache/camel/catalog/springboot/others";
    private static final String COMPONENTS_CATALOG = "org/apache/camel/catalog/springboot/components.properties";
    private static final String DATA_FORMATS_CATALOG = "org/apache/camel/catalog/springboot/dataformats.properties";
    private static final String LANGUAGE_CATALOG = "org/apache/camel/catalog/springboot/languages.properties";
    private static final String OTHER_CATALOG = "org/apache/camel/catalog/springboot/others.properties";

    private CamelCatalog camelCatalog;

    @Override
    public CamelCatalog getCamelCatalog() {
        return camelCatalog;
    }

    @Override
    public void setCamelCatalog(CamelCatalog camelCatalog) {
        this.camelCatalog = camelCatalog;
    }

    @Override
    public String getProviderName() {
        return "springboot";
    }

    @Override
    public String getProviderGroupId() {
        return "org.apache.camel";
    }

    @Override
    public String getProviderArtifactId() {
        return "camel-catalog-provider-springboot";
    }

    @Override
    public String getComponentJSonSchemaDirectory() {
        return COMPONENT_DIR;
    }

    @Override
    public String getDataFormatJSonSchemaDirectory() {
        return DATAFORMAT_DIR;
    }

    @Override
    public String getLanguageJSonSchemaDirectory() {
        return LANGUAGE_DIR;
    }

    @Override
    public String getOtherJSonSchemaDirectory() {
        return OTHER_DIR;
    }

    @Override
    public List<String> findComponentNames() {
        return loadNames(COMPONENTS_CATALOG);
    }

    @Override
    public List<String> findDataFormatNames() {
        return loadNames(DATA_FORMATS_CATALOG);
    }

    @Override
    public List<String> findLanguageNames() {
        return loadNames(LANGUAGE_CATALOG);
    }

    @Override
    public List<String> findOtherNames() {
        return loadNames(OTHER_CATALOG);
    }

    /**
     * Loads the entry names from the given classpath catalog file.
     * <p>
     * Previously each find*Names() method duplicated this logic and never
     * closed the {@link InputStream}; try-with-resources now guarantees the
     * stream is released.
     *
     * @param catalogFile classpath location of the properties file to read
     * @return the names listed in the catalog, or an empty list when the
     *         resource is missing or cannot be read
     */
    private List<String> loadNames(String catalogFile) {
        List<String> names = new ArrayList<>();
        try (InputStream is = camelCatalog.getVersionManager().getResourceAsStream(catalogFile)) {
            if (is != null) {
                CatalogHelper.loadLines(is, names);
            }
        } catch (IOException e) {
            // ignore: a missing or unreadable catalog simply yields no names
        }
        return names;
    }
}
onders86/camel
platforms/camel-catalog-provider-springboot/src/main/java/org/apache/camel/catalog/springboot/SpringBootRuntimeProvider.java
Java
apache-2.0
4,859
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.metastore.events; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hive.metastore.IHMSHandler; import org.apache.hadoop.hive.metastore.api.Database; /** * Database read event */ @InterfaceAudience.Public @InterfaceStability.Stable public class PreReadDatabaseEvent extends PreEventContext { private final Database db; public PreReadDatabaseEvent(Database db, IHMSHandler handler) { super(PreEventType.READ_DATABASE, handler); this.db = db; } /** * @return the db */ public Database getDatabase() { return db; } }
alanfgates/hive
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/events/PreReadDatabaseEvent.java
Java
apache-2.0
1,483
#!/usr/bin/env python
# Line too long - pylint: disable=C0301
# Invalid name  - pylint: disable=C0103
"""
parseutils.py

Routines to parse "flexible" configuration files for tools like
gpaddmirrors, gprecoverseg, gpexpand, etc.

Copyright (c) EMC/Greenplum Inc 2011. All Rights Reserved.
"""
import sys
from gppylib.mainUtils import ExceptionNoStackTraceNeeded
from gppylib.gplog import get_default_logger, logging_is_verbose

logger = get_default_logger()


def caller():
    "Return name of calling function"
    # Only inspect the stack when verbose logging is on; frame access is
    # relatively expensive and the result is only used for debug output.
    if logging_is_verbose():
        return sys._getframe(1).f_code.co_name + '()'
    return ''


def canonicalize_address(addr):
    """
    Encases addresses in [ ] per RFC 2732.   Generally used to deal with
    ':' characters which are also often used as delimiters.

    Returns the addr string if it doesn't contain any ':' characters.
    If addr contains ':' and also contains a '[' then the addr string is
    simply returned under the assumption that it is already escaped as needed.
    Otherwise return a new string from addr by adding '[' prefix and ']' suffix.

    Examples
    --------

    >>> canonicalize_address('myhost')
    'myhost'
    >>> canonicalize_address('127.0.0.1')
    '127.0.0.1'
    >>> canonicalize_address('::1')
    '[::1]'
    >>> canonicalize_address('[::1]')
    '[::1]'
    >>> canonicalize_address('2620:0:170:610::13')
    '[2620:0:170:610::13]'
    >>> canonicalize_address('[2620:0:170:610::13]')
    '[2620:0:170:610::13]'

    @param addr: the address to possibly encase in [ ]
    @returns: the address, encased in [] if necessary
    """
    if ':' not in addr:
        return addr
    if '[' in addr:
        return addr
    return '[' + addr + ']'


#
# line parsing
#

def consume_to(delimiter, rest):
    """
    Consume characters from rest string until we encounter the delimiter.
    Returns (None, after, None) where after are the characters after delimiter
    or (None, rest, 'does not contain '+delimiter) when delimiter is not
    encountered.

    Examples
    --------

    >>> consume_to('=', 'abc=def:ghi')
    (None, 'def:ghi', None)

    @param delimiter: the delimiter string
    @param rest: the string to read such as 'abc:def:ghi'
    @returns: (None, after, None) tuple such as (None, 'def:ghi', None)
    """
    p = rest.find(delimiter)
    if p < 0:
        return None, rest, 'does not contain '+delimiter
    return None, rest[p+1:], None


def read_to(delimiter, rest):
    """
    Read characters from rest string until we encounter the delimiter.
    Separate the string into characters 'before' and 'after' the delimiter.
    If no delimiter is found, assign entire string to 'before' and None
    to 'after'.

    Examples
    --------

    >>> read_to(':', 'abc:def:ghi')
    ('abc', 'def:ghi', None)
    >>> read_to(':', 'abc:def')
    ('abc', 'def', None)
    >>> read_to(':', 'abc')
    ('abc', None, None)
    >>> read_to(':', '')
    ('', None, None)

    Note this returns a 3-tuple for compatibility with other routines
    which use the third element as an error message

    @param delimiter: the delimiter string
    @param rest: the string to read such as 'abc:def:ghi'
    @returns: (before, after, None) tuple such as ('abc', 'def:ghi', None)
    """
    p = rest.find(delimiter)
    if p < 0:
        return rest, None, None
    return rest[0:p], rest[p+1:], None


def read_to_bracketed(delimiter, rest):
    """
    Read characters from rest string which is expected to start with a '['.

    If rest does not start with '[', return a tuple
    (None, rest, 'does not begin with [').

    If rest string starts with a '[', then read until we find ']'.
    If no ']' is found, return a tuple
    (None, rest, 'does not contain ending ]').

    Otherwise separate the string into 'before' representing characters
    between '[' and ']' and 'after' representing characters after the ']'
    and then check that the first character found after the ']' is a :'.

    If there are no characters after the ']', return a tuple
    (before, None, None) where before contains the characters between
    '[' and ']'.

    If there are characters after ']' other than the delimiter, return a tuple
    (None, rest, 'characters not allowed after ending ]')

    Otherwise return a tuple (before, after, None) where before contains the
    characters between '[' and ']' and after contains the characters after
    the ']:'.

    This function avoids raising Exceptions for these particular cases of
    malformed input since they are easier to report in the calling function.

    Examples
    --------

    >>> read_to_bracketed(':', '[abc:def]')
    ('abc:def', None, None)
    >>> read_to_bracketed(':', '[abc]:def:ghi')
    ('abc', 'def:ghi', None)
    >>> read_to_bracketed(':', '[abc:def]:ghi:jkl')
    ('abc:def', 'ghi:jkl', None)
    >>> read_to_bracketed(':', 'abc:def:ghi:jkl')
    (None, 'abc:def:ghi:jkl', 'does not begin with [')
    >>> read_to_bracketed(':', '[abc:def:ghi:jkl')
    (None, '[abc:def:ghi:jkl', 'does not contain ending ]')
    >>> read_to_bracketed(':', '[abc]extra:def:ghi:jkl')
    (None, '[abc]extra:def:ghi:jkl', 'characters not allowed after ending ]')

    @param delimiter: the delimiter string
    @param rest: the string to read such as '[abc:def]:ghi'
    @returns: (before, after, reason) tuple such as ('abc:def', 'ghi', None)
    """
    if not rest.startswith('['):
        return None, rest, 'does not begin with ['
    p = rest.find(']')
    if p < 0:
        return None, rest, 'does not contain ending ]'
    if len(rest[p+1:]) < 1:
        return rest[1:p], None, None
    if rest[p+1] != delimiter:
        return None, rest, 'characters not allowed after ending ]'
    return rest[1:p], rest[p+2:], None


def read_to_possibly_bracketed(delimiter, rest):
    """
    Behave as read_to_bracketed above when rest starts with a '[',
    otherwise as read_to.  This is intended to support fields which
    may contain an IPv6 address, an IPv4 address or a hostname.

    Examples
    --------

    >>> read_to_possibly_bracketed(':', 'abc:def:ghi')
    ('abc', 'def:ghi', None)
    >>> read_to_possibly_bracketed(':', '[abc]:def:ghi')
    ('abc', 'def:ghi', None)
    >>> read_to_possibly_bracketed(':', '[abc:def]:ghi')
    ('abc:def', 'ghi', None)
    >>> read_to_possibly_bracketed(':', '[]:ghi')
    ('', 'ghi', None)
    >>> read_to_possibly_bracketed(':', ':ghi')
    ('', 'ghi', None)
    >>> read_to_possibly_bracketed(':', '[ghi]')
    ('ghi', None, None)
    >>> read_to_possibly_bracketed(':', '[]')
    ('', None, None)
    >>> read_to_possibly_bracketed(':', '')
    ('', None, None)

    @param delimiter: the delimiter string
    @param rest: the string to read such as '[abc:def]:ghi'
    @returns: (before, after, reason) tuple such as ('abc:def', 'ghi', None)
    """
    if rest.startswith('['):
        return read_to_bracketed(delimiter, rest)
    return read_to(delimiter, rest)


class LineParser:
    """
    Manage state to parse a single line, generally from a configuration
    file with fields delimited by colons.
    """

    def __init__(self, caller, filename, lineno, line):
        "Initialize"
        # self.rest tracks the not-yet-consumed tail of the line;
        # self.line keeps the original for error messages.
        (self.caller, self.filename, self.lineno, self.line, self.rest, self.error) = (caller, filename, lineno, line, line, None)
        self.logger = logger
        if logging_is_verbose():
            self.logger.debug("%s:%s" % (filename, lineno))

    def ensure_more_to_process(self, name):
        "Raise an exception if we've exhausted the input line"
        if self.rest is None:
            msg = "out of values (reading %s)" % name
            raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (self.filename, self.lineno, self.caller, self.line, msg))

    def read_delimited_field(self, delimiter, name="next field", reader=read_to):
        """
        Attempts to read the next field in the line up to the specified
        delimiter using the specified reading method, raising any error
        encountered as an exception.  Returns the read field when successful.
        """
        self.ensure_more_to_process(name)
        value, self.rest, error = reader(delimiter, self.rest)
        if error is not None:
            msg = "%s (reading %s) >>%s" % (error, name, self.rest)
            raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (self.filename, self.lineno, self.caller, self.line, msg))
        if logging_is_verbose():
            self.logger.debug("  name=%-30s delimiter='%s' value=%s" % (name, delimiter, value))
        return value

    def does_starts_with(self, expected):
        "Returns true if line starts with expected value, or return false"
        return self.line.startswith(expected)

    def ensure_starts_with(self, expected):
        "Returns true if line starts with expected value, or raise an exception otherwise"
        if not self.does_starts_with(expected):
            msg = "does not start with %s" % expected
            raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (self.filename, self.lineno, self.caller, self.line, msg))
        # consume the matched prefix from the unprocessed remainder
        self.rest = self.rest[len(expected):]

    def handle_field(self, name, dst=None, delimiter=':', stripchars=None):
        """
        Attempts to read the next field up to a given delimiter.  Names
        starting with '[' indicate that the field should use the bracketed
        parsing logic.  If dst is not none, also assigns the value to
        dst[name].  If stripchars is not none, value is first stripped of
        leading and trailing stripchars.
        """
        if name[0] == '[':
            name = name.strip('[]')
            value = self.read_delimited_field(delimiter, name, read_to_possibly_bracketed)
        else:
            value = self.read_delimited_field(delimiter, name)
        if stripchars is not None:
            value = value.strip(stripchars)
        if dst is not None:
            dst[name] = value
        return value


#
# file parsing
#

def line_reader(f):
    """
    Read the contents of the given input, generating the non-blank
    non-comment lines found within as a series of tuples of the form
    (line number, line).
>>> [l for l in line_reader(['', '# test', 'abc:def'])] [(3, 'abc:def')] """ for offset, line in enumerate(f): line = line.strip() if len(line) < 1 or line[0] == '#': continue yield offset+1, line ################ # gpfilespace format # # First line in file is the filespace name, remaining lines are # specify hostname, dbid, and a path: # # filespace:name # hostname:dbid:path # ... ################ def parse_fspacename(filename, lineno, line): """ Parse the filespace: line which appears at the beginning of the gpfilespace configuration file. >>> parse_fspacename('file', 1, 'filespace:blah') 'blah' """ p = LineParser(caller(), filename, lineno, line) p.ensure_starts_with('filespace:') fspacename = p.read_delimited_field(':') if p.rest is not None: msg = "unexpected characters after filespace name >>%s" % p.rest raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg)) return fspacename def parse_dfs_url(filename, lineno, line): """ Parse the filespace: line which appears at the beginning of the gpfilespace configuration file. >>> parse_dfs_url('file', 1, 'dfs_url::localhost:9000/gpsql') 'localhost:9000/gpsql' """ p = LineParser(caller(), filename, lineno, line) p.ensure_starts_with('dfs_url::') dfs_url = p.read_delimited_field('::') if p.rest is not None: msg = "unexpected characters after filespace name >>%s" % p.rest raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg)) return dfs_url def parse_fspacesys(filename, lineno, line): """ Pasrse the filesystem name: the optional second line in the gpfilespace configuration file. 
>>> parse_fspacetype('file', 2, 'fsysname:local|filesystem_name') local|filesystem_name """ p = LineParser(caller(), filename, lineno, line) if not p.does_starts_with('fsysname:'): return None p.ensure_starts_with('fsysname:') fsysname = p.read_delimited_field(':') if p.rest is not None: msg = "unexpected characters after filespace type >>%s" % p.rest raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg)) return fsysname def parse_fspacereplica(filename, lineno, line): """ Pasrse the filespace replica: the optional third line in the gpfilespace configuration file. >>> parse_fspacereplica('file', 3, 'fsreplica:repnum') repnum """ p = LineParser(caller(), filename, lineno, line) if not p.does_starts_with('fsreplica:'): return None p.ensure_starts_with('fsreplica:') fsreplica = p.read_delimited_field(':') if p.rest is not None: msg = "unexpected characters after filespace replica >>%s" % p.rest raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg)) return fsreplica def parse_gpfilespace_line(filename, lineno, line): """ Parse a line of the gpfilespace configuration file other than the first. >>> parse_gpfilespace_line('file', 1, '[::1]:dbid:path') ('::1', 'dbid', 'path') >>> parse_gpfilespace_line('file', 1, 'host:dbid:path') ('host', 'dbid', 'path') """ p = LineParser(caller(), filename, lineno, line) host = p.handle_field('[host]') # [host] indicates possible IPv6 address dbid = p.handle_field('dbid') path = p.handle_field('[path]') # url contains the ':'. 
if p.rest is not None: msg = "unexpected characters after path name >>%s" % p.rest raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg)) return host, dbid, path ################ # gpexpand segment file format: # # Form of file is hostname:address:port:dtadir:dbid:contentId:role[:replicationPort] ################ def parse_gpexpand_segment_line(filename, lineno, line): """ Parse a line of the gpexpand configuration file. >>> parse_gpexpand_segment_line('file', 1, "localhost:[::1]:40001:/Users/ctaylor/data/p2/gpseg1:4:1:p") ('localhost', '::1', '40001', '/Users/ctaylor/data/p2/gpseg1', '4', '1', 'p', None) >>> parse_gpexpand_segment_line('file', 1, "localhost:[::1]:40001:/Users/ctaylor/data/p2/gpseg1:4:1:p:41001") ('localhost', '::1', '40001', '/Users/ctaylor/data/p2/gpseg1', '4', '1', 'p', '41001') """ p = LineParser(caller(), filename, lineno, line) hostname = p.handle_field('[host]') # [host] indicates possible IPv6 address address = p.handle_field('[address]') # [address] indicates possible IPv6 address port = p.handle_field('port') datadir = p.handle_field('datadir') dbid = p.handle_field('dbid') contentId = p.handle_field('contentId') role = p.handle_field('role') replicationPort = None if p.rest is not None: replicationPort = p.handle_field('replicationPort') if p.rest is not None: msg = "unexpected characters after replicationPort >>%s" % p.rest raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg)) return hostname, address, port, datadir, dbid, contentId, role, replicationPort ################ # gpaddmirrors format: # # filespaceOrder=[filespace1_fsname[:filespace2_fsname:...]] # mirror[content]=content:address:port:mir_replication_port:pri_replication_port:fselocation[:fselocation:...] 
# ################ def parse_filespace_order(filename, lineno, line): """ Parse the filespaceOrder= line appearing at the beginning of the gpaddmirrors, gpmovemirrors and gprecoverseg configuration files. >>> parse_filespace_order('file', 1, "filespaceOrder=fs1:fs2:fs3") ['fs1', 'fs2', 'fs3'] >>> parse_filespace_order('file', 1, "filespaceOrder=") [] """ p = LineParser(caller(), filename, lineno, line) p.ensure_starts_with('filespaceOrder=') fslist = [] while p.rest: fslist.append( p.read_delimited_field(':', 'next filespace') ) return fslist def parse_gpaddmirrors_line(filename, lineno, line, fslist): """ Parse a line in the gpaddmirrors configuration file other than the first. >>> line = "mirror0=0:[::1]:40001:50001:60001:/Users/ctaylor/data/p2/gpseg1" >>> fixed, flex = parse_gpaddmirrors_line('file', 1, line, []) >>> fixed["address"], fixed["contentId"], fixed["dataDirectory"] ('::1', '0', '/Users/ctaylor/data/p2/gpseg1') """ fixed = {} flexible = {} p = LineParser(caller(), filename, lineno, line) p.ensure_starts_with('mirror') p.read_delimited_field('=', 'content id', consume_to) # [address] indicates possible IPv6 address for field in [ 'contentId', '[address]', 'port', 'replicationPort', 'primarySegmentReplicationPort', 'dataDirectory' ]: p.handle_field(field, fixed) for fsname in fslist: p.handle_field('[' + fsname + ']', flexible) return fixed, flexible ################ # gpmovemirrors format: # # This is basically the same as the gprecoverseg format (since gpmovemirrors ultimately just # passes the input file after validating it) but the field names are slightly different. # # filespaceOrder=[filespace1_fsname[:filespace2_fsname:...] # old_address:port:datadir new_address:port:replication_port:datadir[:fselocation:...] # ^ # note space ################ def parse_gpmovemirrors_line(filename, lineno, line, fslist): """ Parse a line in the gpmovemirrors configuration file other than the first. 
>>> line = "[::1]:40001:/Users/ctaylor/data/m2/gpseg1 [::2]:40101:50101:/Users/ctaylor/data/m2/gpseg1:/fs1" >>> fixed, flex = parse_gpmovemirrors_line('file', 1, line, ['fs1']) >>> fixed["oldAddress"], fixed["newAddress"] ('::1', '::2') >>> flex {'fs1': '/fs1'} """ groups = len(line.split()) if groups != 2: msg = "need two groups of fields delimited by a space for old and new mirror, not %d" % groups raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg)) fixed = {} flexible = {} p = LineParser(caller(), filename, lineno, line) p.handle_field('[oldAddress]', fixed) # [oldAddress] indicates possible IPv6 address p.handle_field('oldPort', fixed) p.handle_field('oldDataDirectory', fixed, delimiter=' ', stripchars=' \t') # MPP-15675 note stripchars here and next line p.handle_field('[newAddress]', fixed, stripchars=' \t') # [newAddress] indicates possible IPv6 address p.handle_field('newPort', fixed) p.handle_field('newReplicationPort', fixed) p.handle_field('newDataDirectory', fixed) for fsname in fslist: p.handle_field(fsname, flexible) if p.rest is not None: msg = "unexpected characters after mirror fields >>%s" % p.rest raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg)) return fixed, flexible ################ # gprecoverseg format: # # filespaceOrder=[filespace1_fsname[:filespace2_fsname:...]] # failed_host_address:port:datadir [recovery_host_address:port:replication_port:datadir[:fselocation:...]] # ^ # note space # # filespace locations are only present at the end of the other fields when there # are two groups of fields separated by a space. If there is only one group of # fields then we assume the entire line is only three fields as below with no # filespace locations: # # failed_host_address:port:datadir ################ def parse_gprecoverseg_line(filename, lineno, line, fslist): """ Parse a line in the gprecoverseg configuration file other than the first. 
>>> line = "[::1]:40001:/Users/ctaylor/data/m2/gpseg1" >>> fixed, flex = parse_gprecoverseg_line('file', 1, line, []) >>> fixed["failedAddress"], fixed["failedPort"], fixed["failedDataDirectory"] ('::1', '40001', '/Users/ctaylor/data/m2/gpseg1') >>> line = "[::1]:40001:/Users/ctaylor/data/m2/gpseg1 [::2]:40101:50101:/Users/ctaylor/data/m2/gpseg1:/fs1" >>> fixed, flex = parse_gprecoverseg_line('file', 1, line, ['fs1']) >>> fixed["newAddress"], fixed["newPort"], fixed["newReplicationPort"], fixed["newDataDirectory"] ('::2', '40101', '50101', '/Users/ctaylor/data/m2/gpseg1') >>> flex {'fs1': '/fs1'} """ groups = len(line.split()) if groups not in [1, 2]: msg = "only one or two groups of fields delimited by a space, not %d" % groups raise ExceptionNoStackTraceNeeded("%s:%s:%s LINE >>%s\n%s" % (filename, lineno, caller(), line, msg)) fixed = {} flexible = {} p = LineParser(caller(), filename, lineno, line) p.handle_field('[failedAddress]', fixed) # [failedAddress] indicates possible IPv6 address p.handle_field('failedPort', fixed) if groups == 1: p.handle_field('failedDataDirectory', fixed) else: p.handle_field('failedDataDirectory', fixed, delimiter=' ', stripchars=' \t') # MPP-15675 note stripchars here and next line p.handle_field('[newAddress]', fixed, stripchars=' \t') # [newAddress] indicates possible IPv6 address p.handle_field('newPort', fixed) p.handle_field('newReplicationPort', fixed) p.handle_field('newDataDirectory', fixed) for fsname in fslist: p.handle_field('[' + fsname + ']', flexible) return fixed, flexible if __name__ == '__main__': import doctest doctest.testmod()
PGer/incubator-hawq
tools/bin/gppylib/parseutils.py
Python
apache-2.0
22,013
package org.keycloak.example.oauth; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.DefaultHttpClient; import org.jboss.logging.Logger; import org.keycloak.KeycloakSecurityContext; import org.keycloak.adapters.AdapterUtils; import org.keycloak.servlet.ServletOAuthClient; import org.keycloak.util.JsonSerialization; import org.keycloak.util.UriUtils; import javax.enterprise.context.ApplicationScoped; import javax.faces.application.FacesMessage; import javax.faces.context.FacesContext; import javax.inject.Inject; import javax.inject.Named; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.List; /** * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a> * @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a> * @version $Revision: 1 $ */ @ApplicationScoped @Named("databaseClient") public class DatabaseClient { @Inject @ServletRequestQualifier private HttpServletRequest request; @Inject private HttpServletResponse response; @Inject private FacesContext facesContext; @Inject private ServletOAuthClient oauthClient; @Inject private UserData userData; private static final Logger logger = Logger.getLogger(DatabaseClient.class); public void retrieveAccessToken() { try { oauthClient.redirectRelative("client.jsf", request, response); } catch (IOException e) { throw new RuntimeException(e); } } static class TypedList extends ArrayList<String> {} public void sendCustomersRequest() { List<String> customers = sendRequestToDBApplication(getBaseUrl() + "/database/customers"); userData.setCustomers(customers); } public void sendProductsRequest() { List<String> products = sendRequestToDBApplication(getBaseUrl() + "/database/products"); userData.setProducts(products); } protected 
List<String> sendRequestToDBApplication(String dbUri) { HttpClient client = new DefaultHttpClient(); HttpGet get = new HttpGet(dbUri); try { if (userData.isHasAccessToken()) { get.addHeader("Authorization", "Bearer " + userData.getAccessToken()); } HttpResponse response = client.execute(get); switch (response.getStatusLine().getStatusCode()) { case 200: HttpEntity entity = response.getEntity(); InputStream is = entity.getContent(); try { return JsonSerialization.readValue(is, TypedList.class); } finally { is.close(); } case 401: facesContext.addMessage(null, new FacesMessage("Status: 401. Request not authenticated! You need to retrieve access token first.")); break; case 403: facesContext.addMessage(null, new FacesMessage("Status: 403. Access token has insufficient privileges")); break; default: facesContext.addMessage(null, new FacesMessage("Status: " + response.getStatusLine() + ". Not able to retrieve data. See log for details")); logger.warn("Error occured. Status: " + response.getStatusLine()); } return null; } catch (IOException e) { e.printStackTrace(); facesContext.addMessage(null, new FacesMessage("Unknown error. See log for details")); return null; } } public String getBaseUrl() { KeycloakSecurityContext session = (KeycloakSecurityContext)request.getAttribute(KeycloakSecurityContext.class.getName()); return AdapterUtils.getOriginForRestCalls(request.getRequestURL().toString(), session); } }
eugene-chow/keycloak
examples/demo-template/third-party-cdi/src/main/java/org/keycloak/example/oauth/DatabaseClient.java
Java
apache-2.0
4,047
<?php

final class PhortuneLandingController extends PhortuneController {

  public function handleRequest(AphrontRequest $request) {
    // Load every Phortune account the requesting user can act on.
    $user_accounts = PhortuneAccountQuery::loadAccountsForUser(
      $request->getViewer(),
      PhabricatorContentSource::newFromRequest($request));

    // Default destination is the account overview; when the user has
    // exactly one account, send them straight to that account's page.
    $destination = $this->getApplicationURI('account/');
    if (count($user_accounts) == 1) {
      $only_account = head($user_accounts);
      $destination = $this->getApplicationURI($only_account->getID().'/');
    }

    return id(new AphrontRedirectResponse())->setURI($destination);
  }

}
gsinkovskiy/phabricator
src/applications/phortune/controller/PhortuneLandingController.php
PHP
apache-2.0
588
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.optimizer.stats.annotation; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.plan.Statistics; public class AnnotateStatsProcCtx implements NodeProcessorCtx { private ParseContext pctx; private HiveConf conf; private Statistics andExprStats = null; public AnnotateStatsProcCtx(ParseContext pctx) { this.setParseContext(pctx); if(pctx != null) { this.setConf(pctx.getConf()); } else { this.setConf(null); } } public HiveConf getConf() { return conf; } public void setConf(HiveConf conf) { this.conf = conf; } public ParseContext getParseContext() { return pctx; } public void setParseContext(ParseContext pctx) { this.pctx = pctx; } public Statistics getAndExprStats() { return andExprStats; } public void setAndExprStats(Statistics andExprStats) { this.andExprStats = andExprStats; } }
cschenyuan/hive-hack
ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/AnnotateStatsProcCtx.java
Java
apache-2.0
1,861
from collections import OrderedDict
import copy
import operator
from functools import partial, reduce, update_wrapper
import warnings

from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.admin import widgets, helpers
from django.contrib.admin import validation
from django.contrib.admin.checks import (BaseModelAdminChecks, ModelAdminChecks,
    InlineModelAdminChecks)
from django.contrib.admin.exceptions import DisallowedModelAdminToField
from django.contrib.admin.utils import (quote, unquote, flatten_fieldsets,
    get_deleted_objects, model_format_dict, NestedObjects,
    lookup_needs_distinct)
from django.contrib.admin.templatetags.admin_static import static
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.auth import get_permission_codename
from django.core import checks
from django.core.exceptions import (PermissionDenied, ValidationError,
    FieldError, ImproperlyConfigured)
from django.core.paginator import Paginator
from django.core.urlresolvers import reverse
from django.db import models, transaction, router
from django.db.models.constants import LOOKUP_SEP
from django.db.models.related import RelatedObject
from django.db.models.fields import BLANK_CHOICE_DASH, FieldDoesNotExist
from django.db.models.sql.constants import QUERY_TERMS
from django.forms.formsets import all_valid, DELETION_FIELD_NAME
from django.forms.models import (modelform_factory, modelformset_factory,
    inlineformset_factory, BaseInlineFormSet, modelform_defines_fields)
from django.http import Http404, HttpResponseRedirect
from django.http.response import HttpResponseBase
from django.shortcuts import get_object_or_404
from django.template.response import SimpleTemplateResponse, TemplateResponse
from django.utils import six
from django.utils.decorators import method_decorator
from django.utils.deprecation import (RenameMethodsBase,
    RemovedInDjango18Warning, RemovedInDjango19Warning)
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.html import escape, escapejs
from django.utils.http import urlencode
from django.utils.text import capfirst, get_text_list
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext
from django.utils.safestring import mark_safe
from django.views.decorators.csrf import csrf_protect


# Query-string markers used by the admin views.
IS_POPUP_VAR = '_popup'
TO_FIELD_VAR = '_to_field'

# Layout constants for ModelAdmin.radio_fields.
HORIZONTAL, VERTICAL = 1, 2


def get_content_type_for_model(obj):
    """Return the ContentType for ``obj``, ignoring proxy concreteness."""
    # Since this module gets imported in the application's root package,
    # it cannot import models from other applications at the module level.
    from django.contrib.contenttypes.models import ContentType
    content_type = ContentType.objects.get_for_model(obj, for_concrete_model=False)
    return content_type


def get_ul_class(radio_style):
    """Map a radio_fields style constant to its admin CSS class."""
    if radio_style == VERTICAL:
        return 'radiolist'
    return 'radiolist inline'


class IncorrectLookupParameters(Exception):
    """Raised when changelist query parameters cannot be interpreted."""
    pass


# Defaults for formfield_overrides. ModelAdmin subclasses can change this
# by adding to ModelAdmin.formfield_overrides.
FORMFIELD_FOR_DBFIELD_DEFAULTS = { models.DateTimeField: { 'form_class': forms.SplitDateTimeField, 'widget': widgets.AdminSplitDateTime }, models.DateField: {'widget': widgets.AdminDateWidget}, models.TimeField: {'widget': widgets.AdminTimeWidget}, models.TextField: {'widget': widgets.AdminTextareaWidget}, models.URLField: {'widget': widgets.AdminURLFieldWidget}, models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget}, models.BigIntegerField: {'widget': widgets.AdminBigIntegerFieldWidget}, models.CharField: {'widget': widgets.AdminTextInputWidget}, models.ImageField: {'widget': widgets.AdminFileWidget}, models.FileField: {'widget': widgets.AdminFileWidget}, models.EmailField: {'widget': widgets.AdminEmailInputWidget}, } csrf_protect_m = method_decorator(csrf_protect) class RenameBaseModelAdminMethods(forms.MediaDefiningClass, RenameMethodsBase): renamed_methods = ( ('queryset', 'get_queryset', RemovedInDjango18Warning), ) class BaseModelAdmin(six.with_metaclass(RenameBaseModelAdminMethods)): """Functionality common to both ModelAdmin and InlineAdmin.""" raw_id_fields = () fields = None exclude = None fieldsets = None form = forms.ModelForm filter_vertical = () filter_horizontal = () radio_fields = {} prepopulated_fields = {} formfield_overrides = {} readonly_fields = () ordering = None view_on_site = True # Validation of ModelAdmin definitions # Old, deprecated style: validator_class = None default_validator_class = validation.BaseValidator # New style: checks_class = BaseModelAdminChecks @classmethod def validate(cls, model): warnings.warn( 'ModelAdmin.validate() is deprecated. Use "check()" instead.', RemovedInDjango19Warning) if cls.validator_class: validator = cls.validator_class() else: validator = cls.default_validator_class() validator.validate(cls, model) @classmethod def check(cls, model, **kwargs): if cls.validator_class: warnings.warn( 'ModelAdmin.validator_class is deprecated. 
' 'ModeAdmin validators must be converted to use ' 'the system check framework.', RemovedInDjango19Warning) validator = cls.validator_class() try: validator.validate(cls, model) except ImproperlyConfigured as e: return [checks.Error(e.args[0], hint=None, obj=cls)] else: return [] else: return cls.checks_class().check(cls, model, **kwargs) def __init__(self): overrides = FORMFIELD_FOR_DBFIELD_DEFAULTS.copy() overrides.update(self.formfield_overrides) self.formfield_overrides = overrides def formfield_for_dbfield(self, db_field, **kwargs): """ Hook for specifying the form Field instance for a given database Field instance. If kwargs are given, they're passed to the form Field's constructor. """ request = kwargs.pop("request", None) # If the field specifies choices, we don't need to look for special # admin widgets - we just need to use a select widget of some kind. if db_field.choices: return self.formfield_for_choice_field(db_field, request, **kwargs) # ForeignKey or ManyToManyFields if isinstance(db_field, (models.ForeignKey, models.ManyToManyField)): # Combine the field kwargs with any options for formfield_overrides. # Make sure the passed in **kwargs override anything in # formfield_overrides because **kwargs is more specific, and should # always win. if db_field.__class__ in self.formfield_overrides: kwargs = dict(self.formfield_overrides[db_field.__class__], **kwargs) # Get the correct formfield. if isinstance(db_field, models.ForeignKey): formfield = self.formfield_for_foreignkey(db_field, request, **kwargs) elif isinstance(db_field, models.ManyToManyField): formfield = self.formfield_for_manytomany(db_field, request, **kwargs) # For non-raw_id fields, wrap the widget with a wrapper that adds # extra HTML -- the "add other" interface -- to the end of the # rendered output. formfield can be None if it came from a # OneToOneField with parent_link=True or a M2M intermediary. 
if formfield and db_field.name not in self.raw_id_fields: related_modeladmin = self.admin_site._registry.get(db_field.rel.to) can_add_related = bool(related_modeladmin and related_modeladmin.has_add_permission(request)) formfield.widget = widgets.RelatedFieldWidgetWrapper( formfield.widget, db_field.rel, self.admin_site, can_add_related=can_add_related) return formfield # If we've got overrides for the formfield defined, use 'em. **kwargs # passed to formfield_for_dbfield override the defaults. for klass in db_field.__class__.mro(): if klass in self.formfield_overrides: kwargs = dict(copy.deepcopy(self.formfield_overrides[klass]), **kwargs) return db_field.formfield(**kwargs) # For any other type of field, just call its formfield() method. return db_field.formfield(**kwargs) def formfield_for_choice_field(self, db_field, request=None, **kwargs): """ Get a form Field for a database Field that has declared choices. """ # If the field is named as a radio_field, use a RadioSelect if db_field.name in self.radio_fields: # Avoid stomping on custom widget/choices arguments. if 'widget' not in kwargs: kwargs['widget'] = widgets.AdminRadioSelect(attrs={ 'class': get_ul_class(self.radio_fields[db_field.name]), }) if 'choices' not in kwargs: kwargs['choices'] = db_field.get_choices( include_blank=db_field.blank, blank_choice=[('', _('None'))] ) return db_field.formfield(**kwargs) def get_field_queryset(self, db, db_field, request): """ If the ModelAdmin specifies ordering, the queryset should respect that ordering. Otherwise don't specify the queryset, let the field decide (returns None in that case). 
""" related_admin = self.admin_site._registry.get(db_field.rel.to, None) if related_admin is not None: ordering = related_admin.get_ordering(request) if ordering is not None and ordering != (): return db_field.rel.to._default_manager.using(db).order_by(*ordering) return None def formfield_for_foreignkey(self, db_field, request=None, **kwargs): """ Get a form Field for a ForeignKey. """ db = kwargs.get('using') if db_field.name in self.raw_id_fields: kwargs['widget'] = widgets.ForeignKeyRawIdWidget(db_field.rel, self.admin_site, using=db) elif db_field.name in self.radio_fields: kwargs['widget'] = widgets.AdminRadioSelect(attrs={ 'class': get_ul_class(self.radio_fields[db_field.name]), }) kwargs['empty_label'] = _('None') if db_field.blank else None if 'queryset' not in kwargs: queryset = self.get_field_queryset(db, db_field, request) if queryset is not None: kwargs['queryset'] = queryset return db_field.formfield(**kwargs) def formfield_for_manytomany(self, db_field, request=None, **kwargs): """ Get a form Field for a ManyToManyField. """ # If it uses an intermediary model that isn't auto created, don't show # a field in admin. 
if not db_field.rel.through._meta.auto_created: return None db = kwargs.get('using') if db_field.name in self.raw_id_fields: kwargs['widget'] = widgets.ManyToManyRawIdWidget(db_field.rel, self.admin_site, using=db) kwargs['help_text'] = '' elif db_field.name in (list(self.filter_vertical) + list(self.filter_horizontal)): kwargs['widget'] = widgets.FilteredSelectMultiple(db_field.verbose_name, (db_field.name in self.filter_vertical)) if 'queryset' not in kwargs: queryset = self.get_field_queryset(db, db_field, request) if queryset is not None: kwargs['queryset'] = queryset return db_field.formfield(**kwargs) def get_view_on_site_url(self, obj=None): if obj is None or not self.view_on_site: return None if callable(self.view_on_site): return self.view_on_site(obj) elif self.view_on_site and hasattr(obj, 'get_absolute_url'): # use the ContentType lookup if view_on_site is True return reverse('admin:view_on_site', kwargs={ 'content_type_id': get_content_type_for_model(obj).pk, 'object_id': obj.pk }) @property def declared_fieldsets(self): warnings.warn( "ModelAdmin.declared_fieldsets is deprecated and " "will be removed in Django 1.9.", RemovedInDjango19Warning, stacklevel=2 ) if self.fieldsets: return self.fieldsets elif self.fields: return [(None, {'fields': self.fields})] return None def get_fields(self, request, obj=None): """ Hook for specifying fields. """ return self.fields def get_fieldsets(self, request, obj=None): """ Hook for specifying fieldsets. """ # We access the property and check if it triggers a warning. # If it does, then it's ours and we can safely ignore it, but if # it doesn't then it has been overridden so we must warn about the # deprecation. 
with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") declared_fieldsets = self.declared_fieldsets if len(w) != 1 or not issubclass(w[0].category, RemovedInDjango19Warning): warnings.warn( "ModelAdmin.declared_fieldsets is deprecated and " "will be removed in Django 1.9.", RemovedInDjango19Warning ) if declared_fieldsets: return declared_fieldsets if self.fieldsets: return self.fieldsets return [(None, {'fields': self.get_fields(request, obj)})] def get_ordering(self, request): """ Hook for specifying field ordering. """ return self.ordering or () # otherwise we might try to *None, which is bad ;) def get_readonly_fields(self, request, obj=None): """ Hook for specifying custom readonly fields. """ return self.readonly_fields def get_prepopulated_fields(self, request, obj=None): """ Hook for specifying custom prepopulated fields. """ return self.prepopulated_fields def get_queryset(self, request): """ Returns a QuerySet of all model instances that can be edited by the admin site. This is used by changelist_view. """ qs = self.model._default_manager.get_queryset() # TODO: this should be handled by some parameter to the ChangeList. ordering = self.get_ordering(request) if ordering: qs = qs.order_by(*ordering) return qs def lookup_allowed(self, lookup, value): from django.contrib.admin.filters import SimpleListFilter model = self.model # Check FKey lookups that are allowed, so that popups produced by # ForeignKeyRawIdWidget, on the basis of ForeignKey.limit_choices_to, # are allowed to work. for l in model._meta.related_fkey_lookups: # As ``limit_choices_to`` can be a callable, invoke it here. if callable(l): l = l() for k, v in widgets.url_params_from_lookup_dict(l).items(): if k == lookup and v == value: return True parts = lookup.split(LOOKUP_SEP) # Last term in lookup is a query term (__exact, __startswith etc) # This term can be ignored. 
if len(parts) > 1 and parts[-1] in QUERY_TERMS: parts.pop() # Special case -- foo__id__exact and foo__id queries are implied # if foo has been specifically included in the lookup list; so # drop __id if it is the last part. However, first we need to find # the pk attribute name. rel_name = None for part in parts[:-1]: try: field, _, _, _ = model._meta.get_field_by_name(part) except FieldDoesNotExist: # Lookups on non-existent fields are ok, since they're ignored # later. return True if hasattr(field, 'rel'): if field.rel is None: # This property or relation doesn't exist, but it's allowed # since it's ignored in ChangeList.get_filters(). return True model = field.rel.to if hasattr(field.rel, 'get_related_field'): rel_name = field.rel.get_related_field().name else: rel_name = None elif isinstance(field, RelatedObject): model = field.model rel_name = model._meta.pk.name else: rel_name = None if rel_name and len(parts) > 1 and parts[-1] == rel_name: parts.pop() if len(parts) == 1: return True clean_lookup = LOOKUP_SEP.join(parts) valid_lookups = [self.date_hierarchy] for filter_item in self.list_filter: if isinstance(filter_item, type) and issubclass(filter_item, SimpleListFilter): valid_lookups.append(filter_item.parameter_name) elif isinstance(filter_item, (list, tuple)): valid_lookups.append(filter_item[0]) else: valid_lookups.append(filter_item) return clean_lookup in valid_lookups def to_field_allowed(self, request, to_field): """ Returns True if the model associated with this admin should be allowed to be referenced by the specified field. """ opts = self.model._meta try: field = opts.get_field(to_field) except FieldDoesNotExist: return False # Check whether this model is the origin of a M2M relationship # in which case to_field has to be the pk on this model. if opts.many_to_many and field.primary_key: return True # Make sure at least one of the models registered for this site # references this field through a FK or a M2M relationship. 
registered_models = set() for model, admin in self.admin_site._registry.items(): registered_models.add(model) for inline in admin.inlines: registered_models.add(inline.model) for related_object in (opts.get_all_related_objects(include_hidden=True) + opts.get_all_related_many_to_many_objects()): related_model = related_object.model if (any(issubclass(model, related_model) for model in registered_models) and related_object.field.rel.get_related_field() == field): return True return False def has_add_permission(self, request): """ Returns True if the given request has permission to add an object. Can be overridden by the user in subclasses. """ opts = self.opts codename = get_permission_codename('add', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) def has_change_permission(self, request, obj=None): """ Returns True if the given request has permission to change the given Django model instance, the default implementation doesn't examine the `obj` parameter. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to change the `obj` model instance. If `obj` is None, this should return True if the given request has permission to change *any* object of the given type. """ opts = self.opts codename = get_permission_codename('change', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) def has_delete_permission(self, request, obj=None): """ Returns True if the given request has permission to change the given Django model instance, the default implementation doesn't examine the `obj` parameter. Can be overridden by the user in subclasses. In such case it should return True if the given request has permission to delete the `obj` model instance. If `obj` is None, this should return True if the given request has permission to delete *any* object of the given type. 
""" opts = self.opts codename = get_permission_codename('delete', opts) return request.user.has_perm("%s.%s" % (opts.app_label, codename)) @python_2_unicode_compatible class ModelAdmin(BaseModelAdmin): "Encapsulates all admin options and functionality for a given model." list_display = ('__str__',) list_display_links = () list_filter = () list_select_related = False list_per_page = 100 list_max_show_all = 200 list_editable = () search_fields = () date_hierarchy = None save_as = False save_on_top = False paginator = Paginator preserve_filters = True inlines = [] # Custom templates (designed to be over-ridden in subclasses) add_form_template = None change_form_template = None change_list_template = None delete_confirmation_template = None delete_selected_confirmation_template = None object_history_template = None # Actions actions = [] action_form = helpers.ActionForm actions_on_top = True actions_on_bottom = False actions_selection_counter = True # validation # Old, deprecated style: default_validator_class = validation.ModelAdminValidator # New style: checks_class = ModelAdminChecks def __init__(self, model, admin_site): self.model = model self.opts = model._meta self.admin_site = admin_site super(ModelAdmin, self).__init__() def __str__(self): return "%s.%s" % (self.model._meta.app_label, self.__class__.__name__) def get_inline_instances(self, request, obj=None): inline_instances = [] for inline_class in self.inlines: inline = inline_class(self.model, self.admin_site) if request: if not (inline.has_add_permission(request) or inline.has_change_permission(request, obj) or inline.has_delete_permission(request, obj)): continue if not inline.has_add_permission(request): inline.max_num = 0 inline_instances.append(inline) return inline_instances def get_urls(self): from django.conf.urls import patterns, url def wrap(view): def wrapper(*args, **kwargs): return self.admin_site.admin_view(view)(*args, **kwargs) return update_wrapper(wrapper, view) info = 
self.model._meta.app_label, self.model._meta.model_name urlpatterns = patterns('', url(r'^$', wrap(self.changelist_view), name='%s_%s_changelist' % info), url(r'^add/$', wrap(self.add_view), name='%s_%s_add' % info), url(r'^(.+)/history/$', wrap(self.history_view), name='%s_%s_history' % info), url(r'^(.+)/delete/$', wrap(self.delete_view), name='%s_%s_delete' % info), url(r'^(.+)/$', wrap(self.change_view), name='%s_%s_change' % info), ) return urlpatterns def urls(self): return self.get_urls() urls = property(urls) @property def media(self): extra = '' if settings.DEBUG else '.min' js = [ 'core.js', 'admin/RelatedObjectLookups.js', 'jquery%s.js' % extra, 'jquery.init.js' ] if self.actions is not None: js.append('actions%s.js' % extra) if self.prepopulated_fields: js.extend(['urlify.js', 'prepopulate%s.js' % extra]) return forms.Media(js=[static('admin/js/%s' % url) for url in js]) def get_model_perms(self, request): """ Returns a dict of all perms for this model. This dict has the keys ``add``, ``change``, and ``delete`` mapping to the True/False for each of those actions. """ return { 'add': self.has_add_permission(request), 'change': self.has_change_permission(request), 'delete': self.has_delete_permission(request), } def get_fields(self, request, obj=None): if self.fields: return self.fields form = self.get_form(request, obj, fields=None) return list(form.base_fields) + list(self.get_readonly_fields(request, obj)) def get_form(self, request, obj=None, **kwargs): """ Returns a Form class for use in the admin add view. This is used by add_view and change_view. 
""" if 'fields' in kwargs: fields = kwargs.pop('fields') else: fields = flatten_fieldsets(self.get_fieldsets(request, obj)) if self.exclude is None: exclude = [] else: exclude = list(self.exclude) exclude.extend(self.get_readonly_fields(request, obj)) if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude: # Take the custom ModelForm's Meta.exclude into account only if the # ModelAdmin doesn't define its own. exclude.extend(self.form._meta.exclude) # if exclude is an empty list we pass None to be consistent with the # default on modelform_factory exclude = exclude or None defaults = { "form": self.form, "fields": fields, "exclude": exclude, "formfield_callback": partial(self.formfield_for_dbfield, request=request), } defaults.update(kwargs) if defaults['fields'] is None and not modelform_defines_fields(defaults['form']): defaults['fields'] = forms.ALL_FIELDS try: return modelform_factory(self.model, **defaults) except FieldError as e: raise FieldError('%s. Check fields/fieldsets/exclude attributes of class %s.' % (e, self.__class__.__name__)) def get_changelist(self, request, **kwargs): """ Returns the ChangeList class for use on the changelist page. """ from django.contrib.admin.views.main import ChangeList return ChangeList def get_object(self, request, object_id): """ Returns an instance matching the primary key provided. ``None`` is returned if no match is found (or the object_id failed validation against the primary key field). """ queryset = self.get_queryset(request) model = queryset.model try: object_id = model._meta.pk.to_python(object_id) return queryset.get(pk=object_id) except (model.DoesNotExist, ValidationError, ValueError): return None def get_changelist_form(self, request, **kwargs): """ Returns a Form class for use in the Formset on the changelist page. 
""" defaults = { "formfield_callback": partial(self.formfield_for_dbfield, request=request), } defaults.update(kwargs) if (defaults.get('fields') is None and not modelform_defines_fields(defaults.get('form'))): defaults['fields'] = forms.ALL_FIELDS return modelform_factory(self.model, **defaults) def get_changelist_formset(self, request, **kwargs): """ Returns a FormSet class for use on the changelist page if list_editable is used. """ defaults = { "formfield_callback": partial(self.formfield_for_dbfield, request=request), } defaults.update(kwargs) return modelformset_factory(self.model, self.get_changelist_form(request), extra=0, fields=self.list_editable, **defaults) def _get_formsets(self, request, obj): """ Helper function that exists to allow the deprecation warning to be executed while this function continues to return a generator. """ for inline in self.get_inline_instances(request, obj): yield inline.get_formset(request, obj) def get_formsets(self, request, obj=None): warnings.warn( "ModelAdmin.get_formsets() is deprecated and will be removed in " "Django 1.9. Use ModelAdmin.get_formsets_with_inlines() instead.", RemovedInDjango19Warning, stacklevel=2 ) return self._get_formsets(request, obj) def get_formsets_with_inlines(self, request, obj=None): """ Yields formsets and the corresponding inlines. """ # We call get_formsets() [deprecated] and check if it triggers a # warning. If it does, then it's ours and we can safely ignore it, but # if it doesn't then it has been overridden so we must warn about the # deprecation. with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") formsets = self.get_formsets(request, obj) if len(w) != 1 or not issubclass(w[0].category, RemovedInDjango19Warning): warnings.warn( "ModelAdmin.get_formsets() is deprecated and will be removed in " "Django 1.9. 
Use ModelAdmin.get_formsets_with_inlines() instead.", RemovedInDjango19Warning ) if formsets: zipped = zip(formsets, self.get_inline_instances(request, None)) for formset, inline in zipped: yield formset, inline else: for inline in self.get_inline_instances(request, obj): yield inline.get_formset(request, obj), inline def get_paginator(self, request, queryset, per_page, orphans=0, allow_empty_first_page=True): return self.paginator(queryset, per_page, orphans, allow_empty_first_page) def log_addition(self, request, object): """ Log that an object has been successfully added. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import LogEntry, ADDITION LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(object).pk, object_id=object.pk, object_repr=force_text(object), action_flag=ADDITION ) def log_change(self, request, object, message): """ Log that an object has been successfully changed. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import LogEntry, CHANGE LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(object).pk, object_id=object.pk, object_repr=force_text(object), action_flag=CHANGE, change_message=message ) def log_deletion(self, request, object, object_repr): """ Log that an object will be deleted. Note that this method must be called before the deletion. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import LogEntry, DELETION LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(object).pk, object_id=object.pk, object_repr=object_repr, action_flag=DELETION ) def action_checkbox(self, obj): """ A list_display column containing a checkbox widget. 
""" return helpers.checkbox.render(helpers.ACTION_CHECKBOX_NAME, force_text(obj.pk)) action_checkbox.short_description = mark_safe('<input type="checkbox" id="action-toggle" />') action_checkbox.allow_tags = True def get_actions(self, request): """ Return a dictionary mapping the names of all actions for this ModelAdmin to a tuple of (callable, name, description) for each action. """ # If self.actions is explicitly set to None that means that we don't # want *any* actions enabled on this page. from django.contrib.admin.views.main import _is_changelist_popup if self.actions is None or _is_changelist_popup(request): return OrderedDict() actions = [] # Gather actions from the admin site first for (name, func) in self.admin_site.actions: description = getattr(func, 'short_description', name.replace('_', ' ')) actions.append((func, name, description)) # Then gather them from the model admin and all parent classes, # starting with self and working back up. for klass in self.__class__.mro()[::-1]: class_actions = getattr(klass, 'actions', []) # Avoid trying to iterate over None if not class_actions: continue actions.extend(self.get_action(action) for action in class_actions) # get_action might have returned None, so filter any of those out. actions = filter(None, actions) # Convert the actions into an OrderedDict keyed by name. actions = OrderedDict( (name, (func, name, desc)) for func, name, desc in actions ) return actions def get_action_choices(self, request, default_choices=BLANK_CHOICE_DASH): """ Return a list of choices for use in a form object. Each choice is a tuple (name, description). """ choices = [] + default_choices for func, name, description in six.itervalues(self.get_actions(request)): choice = (name, description % model_format_dict(self.opts)) choices.append(choice) return choices def get_action(self, action): """ Return a given action from a parameter, which can either be a callable, or the name of a method on the ModelAdmin. 
Return is a tuple of (callable, name, description). """ # If the action is a callable, just use it. if callable(action): func = action action = action.__name__ # Next, look for a method. Grab it off self.__class__ to get an unbound # method instead of a bound one; this ensures that the calling # conventions are the same for functions and methods. elif hasattr(self.__class__, action): func = getattr(self.__class__, action) # Finally, look for a named method on the admin site else: try: func = self.admin_site.get_action(action) except KeyError: return None if hasattr(func, 'short_description'): description = func.short_description else: description = capfirst(action.replace('_', ' ')) return func, action, description def get_list_display(self, request): """ Return a sequence containing the fields to be displayed on the changelist. """ return self.list_display def get_list_display_links(self, request, list_display): """ Return a sequence containing the fields to be displayed as links on the changelist. The list_display parameter is the list of fields returned by get_list_display(). """ if self.list_display_links or self.list_display_links is None or not list_display: return self.list_display_links else: # Use only the first item in list_display as link return list(list_display)[:1] def get_list_filter(self, request): """ Returns a sequence containing the fields to be displayed as filters in the right sidebar of the changelist page. """ return self.list_filter def get_search_fields(self, request): """ Returns a sequence containing the fields to be searched whenever somebody submits a search query. """ return self.search_fields def get_search_results(self, request, queryset, search_term): """ Returns a tuple containing a queryset to implement the search, and a boolean indicating if the results may contain duplicates. """ # Apply keyword searches. 
def construct_search(field_name): if field_name.startswith('^'): return "%s__istartswith" % field_name[1:] elif field_name.startswith('='): return "%s__iexact" % field_name[1:] elif field_name.startswith('@'): return "%s__search" % field_name[1:] else: return "%s__icontains" % field_name use_distinct = False search_fields = self.get_search_fields(request) if search_fields and search_term: orm_lookups = [construct_search(str(search_field)) for search_field in search_fields] for bit in search_term.split(): or_queries = [models.Q(**{orm_lookup: bit}) for orm_lookup in orm_lookups] queryset = queryset.filter(reduce(operator.or_, or_queries)) if not use_distinct: for search_spec in orm_lookups: if lookup_needs_distinct(self.opts, search_spec): use_distinct = True break return queryset, use_distinct def get_preserved_filters(self, request): """ Returns the preserved filters querystring. """ match = request.resolver_match if self.preserve_filters and match: opts = self.model._meta current_url = '%s:%s' % (match.app_name, match.url_name) changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name) if current_url == changelist_url: preserved_filters = request.GET.urlencode() else: preserved_filters = request.GET.get('_changelist_filters') if preserved_filters: return urlencode({'_changelist_filters': preserved_filters}) return '' def construct_change_message(self, request, form, formsets): """ Construct a change message from a changed object. 
""" change_message = [] if form.changed_data: change_message.append(_('Changed %s.') % get_text_list(form.changed_data, _('and'))) if formsets: for formset in formsets: for added_object in formset.new_objects: change_message.append(_('Added %(name)s "%(object)s".') % {'name': force_text(added_object._meta.verbose_name), 'object': force_text(added_object)}) for changed_object, changed_fields in formset.changed_objects: change_message.append(_('Changed %(list)s for %(name)s "%(object)s".') % {'list': get_text_list(changed_fields, _('and')), 'name': force_text(changed_object._meta.verbose_name), 'object': force_text(changed_object)}) for deleted_object in formset.deleted_objects: change_message.append(_('Deleted %(name)s "%(object)s".') % {'name': force_text(deleted_object._meta.verbose_name), 'object': force_text(deleted_object)}) change_message = ' '.join(change_message) return change_message or _('No fields changed.') def message_user(self, request, message, level=messages.INFO, extra_tags='', fail_silently=False): """ Send a message to the user. The default implementation posts a message using the django.contrib.messages backend. Exposes almost the same API as messages.add_message(), but accepts the positional arguments in a different order to maintain backwards compatibility. For convenience, it accepts the `level` argument as a string rather than the usual level number. """ if not isinstance(level, int): # attempt to get the level if passed a string try: level = getattr(messages.constants, level.upper()) except AttributeError: levels = messages.constants.DEFAULT_TAGS.values() levels_repr = ', '.join('`%s`' % l for l in levels) raise ValueError('Bad message level string: `%s`. ' 'Possible values are: %s' % (level, levels_repr)) messages.add_message(request, level, message, extra_tags=extra_tags, fail_silently=fail_silently) def save_form(self, request, form, change): """ Given a ModelForm return an unsaved instance. 
``change`` is True if the object is being changed, and False if it's being added. """ return form.save(commit=False) def save_model(self, request, obj, form, change): """ Given a model instance save it to the database. """ obj.save() def delete_model(self, request, obj): """ Given a model instance delete it from the database. """ obj.delete() def save_formset(self, request, form, formset, change): """ Given an inline formset save it to the database. """ formset.save() def save_related(self, request, form, formsets, change): """ Given the ``HttpRequest``, the parent ``ModelForm`` instance, the list of inline formsets and a boolean value based on whether the parent is being added or changed, save the related objects to the database. Note that at this point save_form() and save_model() have already been called. """ form.save_m2m() for formset in formsets: self.save_formset(request, form, formset, change=change) def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None): opts = self.model._meta app_label = opts.app_label preserved_filters = self.get_preserved_filters(request) form_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, form_url) view_on_site_url = self.get_view_on_site_url(obj) context.update({ 'add': add, 'change': change, 'has_add_permission': self.has_add_permission(request), 'has_change_permission': self.has_change_permission(request, obj), 'has_delete_permission': self.has_delete_permission(request, obj), 'has_file_field': True, # FIXME - this should check if form or formsets have a FileField, 'has_absolute_url': view_on_site_url is not None, 'absolute_url': view_on_site_url, 'form_url': form_url, 'opts': opts, 'content_type_id': get_content_type_for_model(self.model).pk, 'save_as': self.save_as, 'save_on_top': self.save_on_top, 'to_field_var': TO_FIELD_VAR, 'is_popup_var': IS_POPUP_VAR, 'app_label': app_label, }) if add and self.add_form_template is not None: form_template = 
self.add_form_template else: form_template = self.change_form_template return TemplateResponse(request, form_template or [ "admin/%s/%s/change_form.html" % (app_label, opts.model_name), "admin/%s/change_form.html" % app_label, "admin/change_form.html" ], context, current_app=self.admin_site.name) def response_add(self, request, obj, post_url_continue=None): """ Determines the HttpResponse for the add_view stage. """ opts = obj._meta pk_value = obj._get_pk_val() preserved_filters = self.get_preserved_filters(request) msg_dict = {'name': force_text(opts.verbose_name), 'obj': force_text(obj)} # Here, we distinguish between different save types by checking for # the presence of keys in request.POST. if IS_POPUP_VAR in request.POST: to_field = request.POST.get(TO_FIELD_VAR) if to_field: attr = str(to_field) else: attr = obj._meta.pk.attname value = obj.serializable_value(attr) return SimpleTemplateResponse('admin/popup_response.html', { 'pk_value': escape(pk_value), # for possible backwards-compatibility 'value': escape(value), 'obj': escapejs(obj) }) elif "_continue" in request.POST: msg = _('The %(name)s "%(obj)s" was added successfully. You may edit it again below.') % msg_dict self.message_user(request, msg, messages.SUCCESS) if post_url_continue is None: post_url_continue = reverse('admin:%s_%s_change' % (opts.app_label, opts.model_name), args=(quote(pk_value),), current_app=self.admin_site.name) post_url_continue = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url_continue) return HttpResponseRedirect(post_url_continue) elif "_addanother" in request.POST: msg = _('The %(name)s "%(obj)s" was added successfully. 
You may add another %(name)s below.') % msg_dict self.message_user(request, msg, messages.SUCCESS) redirect_url = request.path redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) else: msg = _('The %(name)s "%(obj)s" was added successfully.') % msg_dict self.message_user(request, msg, messages.SUCCESS) return self.response_post_save_add(request, obj) def response_change(self, request, obj): """ Determines the HttpResponse for the change_view stage. """ opts = self.model._meta pk_value = obj._get_pk_val() preserved_filters = self.get_preserved_filters(request) msg_dict = {'name': force_text(opts.verbose_name), 'obj': force_text(obj)} if "_continue" in request.POST: msg = _('The %(name)s "%(obj)s" was changed successfully. You may edit it again below.') % msg_dict self.message_user(request, msg, messages.SUCCESS) redirect_url = request.path redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) elif "_saveasnew" in request.POST: msg = _('The %(name)s "%(obj)s" was added successfully. You may edit it again below.') % msg_dict self.message_user(request, msg, messages.SUCCESS) redirect_url = reverse('admin:%s_%s_change' % (opts.app_label, opts.model_name), args=(pk_value,), current_app=self.admin_site.name) redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) elif "_addanother" in request.POST: msg = _('The %(name)s "%(obj)s" was changed successfully. 
You may add another %(name)s below.') % msg_dict self.message_user(request, msg, messages.SUCCESS) redirect_url = reverse('admin:%s_%s_add' % (opts.app_label, opts.model_name), current_app=self.admin_site.name) redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url) return HttpResponseRedirect(redirect_url) else: msg = _('The %(name)s "%(obj)s" was changed successfully.') % msg_dict self.message_user(request, msg, messages.SUCCESS) return self.response_post_save_change(request, obj) def response_post_save_add(self, request, obj): """ Figure out where to redirect after the 'Save' button has been pressed when adding a new object. """ opts = self.model._meta if self.has_change_permission(request, None): post_url = reverse('admin:%s_%s_changelist' % (opts.app_label, opts.model_name), current_app=self.admin_site.name) preserved_filters = self.get_preserved_filters(request) post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url) else: post_url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(post_url) def response_post_save_change(self, request, obj): """ Figure out where to redirect after the 'Save' button has been pressed when editing an existing object. """ opts = self.model._meta if self.has_change_permission(request, None): post_url = reverse('admin:%s_%s_changelist' % (opts.app_label, opts.model_name), current_app=self.admin_site.name) preserved_filters = self.get_preserved_filters(request) post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url) else: post_url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(post_url) def response_action(self, request, queryset): """ Handle an admin action. This is called if a request is POSTed to the changelist; it returns an HttpResponse if the action was handled, and None otherwise. 
""" # There can be multiple action forms on the page (at the top # and bottom of the change list, for example). Get the action # whose button was pushed. try: action_index = int(request.POST.get('index', 0)) except ValueError: action_index = 0 # Construct the action form. data = request.POST.copy() data.pop(helpers.ACTION_CHECKBOX_NAME, None) data.pop("index", None) # Use the action whose button was pushed try: data.update({'action': data.getlist('action')[action_index]}) except IndexError: # If we didn't get an action from the chosen form that's invalid # POST data, so by deleting action it'll fail the validation check # below. So no need to do anything here pass action_form = self.action_form(data, auto_id=None) action_form.fields['action'].choices = self.get_action_choices(request) # If the form's valid we can handle the action. if action_form.is_valid(): action = action_form.cleaned_data['action'] select_across = action_form.cleaned_data['select_across'] func = self.get_actions(request)[action][0] # Get the list of selected PKs. If nothing's selected, we can't # perform an action on it, so bail. Except we want to perform # the action explicitly on all objects. selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME) if not selected and not select_across: # Reminder that something needs to be selected or nothing will happen msg = _("Items must be selected in order to perform " "actions on them. No items have been changed.") self.message_user(request, msg, messages.WARNING) return None if not select_across: # Perform the action only on the selected objects queryset = queryset.filter(pk__in=selected) response = func(self, request, queryset) # Actions may return an HttpResponse-like object, which will be # used as the response from the POST. If not, we'll be a good # little HTTP citizen and redirect back to the changelist page. 
if isinstance(response, HttpResponseBase): return response else: return HttpResponseRedirect(request.get_full_path()) else: msg = _("No action selected.") self.message_user(request, msg, messages.WARNING) return None def response_delete(self, request, obj_display): """ Determines the HttpResponse for the delete_view stage. """ opts = self.model._meta self.message_user(request, _('The %(name)s "%(obj)s" was deleted successfully.') % { 'name': force_text(opts.verbose_name), 'obj': force_text(obj_display) }, messages.SUCCESS) if self.has_change_permission(request, None): post_url = reverse('admin:%s_%s_changelist' % (opts.app_label, opts.model_name), current_app=self.admin_site.name) preserved_filters = self.get_preserved_filters(request) post_url = add_preserved_filters( {'preserved_filters': preserved_filters, 'opts': opts}, post_url ) else: post_url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(post_url) def render_delete_form(self, request, context): opts = self.model._meta app_label = opts.app_label return TemplateResponse(request, self.delete_confirmation_template or [ "admin/{}/{}/delete_confirmation.html".format(app_label, opts.model_name), "admin/{}/delete_confirmation.html".format(app_label), "admin/delete_confirmation.html" ], context, current_app=self.admin_site.name) def get_inline_formsets(self, request, formsets, inline_instances, obj=None): inline_admin_formsets = [] for inline, formset in zip(inline_instances, formsets): fieldsets = list(inline.get_fieldsets(request, obj)) readonly = list(inline.get_readonly_fields(request, obj)) prepopulated = dict(inline.get_prepopulated_fields(request, obj)) inline_admin_formset = helpers.InlineAdminFormSet(inline, formset, fieldsets, prepopulated, readonly, model_admin=self) inline_admin_formsets.append(inline_admin_formset) return inline_admin_formsets def get_changeform_initial_data(self, request): """ Get the initial form data. 
Unless overridden, this populates from the GET params. """ initial = dict(request.GET.items()) for k in initial: try: f = self.model._meta.get_field(k) except models.FieldDoesNotExist: continue # We have to special-case M2Ms as a list of comma-separated PKs. if isinstance(f, models.ManyToManyField): initial[k] = initial[k].split(",") return initial @csrf_protect_m @transaction.atomic def changeform_view(self, request, object_id=None, form_url='', extra_context=None): to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR)) if to_field and not self.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field) model = self.model opts = model._meta add = object_id is None if add: if not self.has_add_permission(request): raise PermissionDenied obj = None else: obj = self.get_object(request, unquote(object_id)) if not self.has_change_permission(request, obj): raise PermissionDenied if obj is None: raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % { 'name': force_text(opts.verbose_name), 'key': escape(object_id)}) if request.method == 'POST' and "_saveasnew" in request.POST: return self.add_view(request, form_url=reverse('admin:%s_%s_add' % ( opts.app_label, opts.model_name), current_app=self.admin_site.name)) ModelForm = self.get_form(request, obj) if request.method == 'POST': form = ModelForm(request.POST, request.FILES, instance=obj) if form.is_valid(): form_validated = True new_object = self.save_form(request, form, change=not add) else: form_validated = False new_object = form.instance formsets, inline_instances = self._create_formsets(request, new_object) if all_valid(formsets) and form_validated: self.save_model(request, new_object, form, not add) self.save_related(request, form, formsets, not add) if add: self.log_addition(request, new_object) return self.response_add(request, new_object) else: change_message = self.construct_change_message(request, form, 
formsets) self.log_change(request, new_object, change_message) return self.response_change(request, new_object) else: if add: initial = self.get_changeform_initial_data(request) form = ModelForm(initial=initial) formsets, inline_instances = self._create_formsets(request, self.model()) else: form = ModelForm(instance=obj) formsets, inline_instances = self._create_formsets(request, obj) adminForm = helpers.AdminForm( form, list(self.get_fieldsets(request, obj)), self.get_prepopulated_fields(request, obj), self.get_readonly_fields(request, obj), model_admin=self) media = self.media + adminForm.media inline_formsets = self.get_inline_formsets(request, formsets, inline_instances, obj) for inline_formset in inline_formsets: media = media + inline_formset.media context = dict(self.admin_site.each_context(), title=(_('Add %s') if add else _('Change %s')) % force_text(opts.verbose_name), adminform=adminForm, object_id=object_id, original=obj, is_popup=(IS_POPUP_VAR in request.POST or IS_POPUP_VAR in request.GET), to_field=to_field, media=media, inline_admin_formsets=inline_formsets, errors=helpers.AdminErrorList(form, formsets), preserved_filters=self.get_preserved_filters(request), ) context.update(extra_context or {}) return self.render_change_form(request, context, add=add, change=not add, obj=obj, form_url=form_url) def add_view(self, request, form_url='', extra_context=None): return self.changeform_view(request, None, form_url, extra_context) def change_view(self, request, object_id, form_url='', extra_context=None): return self.changeform_view(request, object_id, form_url, extra_context) @csrf_protect_m def changelist_view(self, request, extra_context=None): """ The 'change list' admin view for this model. 
""" from django.contrib.admin.views.main import ERROR_FLAG opts = self.model._meta app_label = opts.app_label if not self.has_change_permission(request, None): raise PermissionDenied list_display = self.get_list_display(request) list_display_links = self.get_list_display_links(request, list_display) list_filter = self.get_list_filter(request) search_fields = self.get_search_fields(request) # Check actions to see if any are available on this changelist actions = self.get_actions(request) if actions: # Add the action checkboxes if there are any actions available. list_display = ['action_checkbox'] + list(list_display) ChangeList = self.get_changelist(request) try: cl = ChangeList(request, self.model, list_display, list_display_links, list_filter, self.date_hierarchy, search_fields, self.list_select_related, self.list_per_page, self.list_max_show_all, self.list_editable, self) except IncorrectLookupParameters: # Wacky lookup parameters were given, so redirect to the main # changelist page, without parameters, and pass an 'invalid=1' # parameter via the query string. If wacky parameters were given # and the 'invalid=1' parameter was already in the query string, # something is screwed up with the database, so display an error # page. if ERROR_FLAG in request.GET.keys(): return SimpleTemplateResponse('admin/invalid_setup.html', { 'title': _('Database error'), }) return HttpResponseRedirect(request.path + '?' + ERROR_FLAG + '=1') # If the request was POSTed, this might be a bulk action or a bulk # edit. Try to look up an action or confirmation first, but if this # isn't an action the POST will fall through to the bulk edit check, # below. 
action_failed = False selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME) # Actions with no confirmation if (actions and request.method == 'POST' and 'index' in request.POST and '_save' not in request.POST): if selected: response = self.response_action(request, queryset=cl.get_queryset(request)) if response: return response else: action_failed = True else: msg = _("Items must be selected in order to perform " "actions on them. No items have been changed.") self.message_user(request, msg, messages.WARNING) action_failed = True # Actions with confirmation if (actions and request.method == 'POST' and helpers.ACTION_CHECKBOX_NAME in request.POST and 'index' not in request.POST and '_save' not in request.POST): if selected: response = self.response_action(request, queryset=cl.get_queryset(request)) if response: return response else: action_failed = True # If we're allowing changelist editing, we need to construct a formset # for the changelist given all the fields to be edited. Then we'll # use the formset to validate/process POSTed data. formset = cl.formset = None # Handle POSTed bulk-edit data. 
if (request.method == "POST" and cl.list_editable and '_save' in request.POST and not action_failed): FormSet = self.get_changelist_formset(request) formset = cl.formset = FormSet(request.POST, request.FILES, queryset=cl.result_list) if formset.is_valid(): changecount = 0 for form in formset.forms: if form.has_changed(): obj = self.save_form(request, form, change=True) self.save_model(request, obj, form, change=True) self.save_related(request, form, formsets=[], change=True) change_msg = self.construct_change_message(request, form, None) self.log_change(request, obj, change_msg) changecount += 1 if changecount: if changecount == 1: name = force_text(opts.verbose_name) else: name = force_text(opts.verbose_name_plural) msg = ungettext("%(count)s %(name)s was changed successfully.", "%(count)s %(name)s were changed successfully.", changecount) % {'count': changecount, 'name': name, 'obj': force_text(obj)} self.message_user(request, msg, messages.SUCCESS) return HttpResponseRedirect(request.get_full_path()) # Handle GET -- construct a formset for display. elif cl.list_editable: FormSet = self.get_changelist_formset(request) formset = cl.formset = FormSet(queryset=cl.result_list) # Build the list of media to be used by the formset. if formset: media = self.media + formset.media else: media = self.media # Build the action form and populate it with available actions. 
if actions: action_form = self.action_form(auto_id=None) action_form.fields['action'].choices = self.get_action_choices(request) else: action_form = None selection_note_all = ungettext('%(total_count)s selected', 'All %(total_count)s selected', cl.result_count) context = dict( self.admin_site.each_context(), module_name=force_text(opts.verbose_name_plural), selection_note=_('0 of %(cnt)s selected') % {'cnt': len(cl.result_list)}, selection_note_all=selection_note_all % {'total_count': cl.result_count}, title=cl.title, is_popup=cl.is_popup, to_field=cl.to_field, cl=cl, media=media, has_add_permission=self.has_add_permission(request), opts=cl.opts, action_form=action_form, actions_on_top=self.actions_on_top, actions_on_bottom=self.actions_on_bottom, actions_selection_counter=self.actions_selection_counter, preserved_filters=self.get_preserved_filters(request), ) context.update(extra_context or {}) return TemplateResponse(request, self.change_list_template or [ 'admin/%s/%s/change_list.html' % (app_label, opts.model_name), 'admin/%s/change_list.html' % app_label, 'admin/change_list.html' ], context, current_app=self.admin_site.name) @csrf_protect_m @transaction.atomic def delete_view(self, request, object_id, extra_context=None): "The 'delete' admin view for this model." opts = self.model._meta app_label = opts.app_label obj = self.get_object(request, unquote(object_id)) if not self.has_delete_permission(request, obj): raise PermissionDenied if obj is None: raise Http404( _('%(name)s object with primary key %(key)r does not exist.') % {'name': force_text(opts.verbose_name), 'key': escape(object_id)} ) using = router.db_for_write(self.model) # Populate deleted_objects, a data structure of all related objects that # will also be deleted. (deleted_objects, perms_needed, protected) = get_deleted_objects( [obj], opts, request.user, self.admin_site, using) if request.POST: # The user has already confirmed the deletion. 
if perms_needed: raise PermissionDenied obj_display = force_text(obj) self.log_deletion(request, obj, obj_display) self.delete_model(request, obj) return self.response_delete(request, obj_display) object_name = force_text(opts.verbose_name) if perms_needed or protected: title = _("Cannot delete %(name)s") % {"name": object_name} else: title = _("Are you sure?") context = dict( self.admin_site.each_context(), title=title, object_name=object_name, object=obj, deleted_objects=deleted_objects, perms_lacking=perms_needed, protected=protected, opts=opts, app_label=app_label, preserved_filters=self.get_preserved_filters(request), ) context.update(extra_context or {}) return self.render_delete_form(request, context) def history_view(self, request, object_id, extra_context=None): "The 'history' admin view for this model." from django.contrib.admin.models import LogEntry # First check if the user can see this history. model = self.model obj = get_object_or_404(self.get_queryset(request), pk=unquote(object_id)) if not self.has_change_permission(request, obj): raise PermissionDenied # Then get the history for this object. opts = model._meta app_label = opts.app_label action_list = LogEntry.objects.filter( object_id=unquote(object_id), content_type=get_content_type_for_model(model) ).select_related().order_by('action_time') context = dict(self.admin_site.each_context(), title=_('Change history: %s') % force_text(obj), action_list=action_list, module_name=capfirst(force_text(opts.verbose_name_plural)), object=obj, opts=opts, preserved_filters=self.get_preserved_filters(request), ) context.update(extra_context or {}) return TemplateResponse(request, self.object_history_template or [ "admin/%s/%s/object_history.html" % (app_label, opts.model_name), "admin/%s/object_history.html" % app_label, "admin/object_history.html" ], context, current_app=self.admin_site.name) def _create_formsets(self, request, obj): "Helper function to generate formsets for add/change_view." 
formsets = [] inline_instances = [] prefixes = {} get_formsets_args = [request] if obj.pk: get_formsets_args.append(obj) for FormSet, inline in self.get_formsets_with_inlines(*get_formsets_args): prefix = FormSet.get_default_prefix() prefixes[prefix] = prefixes.get(prefix, 0) + 1 if prefixes[prefix] != 1 or not prefix: prefix = "%s-%s" % (prefix, prefixes[prefix]) formset_params = { 'instance': obj, 'prefix': prefix, 'queryset': inline.get_queryset(request), } if request.method == 'POST': formset_params.update({ 'data': request.POST, 'files': request.FILES, 'save_as_new': '_saveasnew' in request.POST }) formsets.append(FormSet(**formset_params)) inline_instances.append(inline) return formsets, inline_instances class InlineModelAdmin(BaseModelAdmin): """ Options for inline editing of ``model`` instances. Provide ``fk_name`` to specify the attribute name of the ``ForeignKey`` from ``model`` to its parent. This is required if ``model`` has more than one ``ForeignKey`` to its parent. """ model = None fk_name = None formset = BaseInlineFormSet extra = 3 min_num = None max_num = None template = None verbose_name = None verbose_name_plural = None can_delete = True checks_class = InlineModelAdminChecks def __init__(self, parent_model, admin_site): self.admin_site = admin_site self.parent_model = parent_model self.opts = self.model._meta super(InlineModelAdmin, self).__init__() if self.verbose_name is None: self.verbose_name = self.model._meta.verbose_name if self.verbose_name_plural is None: self.verbose_name_plural = self.model._meta.verbose_name_plural @property def media(self): extra = '' if settings.DEBUG else '.min' js = ['jquery%s.js' % extra, 'jquery.init.js', 'inlines%s.js' % extra] if self.prepopulated_fields: js.extend(['urlify.js', 'prepopulate%s.js' % extra]) if self.filter_vertical or self.filter_horizontal: js.extend(['SelectBox.js', 'SelectFilter2.js']) return forms.Media(js=[static('admin/js/%s' % url) for url in js]) def get_extra(self, request, obj=None, 
**kwargs):
        """Hook for customizing the number of extra inline forms."""
        return self.extra

    def get_min_num(self, request, obj=None, **kwargs):
        """Hook for customizing the min number of inline forms."""
        return self.min_num

    def get_max_num(self, request, obj=None, **kwargs):
        """Hook for customizing the max number of extra inline forms."""
        return self.max_num

    def get_formset(self, request, obj=None, **kwargs):
        """Returns a BaseInlineFormSet class for use in admin add/change views.

        Builds the formset configuration (fields, excludes, permissions,
        extra/min/max counts) and wraps the model form in a subclass that
        blocks deletion of instances with protected related objects.
        """
        if 'fields' in kwargs:
            fields = kwargs.pop('fields')
        else:
            fields = flatten_fieldsets(self.get_fieldsets(request, obj))
        if self.exclude is None:
            exclude = []
        else:
            exclude = list(self.exclude)
        # Read-only fields can never be edited, so they are always excluded
        # from the generated form.
        exclude.extend(self.get_readonly_fields(request, obj))
        if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
            # Take the custom ModelForm's Meta.exclude into account only if the
            # InlineModelAdmin doesn't define its own.
            exclude.extend(self.form._meta.exclude)
        # If exclude is an empty list we use None, since that's the actual
        # default.
        exclude = exclude or None
        can_delete = self.can_delete and self.has_delete_permission(request, obj)
        defaults = {
            "form": self.form,
            "formset": self.formset,
            "fk_name": self.fk_name,
            "fields": fields,
            "exclude": exclude,
            "formfield_callback": partial(self.formfield_for_dbfield, request=request),
            "extra": self.get_extra(request, obj, **kwargs),
            "min_num": self.get_min_num(request, obj, **kwargs),
            "max_num": self.get_max_num(request, obj, **kwargs),
            "can_delete": can_delete,
        }
        # Caller-supplied kwargs win over the computed defaults.
        defaults.update(kwargs)
        base_model_form = defaults['form']

        class DeleteProtectedModelForm(base_model_form):
            def hand_clean_DELETE(self):
                """
                We don't validate the 'DELETE' field itself because on
                templates it's not rendered using the field information, but
                just using a generic "deletion_field" of the InlineModelAdmin.
                """
                if self.cleaned_data.get(DELETION_FIELD_NAME, False):
                    using = router.db_for_write(self._meta.model)
                    # Collect every object that would be cascade-deleted; any
                    # PROTECT-ed relation makes the deletion invalid.
                    collector = NestedObjects(using=using)
                    collector.collect([self.instance])
                    if collector.protected:
                        objs = []
                        for p in collector.protected:
                            objs.append(
                                # Translators: Model verbose name and instance representation, suitable to be an item in a list
                                _('%(class_name)s %(instance)s') % {
                                    'class_name': p._meta.verbose_name,
                                    'instance': p}
                            )
                        params = {'class_name': self._meta.model._meta.verbose_name,
                                  'instance': self.instance,
                                  'related_objects': get_text_list(objs, _('and'))}
                        msg = _("Deleting %(class_name)s %(instance)s would require "
                                "deleting the following protected related objects: "
                                "%(related_objects)s")
                        raise ValidationError(msg, code='deleting_protected', params=params)

            def is_valid(self):
                # Run normal validation first, then the protected-objects
                # check, and report failure if either rejects the form.
                result = super(DeleteProtectedModelForm, self).is_valid()
                self.hand_clean_DELETE()
                return result

        defaults['form'] = DeleteProtectedModelForm

        if defaults['fields'] is None and not modelform_defines_fields(defaults['form']):
            defaults['fields'] = forms.ALL_FIELDS

        return inlineformset_factory(self.parent_model, self.model, **defaults)

    def get_fields(self, request, obj=None):
        # Explicit `fields` wins; otherwise derive the list from the generated
        # form plus any read-only fields.
        if self.fields:
            return self.fields
        form = self.get_formset(request, obj, fields=None).form
        return list(form.base_fields) + list(self.get_readonly_fields(request, obj))

    def get_queryset(self, request):
        # Users without change permission see an empty queryset rather than
        # the inline's rows.
        queryset = super(InlineModelAdmin, self).get_queryset(request)
        if not self.has_change_permission(request):
            queryset = queryset.none()
        return queryset

    def has_add_permission(self, request):
        if self.opts.auto_created:
            # We're checking the rights to an auto-created intermediate model,
            # which doesn't have its own individual permissions. The user needs
            # to have the change permission for the related model in order to
            # be able to do anything with the intermediate model.
            return self.has_change_permission(request)
        return super(InlineModelAdmin, self).has_add_permission(request)

    def has_change_permission(self, request, obj=None):
        opts = self.opts
        if opts.auto_created:
            # The model was auto-created as intermediary for a
            # ManyToMany-relationship, find the target model
            for field in opts.fields:
                if field.rel and field.rel.to != self.parent_model:
                    opts = field.rel.to._meta
                    break
        codename = get_permission_codename('change', opts)
        return request.user.has_perm("%s.%s" % (opts.app_label, codename))

    def has_delete_permission(self, request, obj=None):
        if self.opts.auto_created:
            # We're checking the rights to an auto-created intermediate model,
            # which doesn't have its own individual permissions. The user needs
            # to have the change permission for the related model in order to
            # be able to do anything with the intermediate model.
            return self.has_change_permission(request, obj)
        return super(InlineModelAdmin, self).has_delete_permission(request, obj)


class StackedInline(InlineModelAdmin):
    # Renders each inline form as a full stacked fieldset.
    template = 'admin/edit_inline/stacked.html'


class TabularInline(InlineModelAdmin):
    # Renders the inline forms as rows of a table.
    template = 'admin/edit_inline/tabular.html'
boooka/GeoPowerOff
venv/lib/python2.7/site-packages/django/contrib/admin/options.py
Python
apache-2.0
80,797
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include <stdint.h>

#include <initializer_list>
#include <string>
#include <vector>

#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include "tensorflow/lite/kernels/test_util.h"
#include "tensorflow/lite/schema/schema_generated.h"
#include "tensorflow/lite/string_type.h"

namespace tflite {
namespace {

using ::testing::ElementsAreArray;
using ::testing::IsEmpty;

// Whether the dims input of the FILL op is wired as a constant tensor
// (resolvable at prepare time) or as a dynamic runtime input.
enum class TestType {
  kConst = 0,
  kDynamic = 1,
};

// Test harness around a single FILL op: `dims_type` is the element type of
// the shape tensor, `value_type` the element type of the fill value/output.
template <typename dims_type, typename value_type>
class FillOpModel : public SingleOpModel {
 public:
  explicit FillOpModel(TensorType dims_tensor_type,
                       std::initializer_list<int> dims_shape,
                       std::initializer_list<dims_type> dims_data,
                       value_type value, TestType input_tensor_types) {
    // Dims tensor is either a const input or a dynamic input that is
    // populated after the interpreter is built.
    if (input_tensor_types == TestType::kDynamic) {
      dims_ = AddInput(dims_tensor_type);
    } else {
      dims_ = AddConstInput(dims_tensor_type, dims_data, dims_shape);
    }
    value_ = AddInput(GetTensorType<value_type>());
    output_ = AddOutput(GetTensorType<value_type>());
    SetBuiltinOp(BuiltinOperator_FILL, BuiltinOptions_FillOptions,
                 CreateFillOptions(builder_).Union());
    BuildInterpreter({dims_shape, {}});

    if (input_tensor_types == TestType::kDynamic) {
      if (dims_data.size() > 0) {
        PopulateTensor<dims_type>(dims_, dims_data);
      }
    }
    // The fill value is always a scalar input.
    PopulateTensor<value_type>(value_, {value});
  }

  std::vector<value_type> GetOutput() {
    return ExtractVector<value_type>(output_);
  }

  std::vector<int> GetOutputShape() { return GetTensorShape(output_); }

 protected:
  int dims_;
  int value_;
  int output_;
};

// Harness for FILL with a quantized fill value/output; `quant_type` is the
// quantized storage type (e.g. int8_t/int16_t).
template <typename dims_type, typename quant_type>
class QuantizedFillOpModel : public SingleOpModel {
 public:
  explicit QuantizedFillOpModel(TensorType dims_tensor_type,
                                std::initializer_list<int> dims_shape,
                                std::initializer_list<dims_type> dims_data,
                                const TensorData& tensor_data, float value) {
    dims_ = AddInput(dims_tensor_type);
    value_ = AddInput(tensor_data);
    output_ = AddOutput(tensor_data);
    SetBuiltinOp(BuiltinOperator_FILL, BuiltinOptions_FillOptions,
                 CreateFillOptions(builder_).Union());
    BuildInterpreter({dims_shape, {}});

    if (dims_data.size() > 0) {
      PopulateTensor<dims_type>(dims_, dims_data);
    }
    QuantizeAndPopulate<quant_type>(value_, {value});
  }

  std::vector<quant_type> GetOutput() {
    return ExtractVector<quant_type>(output_);
  }

  // Dequantizes the raw output back to float using the output tensor's
  // quantization params.
  std::vector<float> GetDequantizedOutput() {
    TfLiteTensor* t = interpreter_->tensor(output_);
    return Dequantize(GetOutput(), t->params.scale, t->params.zero_point);
  }

  std::vector<int> GetOutputShape() { return GetTensorShape(output_); }

 protected:
  int dims_;
  int value_;
  int output_;
};

// Parameterized over const vs. dynamic dims input.
class FillOpTest : public ::testing::TestWithParam<TestType> {};

TEST_P(FillOpTest, FillInt32) {
  FillOpModel<int32_t, int32_t> m(TensorType_INT32, {2}, {2, 3}, -11,
                                  GetParam());
  m.Invoke();
  EXPECT_THAT(m.GetOutput(), ElementsAreArray({-11, -11, -11, -11, -11, -11}));
  EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 3}));
}

TEST_P(FillOpTest, FillInt64) {
  // 1LL << 45 exercises values that do not fit in 32 bits.
  FillOpModel<int64_t, int64_t> m(TensorType_INT64, {2}, {2, 4}, 1LL << 45,
                                  GetParam());
  m.Invoke();
  EXPECT_THAT(m.GetOutput(),
              ElementsAreArray({1LL << 45, 1LL << 45, 1LL << 45, 1LL << 45,
                                1LL << 45, 1LL << 45, 1LL << 45, 1LL << 45}));
  EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 4}));
}

TEST_P(FillOpTest, FillFloat) {
  FillOpModel<int64_t, float> m(TensorType_INT64, {3}, {2, 2, 2}, 4.0,
                                GetParam());
  m.Invoke();
  EXPECT_THAT(m.GetOutput(),
              ElementsAreArray({4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0}));
  EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 2, 2}));
}

TEST_P(FillOpTest, FillFloatInt32Dims) {
  // Same as FillFloat but with an int32 dims tensor.
  FillOpModel<int32_t, float> m(TensorType_INT32, {3}, {2, 2, 2}, 4.0,
                                GetParam());
  m.Invoke();
  EXPECT_THAT(m.GetOutput(),
              ElementsAreArray({4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0}));
  EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 2, 2}));
}

TEST_P(FillOpTest, FillOutputScalar) {
  // Empty dims input produces a scalar output (empty shape).
  FillOpModel<int64_t, float> m(TensorType_INT64, {0}, {}, 4.0, GetParam());
  m.Invoke();
  EXPECT_THAT(m.GetOutput(), ElementsAreArray({4.0}));
  EXPECT_THAT(m.GetOutputShape(), IsEmpty());
}

TEST_P(FillOpTest, FillBool) {
  FillOpModel<int64_t, bool> m(TensorType_INT64, {3}, {2, 2, 2}, true,
                               GetParam());
  m.Invoke();
  EXPECT_THAT(m.GetOutput(), ElementsAreArray({true, true, true, true, true,
                                               true, true, true}));
  EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 2, 2}));
}

// Strings are only exercised with dynamic dims; not part of the
// const/dynamic parameterized suite.
TEST(FillOpTest, FillString) {
  FillOpModel<int64_t, std::string> m(TensorType_INT64, {3}, {2, 2, 2}, "AB",
                                      TestType::kDynamic);
  m.Invoke();
  EXPECT_THAT(m.GetOutput(), ElementsAreArray({"AB", "AB", "AB", "AB", "AB",
                                               "AB", "AB", "AB"}));
  EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 2, 2}));
}

TEST_P(FillOpTest, FillInt8) {
  FillOpModel<int64_t, int8_t> m(TensorType_INT64, {3}, {2, 2, 2}, 5,
                                 GetParam());
  m.Invoke();
  EXPECT_THAT(m.GetOutput(), ElementsAreArray({5, 5, 5, 5, 5, 5, 5, 5}));
  EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 2, 2}));
}

// Fills a {2, 3} tensor with `value` through quantize/dequantize and checks
// the round-tripped result within a tolerance of one quantization step.
template <typename quant_type>
void QuantizedFill(float value) {
  // Prepare TensorData for quantization of value
  const float kMin = -1;
  // Workaround to get a zero-point of 0
  const float kMax =
      std::numeric_limits<quant_type>::max() /
      static_cast<float>(std::numeric_limits<quant_type>::max() + 1);
  const TensorData tensor_data(GetTensorType<quant_type>(), {},
                               std::abs(value) * kMin, std::abs(value) * kMax);

  QuantizedFillOpModel<int32_t, quant_type> m(TensorType_INT32, {2}, {2, 3},
                                              tensor_data, value);
  m.Invoke();

  constexpr float epsilon = 0.01f;
  const float min_value = tensor_data.min - epsilon;
  const float max_value = tensor_data.max + epsilon;
  const float kQuantizedTolerance =
      (max_value - min_value) /
      (std::numeric_limits<quant_type>::max() -
       std::numeric_limits<quant_type>::min());

  EXPECT_THAT(
      m.GetDequantizedOutput(),
      ElementsAreArray(ArrayFloatNear(
          {value, value, value, value, value, value}, kQuantizedTolerance)));
  EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 3}));
}

TEST(FillOpTest, QuantizedFillInt8) { QuantizedFill<int8_t>(3.14f); }

TEST(FillOpTest, QuantizedFillInt16) { QuantizedFill<int16_t>(3.14f); }

INSTANTIATE_TEST_SUITE_P(FillOpTest, FillOpTest,
                         ::testing::Values(TestType::kConst,
                                           TestType::kDynamic));

}  // namespace
}  // namespace tflite
tensorflow/tensorflow
tensorflow/lite/kernels/fill_test.cc
C++
apache-2.0
7,859
/**
 * Copyright 2017 The AMP HTML Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS-IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import {isLayoutSizeDefined} from '../../../src/layout';
import {tryParseJson} from '../../../src/json';
import {user} from '../../../src/log';
import {removeElement} from '../../../src/dom';
import {
  installVideoManagerForDoc,
} from '../../../src/service/video-manager-impl';
import {isObject} from '../../../src/types';
import {listen} from '../../../src/event-helper';
import {VideoEvents} from '../../../src/video-interface';
import {videoManagerForDoc} from '../../../src/services';

/**
 * AMP wrapper for the 3Q SDN iframe player. Communicates with the player
 * via postMessage and translates player events into AMP VideoEvents.
 * @implements {../../../src/video-interface.VideoInterface}
 */
class Amp3QPlayer extends AMP.BaseElement {

  /** @param {!AmpElement} element */
  constructor(element) {
    super(element);

    /** @private {?Element} */
    this.iframe_ = null;

    /** @private {?Function} */
    this.unlistenMessage_ = null;

    /** @private {?Promise} resolved once the player reports 'ready' */
    this.playerReadyPromise_ = null;

    /** @private {?Function} resolver for playerReadyPromise_ */
    this.playerReadyResolver_ = null;

    // Value of the required data-id attribute; set in buildCallback.
    this.dataId = null;
  }

  /**
   * @param {boolean=} opt_onLayout
   * @override
   */
  preconnectCallback(opt_onLayout) {
    this.preconnect.url('https://playout.3qsdn.com', opt_onLayout);
  }

  /** @override */
  buildCallback() {
    // data-id identifies the 3Q playout; building fails loudly without it.
    this.dataId = user().assert(
        this.element.getAttribute('data-id'),
        'The data-id attribute is required for <amp-3q-player> %s',
        this.element);

    this.playerReadyPromise_ = new Promise(resolve => {
      this.playerReadyResolver_ = resolve;
    });

    installVideoManagerForDoc(this.element);
    videoManagerForDoc(this.element).register(this);
  }

  /** @override */
  layoutCallback() {
    const iframe = this.element.ownerDocument.createElement('iframe');

    iframe.setAttribute('frameborder', '0');
    iframe.setAttribute('allowfullscreen', 'true');

    this.iframe_ = iframe;

    // Listen before the iframe loads so no player message is missed.
    this.unlistenMessage_ = listen(
        this.win,
        'message',
        this.sdnBridge_.bind(this)
    );

    this.applyFillContent(iframe, true);

    iframe.src = 'https://playout.3qsdn.com/'
        + encodeURIComponent(this.dataId) + '?autoplay=false&amp=true';

    this.element.appendChild(iframe);

    // Resolve layout only once the player itself signals readiness.
    return this.loadPromise(this.iframe_)
        .then(() => this.playerReadyPromise_);
  }

  /** @override */
  unlayoutCallback() {
    if (this.iframe_) {
      removeElement(this.iframe_);
      this.iframe_ = null;
    }

    if (this.unlistenMessage_) {
      this.unlistenMessage_();
    }

    // Fresh ready-promise so a future re-layout waits for the new iframe.
    this.playerReadyPromise_ = new Promise(resolve => {
      this.playerReadyResolver_ = resolve;
    });
    return true;
  }

  /** @override */
  isLayoutSupported(layout) {
    return isLayoutSizeDefined(layout);
  }

  /** @override */
  viewportCallback(visible) {
    this.element.dispatchCustomEvent(VideoEvents.VISIBILITY, {visible});
  }

  /** @override */
  pauseCallback() {
    if (this.iframe_) {
      this.pause();
    }
  }

  /**
   * Handles window 'message' events, accepting only those coming from this
   * element's iframe, and maps player states to AMP VideoEvents.
   * @param {!Event} event
   * @private
   */
  sdnBridge_(event) {
    if (event.source) {
      if (event.source != this.iframe_.contentWindow) {
        return;
      }
    }

    // Payload may arrive as an object or as a JSON string.
    const data = isObject(event.data) ? event.data : tryParseJson(event.data);
    if (data === undefined) {
      return; // not valid JSON
    }

    switch (data.data) {
      case 'ready':
        this.element.dispatchCustomEvent(VideoEvents.LOAD);
        this.playerReadyResolver_();
        break;
      case 'playing':
        this.element.dispatchCustomEvent(VideoEvents.PLAY);
        break;
      case 'paused':
        this.element.dispatchCustomEvent(VideoEvents.PAUSE);
        break;
      case 'muted':
        this.element.dispatchCustomEvent(VideoEvents.MUTED);
        break;
      case 'unmuted':
        this.element.dispatchCustomEvent(VideoEvents.UNMUTED);
        break;
    }
  }

  /**
   * Posts a command to the player once it is ready.
   * @param {string} message
   * @private
   */
  sdnPostMessage_(message) {
    this.playerReadyPromise_.then(() => {
      if (this.iframe_ && this.iframe_.contentWindow) {
        this.iframe_.contentWindow./*OK*/postMessage(message, '*');
      }
    });
  }

  // VideoInterface Implementation. See ../src/video-interface.VideoInterface

  /** @override */
  play() {
    this.sdnPostMessage_('play2');
  }

  /** @override */
  pause() {
    this.sdnPostMessage_('pause');
  }

  /** @override */
  mute() {
    this.sdnPostMessage_('mute');
  }

  /** @override */
  unmute() {
    this.sdnPostMessage_('unmute');
  }

  /** @override */
  supportsPlatform() {
    return true;
  }

  /** @override */
  isInteractive() {
    return true;
  }

  /** @override */
  showControls() {
    this.sdnPostMessage_('showControlbar');
  }

  /** @override */
  hideControls() {
    this.sdnPostMessage_('hideControlbar');
  }
};

AMP.registerElement('amp-3q-player', Amp3QPlayer);
ecoron/amphtml
extensions/amp-3q-player/0.1/amp-3q-player.js
JavaScript
apache-2.0
5,238
/*

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package org.apache.flex.forks.batik.svggen;

import java.awt.Composite;
import java.awt.Paint;
import java.awt.Rectangle;
import java.awt.image.BufferedImageOp;

/**
 * The ExtensionHandler interface allows the user to handle
 * Java 2D API extensions that map to SVG concepts (such as custom
 * Paints, Composites or BufferedImageOp filters).
 *
 * @author <a href="mailto:vincent.hardy@eng.sun.com">Vincent Hardy</a>
 * @version $Id: ExtensionHandler.java 478176 2006-11-22 14:50:50Z dvholten $
 */
public interface ExtensionHandler {
    /**
     * Converts a custom {@link Paint} to its SVG representation.
     *
     * @param paint Custom Paint to be converted to SVG
     * @param generatorContext allows the handler to build DOM objects as needed.
     * @return an SVGPaintDescriptor
     */
    SVGPaintDescriptor handlePaint(Paint paint,
                                   SVGGeneratorContext generatorContext);

    /**
     * Converts a custom {@link Composite} to its SVG representation.
     *
     * @param composite Custom Composite to be converted to SVG.
     * @param generatorContext allows the handler to build DOM objects as needed.
     * @return an SVGCompositeDescriptor which contains a valid SVG
     *         representation of the composite, or null if the composite
     *         cannot be handled
     */
    SVGCompositeDescriptor handleComposite(Composite composite,
                                           SVGGeneratorContext generatorContext);

    /**
     * Converts a custom {@link BufferedImageOp} filter to its SVG
     * representation.
     *
     * @param filter Custom filter to be converted to SVG.
     * @param filterRect Rectangle, in device space, that defines the area
     *        to which filtering applies. May be null, meaning that the
     *        area is undefined.
     * @param generatorContext allows the handler to build DOM objects as needed.
     * @return an SVGFilterDescriptor which contains a valid SVG filter,
     *         or null if the filter cannot be handled
     */
    SVGFilterDescriptor handleFilter(BufferedImageOp filter,
                                     Rectangle filterRect,
                                     SVGGeneratorContext generatorContext);
}
adufilie/flex-sdk
modules/thirdparty/batik/sources/org/apache/flex/forks/batik/svggen/ExtensionHandler.java
Java
apache-2.0
2,772
/** * Coder for Raspberry Pi * A simple platform for experimenting with web stuff. * http://goo.gl/coder * * Copyright 2013 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ exports.settings={}; //These are dynamically updated by the runtime //settings.appname - the app id (folder) where your app is installed //settings.viewpath - prefix to where your view html files are located //settings.staticurl - base url path to static assets /static/apps/appname //settings.appurl - base url path to this app /app/appname exports.get_routes = [ { path:'/', handler:'index_handler' }, ]; exports.post_routes = [ ]; exports.index_handler = function( req, res ) { var tmplvars = {}; tmplvars['static_url'] = exports.settings.staticurl; tmplvars['app_name'] = exports.settings.appname; tmplvars['app_url'] = exports.settings.appurl; tmplvars['device_name'] = exports.settings.device_name; res.render( exports.settings.viewpath + '/index', tmplvars ); }; exports.on_destroy = function() { };
jmp407/coderJmp
coder-base/apps/game2d/app.js
JavaScript
apache-2.0
1,564
/* Copyright (c) Citrix Systems, Inc. 
 * All rights reserved. 
 * 
 * Redistribution and use in source and binary forms, 
 * with or without modification, are permitted provided 
 * that the following conditions are met: 
 * 
 * *   Redistributions of source code must retain the above 
 *     copyright notice, this list of conditions and the 
 *     following disclaimer. 
 * *   Redistributions in binary form must reproduce the above 
 *     copyright notice, this list of conditions and the 
 *     following disclaimer in the documentation and/or other 
 *     materials provided with the distribution. 
 * 
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND 
 * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, 
 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR 
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 
 * SUCH DAMAGE. 
 */

using System;
using System.Collections.Generic;
using System.Threading;
using XenAdmin.Network;
using XenAPI;

namespace XenAdmin.Actions
{
    /// <summary>
    /// ParallelAction takes a list of any number of actions and runs a certain number of them simultaneously.
    /// Once one simultaneous action is finished the next one in the queue is started until all are complete
    /// </summary>
    public class ParallelAction : MultipleAction
    {
        //Change parameter to increase the number of concurrent actions running
        private const int DEFAULT_MAX_NUMBER_OF_PARALLEL_ACTIONS = 25;

        // Sub-actions bucketed by the connection they run against; each
        // connection gets its own producer/consumer queue so connections
        // proceed in parallel while each connection's queue is throttled.
        private Dictionary<IXenConnection, List<AsyncAction>> actionsByConnection = new Dictionary<IXenConnection, List<AsyncAction>>();
        private Dictionary<IXenConnection, ProduceConsumerQueue> queuesByConnection = new Dictionary<IXenConnection, ProduceConsumerQueue>();

        // Sub-actions with no connection run on their own dedicated queue.
        private List<AsyncAction> actionsWithNoConnection = new List<AsyncAction>();
        private ProduceConsumerQueue queueWithNoConnection;

        private readonly int maxNumberOfParallelActions;
        // Total number of sub-actions that will actually be run (excludes
        // actions whose connection is disconnected).
        private int actionsCount;

        /// <summary>
        /// Single-connection constructor: all sub-actions are assumed to
        /// belong to <paramref name="connection"/> when it is non-null.
        /// </summary>
        public ParallelAction(IXenConnection connection, string title, string startDescription, string endDescription,
                              List<AsyncAction> subActions, bool suppressHistory, bool showSubActionsDetails,
                              int maxNumberOfParallelActions = DEFAULT_MAX_NUMBER_OF_PARALLEL_ACTIONS)
            : base(connection, title, startDescription, endDescription, subActions, suppressHistory, showSubActionsDetails)
        {
            if (Connection != null)
            {
                actionsByConnection.Add(Connection, subActions);
                actionsCount = subActions.Count;
            }
            else
                GroupActionsByConnection();
            this.maxNumberOfParallelActions = maxNumberOfParallelActions;
        }

        public ParallelAction(IXenConnection connection, string title, string startDescription, string endDescription,
                              List<AsyncAction> subActions, int maxNumberOfParallelActions = DEFAULT_MAX_NUMBER_OF_PARALLEL_ACTIONS)
            : this(connection, title, startDescription, endDescription, subActions, false, false, maxNumberOfParallelActions)
        { }

        /// <summary>
        /// Use this constructor to create a cross connection ParallelAction.
        /// It takes a list of any number of actions, separates them by connections
        /// and runs a certain number of them simultaneously on each connection, all connections in parallel.
        /// Once one simultaneous action is finished the next one in the queue is started until all are complete.
        /// </summary>
        public ParallelAction(string title, string startDescription, string endDescription,
                              List<AsyncAction> subActions, bool suppressHistory, bool showSubActionsDetails,
                              int maxNumberOfParallelActions = DEFAULT_MAX_NUMBER_OF_PARALLEL_ACTIONS)
            : base(null, title, startDescription, endDescription, subActions, suppressHistory, showSubActionsDetails)
        {
            GroupActionsByConnection();
            this.maxNumberOfParallelActions = maxNumberOfParallelActions;
        }

        public ParallelAction(string title, string startDescription, string endDescription,
                              List<AsyncAction> subActions,
                              int maxNumberOfParallelActions = DEFAULT_MAX_NUMBER_OF_PARALLEL_ACTIONS)
            : this(title, startDescription, endDescription, subActions, false, false, maxNumberOfParallelActions)
        { }

        // Buckets subActions by connection. Actions on disconnected
        // connections are silently dropped and do not count towards
        // actionsCount.
        private void GroupActionsByConnection()
        {
            actionsCount = 0;
            foreach (AsyncAction action in subActions)
            {
                if (action.Connection != null)
                {
                    if (action.Connection.IsConnected)
                    {
                        if (!actionsByConnection.ContainsKey(action.Connection))
                        {
                            actionsByConnection.Add(action.Connection, new List<AsyncAction>());
                        }

                        actionsByConnection[action.Connection].Add(action);
                        actionsCount++;
                    }
                }
                else
                {
                    actionsWithNoConnection.Add(action);
                    actionsCount++;
                }
            }
        }

        // Enqueues all sub-actions on per-connection queues, then blocks on
        // _lock until action_Completed pulses it after the last sub-action.
        protected override void RunSubActions(List<Exception> exceptions)
        {
            if (actionsCount == 0)
                return;

            foreach (IXenConnection connection in actionsByConnection.Keys)
            {
                // Don't spawn more workers than there are actions to run.
                queuesByConnection[connection] = new ProduceConsumerQueue(Math.Min(maxNumberOfParallelActions, actionsByConnection[connection].Count));
                foreach (AsyncAction subAction in actionsByConnection[connection])
                {
                    EnqueueAction(subAction, queuesByConnection[connection], exceptions);
                }
            }

            if (actionsWithNoConnection.Count > 0)
                queueWithNoConnection = new ProduceConsumerQueue(Math.Min(maxNumberOfParallelActions, actionsWithNoConnection.Count));

            foreach (AsyncAction subAction in actionsWithNoConnection)
            {
                EnqueueAction(subAction, queueWithNoConnection, exceptions);
            }

            lock (_lock)
            {
                Monitor.Wait(_lock);
            }
        }

        // Queues one sub-action; exceptions are collected rather than thrown
        // so one failure does not stop the remaining sub-actions.
        void EnqueueAction(AsyncAction action, ProduceConsumerQueue queue, List<Exception> exceptions)
        {
            action.Completed += action_Completed;
            queue.EnqueueItem(
                () =>
                {
                    if (Cancelling) // don't start any more actions
                        return;
                    try
                    {
                        action.RunExternal(action.Session);
                    }
                    catch (Exception e)
                    {
                        Failure f = e as Failure;
                        if (f != null && Connection != null &&
                            f.ErrorDescription[0] == Failure.RBAC_PERMISSION_DENIED)
                        {
                            Failure.ParseRBACFailure(f, action.Connection, action.Session ?? action.Connection.Session);
                        }
                        exceptions.Add(e);
                        // Record the first exception we come to. Though later if there are more than one we will replace this with non specific one.
                        if (Exception == null)
                            Exception = e;
                    }
                });
        }

        // Averages sub-action progress across all sub-actions.
        // NOTE(review): divides by actionsCount — looks like this would throw
        // DivideByZeroException if ever invoked with zero runnable
        // sub-actions; confirm against base-class call sites.
        protected override void RecalculatePercentComplete()
        {
            int total = 0;
            foreach (IXenConnection connection in actionsByConnection.Keys)
            {
                foreach (var action in actionsByConnection[connection])
                    total += action.PercentComplete;
            }
            foreach (var action in actionsWithNoConnection)
                total += action.PercentComplete;
            PercentComplete = (int)(total / actionsCount);
        }

        private readonly object _lock = new object();
        // Number of completed sub-actions; guarded by _lock.
        private volatile int i = 0;

        // Pulses _lock (waking RunSubActions) once every sub-action is done.
        void action_Completed(ActionBase sender)
        {
            sender.Completed -= action_Completed;
            lock (_lock)
            {
                i++;
                if (i == actionsCount)
                {
                    Monitor.Pulse(_lock);
                    PercentComplete = 100;
                }
            }
        }

        // Stops all queue workers after the composite action completes.
        protected override void MultipleAction_Completed(ActionBase sender)
        {
            base.MultipleAction_Completed(sender);

            foreach (IXenConnection connection in queuesByConnection.Keys)
            {
                queuesByConnection[connection].StopWorkers(false);
            }
            if (queueWithNoConnection != null)
                queueWithNoConnection.StopWorkers(false);
        }
    }
}
geosharath/xenadmin
XenModel/Actions/ParallelAction.cs
C#
bsd-2-clause
9,686
# Homebrew Cask definition for Malwarebytes for Mac.
cask 'malwarebytes' do
  version '3.0.3.433'
  sha256 'ab592edc1aec714d009455fe7b53a759dd2f0cc21e3b74697a028979ef5d50f7'

  # data-cdn.mbamupdates.com/web was verified as official when first introduced to the cask
  url "https://data-cdn.mbamupdates.com/web/mb#{version.major}_mac/Malwarebytes-#{version}.dmg"
  name 'Malwarebytes for Mac'
  homepage 'https://www.malwarebytes.com/mac/'

  # The app updates itself, so brew should not try to manage upgrades.
  auto_updates true
  depends_on macos: '>= :yosemite'

  pkg "Install Malwarebytes #{version.major}.pkg"

  # Uninstall must unload the launchd jobs and the real-time protection
  # kext before removing the package receipt and support files.
  uninstall delete:    '/Library/Application Support/Malwarebytes/MBAM',
            kext:      'com.malwarebytes.mbam.rtprotection',
            launchctl: [
                         'com.malwarebytes.mbam.frontend.agent',
                         'com.malwarebytes.mbam.rtprotection.daemon',
                         'com.malwarebytes.mbam.settings.daemon',
                       ],
            pkgutil:   'com.malwarebytes.mbam',
            quit:      'com.malwarebytes.mbam.frontend.agent',
            rmdir:     '/Library/Application Support/Malwarebytes'
end
shonjir/homebrew-cask
Casks/malwarebytes.rb
Ruby
bsd-2-clause
1,063
// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. /** * Asserts that device property values match properties in |expectedProperties|. * The method will *not* assert that the device contains *only* properties * specified in expected properties. * @param {Object} expectedProperties Expected device properties. * @param {Object} device Device object to test. */ function assertDeviceMatches(expectedProperties, device) { Object.keys(expectedProperties).forEach(function(key) { chrome.test.assertEq(expectedProperties[key], device[key], 'Property ' + key + ' of device ' + device.id); }); } /** * Verifies that list of devices contains all and only devices from set of * expected devices. If will fail the test if an unexpected device is found. * * @param {Object.<string, Object>} expectedDevices Expected set of test * devices. Maps device ID to device properties. * @param {Array.<Object>} devices List of input devices. */ function assertDevicesMatch(expectedDevices, devices) { var deviceIds = {}; devices.forEach(function(device) { chrome.test.assertFalse(!!deviceIds[device.id], 'Duplicated device id: \'' + device.id + '\'.'); deviceIds[device.id] = true; }); function sortedKeys(obj) { return Object.keys(obj).sort(); } chrome.test.assertEq(sortedKeys(expectedDevices), sortedKeys(deviceIds)); devices.forEach(function(device) { assertDeviceMatches(expectedDevices[device.id], device); }); } /** * * @param {Array.<Object>} devices List of devices returned by * chrome.audio.getInfo or chrome.audio.getDevices. * @return {Object.<string, Object>} List of devices formatted as map of * expected devices used to assert devices match expectation. 
 */
function deviceListToExpectedDevicesMap(devices) {
  // Re-keys the device list by device ID for use with assertDevicesMatch.
  var expectedDevicesMap = {};
  devices.forEach(function(device) {
    expectedDevicesMap[device.id] = device;
  });
  return expectedDevicesMap;
}

/**
 * @param {Array.<Object>} devices List of devices returned by
 *     chrome.audio.getInfo or chrome.audio.getDevices.
 * @return {Array.<string>} Sorted list devices IDs for devices in |devices|.
 */
function getDeviceIds(devices) {
  return devices.map(function(device) {return device.id;}).sort();
}

// Counts how many times |targetEvent| fires until reset() is called.
function EventListener(targetEvent) {
  this.targetEvent = targetEvent;
  this.listener = this.handleEvent.bind(this);
  this.targetEvent.addListener(this.listener);

  this.eventCount = 0;
}

EventListener.prototype.handleEvent = function() {
  ++this.eventCount;
}

// Detaches the listener from the event.
EventListener.prototype.reset = function() {
  this.targetEvent.removeListener(this.listener);
}

// Listener installed by the first test and checked/removed by
// |verifyNoDeviceChangedEvents| at the end of the suite.
var deviceChangedListener = null;

chrome.test.runTests([
  // Sets up a listener for audio.onDeviceChanged event -
  // |verifyNoDeviceChangedEvents| test will later verify that no
  // onDeviceChanged events have been observed.
  function startDeviceChangedListener() {
    deviceChangedListener = new EventListener(chrome.audio.onDeviceChanged);
    chrome.test.succeed();
  },

  function getDevicesTest() {
    // Test output devices. Maps device ID -> tested device properties.
var kTestDevices = { '30001': { id: '30001', stableDeviceId: '0', displayName: 'Jabra Speaker 1', deviceName: 'Jabra Speaker', streamType: 'OUTPUT' }, '30002': { id: '30002', stableDeviceId: '1', displayName: 'Jabra Speaker 2', deviceName: 'Jabra Speaker', streamType: 'OUTPUT' }, '30003': { id: '30003', stableDeviceId: '2', displayName: 'HDA Intel MID', deviceName: 'HDMI output', streamType: 'OUTPUT' }, '40001': { id: '40001', stableDeviceId: '3', displayName: 'Jabra Mic 1', deviceName: 'Jabra Mic', streamType: 'INPUT' }, '40002': { id: '40002', stableDeviceId: '4', displayName: 'Jabra Mic 2', deviceName: 'Jabra Mic', streamType: 'INPUT' }, '40003': { id: '40003', stableDeviceId: '5', displayName: 'Logitech Webcam', deviceName: 'Webcam Mic', streamType: 'INPUT' } }; chrome.audio.getDevices(chrome.test.callbackPass(function(devices) { assertDevicesMatch(kTestDevices, devices); })); }, function getDevicesWithEmptyFilterTest() { // Test output devices. Maps device ID -> tested device properties. 
var kTestDevices = { '30001': { id: '30001', stableDeviceId: '0', displayName: 'Jabra Speaker 1', deviceName: 'Jabra Speaker', streamType: 'OUTPUT' }, '30002': { id: '30002', stableDeviceId: '1', displayName: 'Jabra Speaker 2', deviceName: 'Jabra Speaker', streamType: 'OUTPUT' }, '30003': { id: '30003', stableDeviceId: '2', displayName: 'HDA Intel MID', deviceName: 'HDMI output', streamType: 'OUTPUT' }, '40001': { id: '40001', stableDeviceId: '3', displayName: 'Jabra Mic 1', deviceName: 'Jabra Mic', streamType: 'INPUT' }, '40002': { id: '40002', stableDeviceId: '4', displayName: 'Jabra Mic 2', deviceName: 'Jabra Mic', streamType: 'INPUT' }, '40003': { id: '40003', stableDeviceId: '5', displayName: 'Logitech Webcam', deviceName: 'Webcam Mic', streamType: 'INPUT' } }; chrome.audio.getDevices({}, chrome.test.callbackPass(function(devices) { assertDevicesMatch(kTestDevices, devices); })); }, function getInputDevicesTest() { var kTestDevices = { '40001': { id: '40001', streamType: 'INPUT' }, '40002': { id: '40002', streamType: 'INPUT' }, '40003': { id: '40003', streamType: 'INPUT' } }; chrome.audio.getDevices({ streamTypes: ['INPUT'] }, chrome.test.callbackPass(function(devices) { assertDevicesMatch(kTestDevices, devices); })); }, function getOutputDevicesTest() { var kTestDevices = { '30001': { id: '30001', streamType: 'OUTPUT' }, '30002': { id: '30002', streamType: 'OUTPUT' }, '30003': { id: '30003', streamType: 'OUTPUT' }, }; chrome.audio.getDevices({ streamTypes: ['OUTPUT'] }, chrome.test.callbackPass(function(devices) { assertDevicesMatch(kTestDevices, devices); })); }, function getActiveDevicesTest() { chrome.audio.getDevices(chrome.test.callbackPass(function(initial) { var initialActiveDevices = initial.filter(function(device) { return device.isActive; }); chrome.test.assertTrue(initialActiveDevices.length > 0); chrome.audio.getDevices({ isActive: true }, chrome.test.callbackPass(function(devices) { assertDevicesMatch( 
deviceListToExpectedDevicesMap(initialActiveDevices), devices); })); var initialActiveInputs = initialActiveDevices.filter(function(device) { return device.streamType === 'INPUT'; }); chrome.test.assertTrue(initialActiveInputs.length > 0); chrome.audio.getDevices({ isActive: true, streamTypes: ['INPUT'] }, chrome.test.callbackPass(function(devices) { assertDevicesMatch( deviceListToExpectedDevicesMap(initialActiveInputs), devices); })); var initialActiveOutputs = initialActiveDevices.filter(function(device) { return device.streamType === 'OUTPUT'; }); chrome.test.assertTrue(initialActiveOutputs.length > 0); chrome.audio.getDevices({ isActive: true, streamTypes: ['OUTPUT'] }, chrome.test.callbackPass(function(devices) { assertDevicesMatch( deviceListToExpectedDevicesMap(initialActiveOutputs), devices); })); })); }, function getInactiveDevicesTest() { chrome.audio.getDevices(chrome.test.callbackPass(function(initial) { var initialInactiveDevices = initial.filter(function(device) { return !device.isActive; }); chrome.test.assertTrue(initialInactiveDevices.length > 0); chrome.audio.getDevices({ isActive: false }, chrome.test.callbackPass(function(devices) { assertDevicesMatch( deviceListToExpectedDevicesMap(initialInactiveDevices), devices); })); })); }, function setPropertiesTest() { chrome.audio.getDevices(chrome.test.callbackPass(function(initial) { var expectedDevices = deviceListToExpectedDevicesMap(initial); // Update expected input devices with values that should be changed in // test. var updatedInput = expectedDevices['40002']; chrome.test.assertFalse(updatedInput.gain === 65); updatedInput.level = 65; // Update expected output devices with values that should be changed in // test. 
var updatedOutput = expectedDevices['30001']; chrome.test.assertFalse(updatedOutput.volume === 45); updatedOutput.level = 45; chrome.audio.setProperties('30001', { level: 45 }, chrome.test.callbackPass(function() { chrome.audio.setProperties('40002', { level: 65 }, chrome.test.callbackPass(function() { chrome.audio.getDevices(chrome.test.callbackPass(function(devices) { assertDevicesMatch(expectedDevices, devices); })); })); })); })); }, function inputMuteTest() { var getMute = function(callback) { chrome.audio.getMute('INPUT', chrome.test.callbackPass(callback)); }; getMute(function(originalValue) { chrome.audio.setMute( 'INPUT', !originalValue, chrome.test.callbackPass(function() { getMute(function(value) { chrome.test.assertEq(!originalValue, value); }); })); }); }, function outputMuteTest() { var getMute = function(callback) { chrome.audio.getMute('OUTPUT', chrome.test.callbackPass(callback)); }; getMute(function(originalValue) { chrome.audio.setMute( 'OUTPUT', !originalValue, chrome.test.callbackPass(function() { getMute(function(value) { chrome.test.assertEq(!originalValue, value); }); })); }); }, function setActiveDevicesTest() { chrome.audio.setActiveDevices({ input: ['40002', '40003'], output: ['30001'] }, chrome.test.callbackPass(function() { chrome.audio.getDevices({ isActive: true }, chrome.test.callbackPass(function(activeDevices) { chrome.test.assertEq(['30001', '40002', '40003'], getDeviceIds(activeDevices)); })); })); }, function setActiveDevicesOutputOnlyTest() { chrome.audio.getDevices({ streamTypes: ['INPUT'], isActive: true }, chrome.test.callbackPass(function(initial) { var initialActiveInputs = getDeviceIds(initial); chrome.test.assertTrue(initialActiveInputs.length > 0); chrome.audio.setActiveDevices({ output: ['30003'] }, chrome.test.callbackPass(function() { chrome.audio.getDevices({ isActive: true }, chrome.test.callbackPass(function(devices) { var expected = ['30003'].concat(initialActiveInputs).sort(); chrome.test.assertEq(expected, 
getDeviceIds(devices)); })); })); })); }, function setActiveDevicesFailInputTest() { chrome.audio.getDevices({ isActive: true }, chrome.test.callbackPass(function(initial) { var initialActiveIds = getDeviceIds(initial); chrome.test.assertTrue(initialActiveIds.length > 0); chrome.audio.setActiveDevices({ input: ['0000000'], /* does not exist */ output: [] }, chrome.test.callbackFail('Failed to set active devices.', function() { chrome.audio.getDevices({ isActive: true }, chrome.test.callbackPass(function(devices) { chrome.test.assertEq(initialActiveIds, getDeviceIds(devices)); })); })); })); }, function setActiveDevicesFailOutputTest() { chrome.audio.getDevices({ isActive: true }, chrome.test.callbackPass(function(initial) { var initialActiveIds = getDeviceIds(initial); chrome.test.assertTrue(initialActiveIds.length > 0); chrome.audio.setActiveDevices({ input: [], output: ['40001'] /* id is input node ID */ }, chrome.test.callbackFail('Failed to set active devices.', function() { chrome.audio.getDevices({ isActive: true }, chrome.test.callbackPass(function(devices) { chrome.test.assertEq(initialActiveIds, getDeviceIds(devices)); })); })); })); }, function clearActiveDevicesTest() { chrome.audio.getDevices({ isActive: true }, chrome.test.callbackPass(function(initial) { chrome.test.assertTrue(getDeviceIds(initial).length > 0); chrome.audio.setActiveDevices({ input: [], output: [] }, chrome.test.callbackPass(function() { chrome.audio.getDevices({ isActive: true }, chrome.test.callbackPass(function(devices) { chrome.test.assertEq([], devices); })); })); })); }, function verifyNoDeviceChangedEvents() { chrome.test.assertTrue(!!deviceChangedListener); chrome.test.assertEq(0, deviceChangedListener.eventCount); deviceChangedListener.reset(); deviceChangedListener = null; chrome.test.succeed(); }, // Tests verifying the app doesn't have access to deprecated part of the API: function deprecated_GetInfoTest() { chrome.audio.getInfo(chrome.test.callbackFail( 'audio.getInfo is 
deprecated, use audio.getDevices instead.')); }, function deprecated_setProperties_isMuted() { chrome.audio.getDevices(chrome.test.callbackPass(function(initial) { var expectedDevices = deviceListToExpectedDevicesMap(initial); var expectedError = '|isMuted| property is deprecated, use |audio.setMute|.'; chrome.audio.setProperties('30001', { isMuted: true, // Output device - should have volume set. level: 55 }, chrome.test.callbackFail(expectedError, function() { // Assert that device properties haven't changed. chrome.audio.getDevices(chrome.test.callbackPass(function(devices) { assertDevicesMatch(expectedDevices, devices); })); })); })); }, function deprecated_setProperties_volume() { chrome.audio.getDevices(chrome.test.callbackPass(function(initial) { var expectedDevices = deviceListToExpectedDevicesMap(initial); var expectedError = '|volume| property is deprecated, use |level|.'; chrome.audio.setProperties('30001', { volume: 2, // Output device - should have volume set. level: 55 }, chrome.test.callbackFail(expectedError, function() { // Assert that device properties haven't changed. chrome.audio.getDevices(chrome.test.callbackPass(function(devices) { assertDevicesMatch(expectedDevices, devices); })); })); })); }, function deprecated_setProperties_gain() { chrome.audio.getDevices(chrome.test.callbackPass(function(initial) { var expectedDevices = deviceListToExpectedDevicesMap(initial); var expectedError = '|gain| property is deprecated, use |level|.'; chrome.audio.setProperties('40001', { gain: 2, // Output device - should have volume set. level: 55 }, chrome.test.callbackFail(expectedError, function() { // Assert that device properties haven't changed. 
chrome.audio.getDevices(chrome.test.callbackPass(function(devices) { assertDevicesMatch(expectedDevices, devices); })); })); })); }, function deprecated_SetActiveDevicesTest() { var kExpectedError = 'String list |ids| is deprecated, use DeviceIdLists type.'; chrome.audio.setActiveDevices([ '30003', '40002' ], chrome.test.callbackFail(kExpectedError)); }, ]);
scheib/chromium
extensions/test/data/api_test/audio/test.js
JavaScript
bsd-3-clause
16,571
// // This macro reads the Hits Tree // void AliPMDHitsRead(Int_t nevt = 1) { TStopwatch timer; timer.Start(); TH2F *h2 = new TH2F("h2"," Y vs. X",200,-100.,100.,200,-100.,100.); // FILE *fpw = fopen("alipmdhits.dat","w"); AliRunLoader *fRunLoader = AliRunLoader::Open("galice.root"); if (!fRunLoader) { printf("Can not open session for file "); } if (!fRunLoader->GetAliRun()) fRunLoader->LoadgAlice(); if (!fRunLoader->TreeE()) fRunLoader->LoadHeader(); if (!fRunLoader->TreeK()) fRunLoader->LoadKinematics(); gAlice = fRunLoader->GetAliRun(); if (gAlice) { printf("Alirun object found\n"); } else { printf("Could not found Alirun object\n"); } fPMD = (AliPMD*)gAlice->GetDetector("PMD"); fPMDLoader = fRunLoader->GetLoader("PMDLoader"); if (fPMDLoader == 0x0) { printf("Can not find PMDLoader\n"); } fPMDLoader->LoadHits("READ"); // This reads the PMD Hits tree and assigns the right track number // to a cell and stores in the summable digits tree // const Int_t kPi0 = 111; const Int_t kGamma = 22; Int_t npmd; Int_t trackno; Int_t smnumber; Int_t trackpid; Int_t mtrackno; Int_t mtrackpid; Int_t xpad = -1, ypad = -1; Float_t edep; Float_t vx = -999.0, vy = -999.0, vz = -999.0; Float_t xPos, yPos, zPos; Float_t xx, yy; AliPMDUtility cc; for (Int_t ievt = 0; ievt < nevt; ievt++) { printf("Event Number = %d\n",ievt); Int_t nparticles = fRunLoader->GetHeader()->GetNtrack(); printf("Number of Particles = %d\n",nparticles); fRunLoader->GetEvent(ievt); // ------------------------------------------------------- // // Pointer to specific detector hits. 
// Get pointers to Alice detectors and Hits containers TTree* treeH = fPMDLoader->TreeH(); Int_t ntracks = (Int_t) treeH->GetEntries(); printf("Number of Tracks in the TreeH = %d\n", ntracks); TClonesArray* hits = 0; if (fPMD) hits = fPMD->Hits(); // Start loop on tracks in the hits containers for (Int_t track=0; track<ntracks;track++) { gAlice->GetMCApp()->ResetHits(); treeH->GetEvent(track); if (fPMD) { npmd = hits->GetEntriesFast(); for (int ipmd = 0; ipmd < npmd; ipmd++) { fPMDHit = (AliPMDhit*) hits->UncheckedAt(ipmd); trackno = fPMDHit->GetTrack(); //fprintf(fpw,"trackno = %d\n",trackno); // get kinematics of the particles TParticle* mparticle = gAlice->GetMCApp()->Particle(trackno); trackpid = mparticle->GetPdgCode(); Int_t igatr = -999; Int_t ichtr = -999; Int_t igapid = -999; Int_t imo; Int_t igen = 0; Int_t idmo = -999; Int_t tracknoOld=0, trackpidOld=0, statusOld = 0; if (mparticle->GetFirstMother() == -1) { tracknoOld = trackno; trackpidOld = trackpid; statusOld = -1; vx = mparticle->Vx(); vy = mparticle->Vy(); vz = mparticle->Vz(); //fprintf(fpw,"==> Mother ID %5d %5d %5d Vertex: %13.3f %13.3f %13.3f\n", igen, -1, trackpid, vx, vy, vz); } Int_t igstatus = 0; while((imo = mparticle->GetFirstMother()) >= 0) { igen++; mparticle = gAlice->GetMCApp()->Particle(imo); idmo = mparticle->GetPdgCode(); vx = mparticle->Vx(); vy = mparticle->Vy(); vz = mparticle->Vz(); //printf("==> Mother ID %5d %5d %5d Vertex: %13.3f %13.3f %13.3f\n", igen, imo, idmo, vx, vy, vz); //fprintf(fpw,"==> Mother ID %5d %5d %5d Vertex: %13.3f %13.3f %13.3f\n", igen, imo, idmo, vx, vy, vz); if ((idmo == kGamma || idmo == -11 || idmo == 11) && vx == 0. && vy == 0. && vz == 0.) { igatr = imo; igapid = idmo; igstatus = 1; } if(igstatus == 0) { if (idmo == kPi0 && vx == 0. && vy == 0. && vz == 0.) { igatr = imo; igapid = idmo; } } ichtr = imo; } // end of while loop if (idmo == kPi0 && vx == 0. && vy == 0. && vz == 0.) 
{ mtrackno = igatr; mtrackpid = igapid; } else { mtrackno = ichtr; mtrackpid = idmo; } if (statusOld == -1) { mtrackno = tracknoOld; mtrackpid = trackpidOld; } //printf("mtrackno = %d mtrackpid = %d\n",mtrackno,mtrackpid); xPos = fPMDHit->X(); yPos = fPMDHit->Y(); zPos = fPMDHit->Z(); Float_t time = fPMDHit->GetTime(); printf("++++++++++ time = %f\n",time); edep = fPMDHit->GetEnergy(); Int_t vol1 = fPMDHit->GetVolume(1); // Column Int_t vol2 = fPMDHit->GetVolume(2); // Row Int_t vol3 = fPMDHit->GetVolume(4); // UnitModule // -----------------------------------------// // For Super Module 1 & 2 // // nrow = 96, ncol = 48 // // For Super Module 3 & 4 // // nrow = 48, ncol = 96 // // -----------------------------------------// if (vol3 < 24) { smnumber = vol3; } else { smnumber = vol3 - 24; } xpad = vol1; ypad = vol2; if(zPos > 361.5) { cc.RectGeomCellPos(smnumber,xpad,ypad,xx,yy); h2->Fill(xx,yy); } } } } // Track Loop ended } h2->Draw(); fRunLoader->UnloadgAlice(); fRunLoader->UnloadHeader(); fRunLoader->UnloadKinematics(); fPMDLoader->UnloadHits(); timer.Stop(); timer.Print(); }
ecalvovi/AliRoot
PMD/macro/AliPMDHitsRead.C
C++
bsd-3-clause
5,688
#!/usr/bin/env python # Copyright (c) 2017 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Valid certificate chain where the target certificate contains a public key with a 512-bit modulus (weak).""" import sys sys.path += ['../..'] import gencerts # Self-signed root certificate. root = gencerts.create_self_signed_root_certificate('Root') # Intermediate intermediate = gencerts.create_intermediate_certificate('Intermediate', root) # Target certificate. target = gencerts.create_end_entity_certificate('Target', intermediate) target.set_key(gencerts.get_or_generate_rsa_key( 512, gencerts.create_key_path(target.name))) chain = [target, intermediate, root] gencerts.write_chain(__doc__, chain, 'chain.pem')
nwjs/chromium.src
net/data/verify_certificate_chain_unittest/target-has-512bit-rsa-key/generate-chains.py
Python
bsd-3-clause
820
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/public/browser/render_process_host_creation_observer.h" #include "content/browser/renderer_host/render_process_host_impl.h" namespace content { RenderProcessHostCreationObserver::RenderProcessHostCreationObserver() { RenderProcessHostImpl::RegisterCreationObserver(this); } RenderProcessHostCreationObserver::~RenderProcessHostCreationObserver() { RenderProcessHostImpl::UnregisterCreationObserver(this); } } // namespace content
endlessm/chromium-browser
content/browser/renderer_host/render_process_host_creation_observer.cc
C++
bsd-3-clause
624
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ui/views/widget/desktop_aura/x11_topmost_window_finder.h" #include <algorithm> #include <vector> #include <X11/extensions/shape.h> #include <X11/Xlib.h> #include <X11/Xregion.h> // Get rid of X11 macros which conflict with gtest. #undef Bool #undef None #include "base/memory/scoped_ptr.h" #include "base/path_service.h" #include "third_party/skia/include/core/SkRect.h" #include "third_party/skia/include/core/SkRegion.h" #include "ui/aura/window.h" #include "ui/aura/window_tree_host.h" #include "ui/base/resource/resource_bundle.h" #include "ui/base/ui_base_paths.h" #include "ui/events/platform/x11/x11_event_source.h" #include "ui/gfx/path.h" #include "ui/gfx/path_x11.h" #include "ui/gfx/x/x11_atom_cache.h" #include "ui/gl/test/gl_surface_test_support.h" #include "ui/views/test/views_test_base.h" #include "ui/views/test/x11_property_change_waiter.h" #include "ui/views/widget/desktop_aura/desktop_native_widget_aura.h" #include "ui/views/widget/desktop_aura/x11_desktop_handler.h" #include "ui/views/widget/widget.h" namespace views { namespace { // Waits till |window| is minimized. 
class MinimizeWaiter : public X11PropertyChangeWaiter { public: explicit MinimizeWaiter(XID window) : X11PropertyChangeWaiter(window, "_NET_WM_STATE") { const char* kAtomsToCache[] = { "_NET_WM_STATE_HIDDEN", NULL }; atom_cache_.reset(new ui::X11AtomCache(gfx::GetXDisplay(), kAtomsToCache)); } ~MinimizeWaiter() override {} private: // X11PropertyChangeWaiter: bool ShouldKeepOnWaiting(const ui::PlatformEvent& event) override { std::vector<Atom> wm_states; if (ui::GetAtomArrayProperty(xwindow(), "_NET_WM_STATE", &wm_states)) { std::vector<Atom>::iterator it = std::find( wm_states.begin(), wm_states.end(), atom_cache_->GetAtom("_NET_WM_STATE_HIDDEN")); return it == wm_states.end(); } return true; } scoped_ptr<ui::X11AtomCache> atom_cache_; DISALLOW_COPY_AND_ASSIGN(MinimizeWaiter); }; // Waits till |_NET_CLIENT_LIST_STACKING| is updated to include // |expected_windows|. class StackingClientListWaiter : public X11PropertyChangeWaiter { public: StackingClientListWaiter(XID* expected_windows, size_t count) : X11PropertyChangeWaiter(ui::GetX11RootWindow(), "_NET_CLIENT_LIST_STACKING"), expected_windows_(expected_windows, expected_windows + count) { } ~StackingClientListWaiter() override {} // X11PropertyChangeWaiter: void Wait() override { // StackingClientListWaiter may be created after // _NET_CLIENT_LIST_STACKING already contains |expected_windows|. 
if (!ShouldKeepOnWaiting(NULL)) return; X11PropertyChangeWaiter::Wait(); } private: // X11PropertyChangeWaiter: bool ShouldKeepOnWaiting(const ui::PlatformEvent& event) override { std::vector<XID> stack; ui::GetXWindowStack(ui::GetX11RootWindow(), &stack); for (size_t i = 0; i < expected_windows_.size(); ++i) { std::vector<XID>::iterator it = std::find( stack.begin(), stack.end(), expected_windows_[i]); if (it == stack.end()) return true; } return false; } std::vector<XID> expected_windows_; DISALLOW_COPY_AND_ASSIGN(StackingClientListWaiter); }; } // namespace class X11TopmostWindowFinderTest : public ViewsTestBase { public: X11TopmostWindowFinderTest() { } ~X11TopmostWindowFinderTest() override {} // Creates and shows a Widget with |bounds|. The caller takes ownership of // the returned widget. scoped_ptr<Widget> CreateAndShowWidget(const gfx::Rect& bounds) { scoped_ptr<Widget> toplevel(new Widget); Widget::InitParams params = CreateParams(Widget::InitParams::TYPE_WINDOW); params.ownership = Widget::InitParams::WIDGET_OWNS_NATIVE_WIDGET; params.native_widget = new DesktopNativeWidgetAura(toplevel.get()); params.bounds = bounds; params.remove_standard_frame = true; toplevel->Init(params); toplevel->Show(); return toplevel.Pass(); } // Creates and shows an X window with |bounds|. XID CreateAndShowXWindow(const gfx::Rect& bounds) { XID root = DefaultRootWindow(xdisplay()); XID xid = XCreateSimpleWindow(xdisplay(), root, 0, 0, 1, 1, 0, // border_width 0, // border 0); // background ui::SetUseOSWindowFrame(xid, false); ShowAndSetXWindowBounds(xid, bounds); return xid; } // Shows |xid| and sets its bounds. 
void ShowAndSetXWindowBounds(XID xid, const gfx::Rect& bounds) { XMapWindow(xdisplay(), xid); XWindowChanges changes = {0}; changes.x = bounds.x(); changes.y = bounds.y(); changes.width = bounds.width(); changes.height = bounds.height(); XConfigureWindow(xdisplay(), xid, CWX | CWY | CWWidth | CWHeight, &changes); } Display* xdisplay() { return gfx::GetXDisplay(); } // Returns the topmost X window at the passed in screen position. XID FindTopmostXWindowAt(int screen_x, int screen_y) { X11TopmostWindowFinder finder; return finder.FindWindowAt(gfx::Point(screen_x, screen_y)); } // Returns the topmost aura::Window at the passed in screen position. Returns // NULL if the topmost window does not have an associated aura::Window. aura::Window* FindTopmostLocalProcessWindowAt(int screen_x, int screen_y) { X11TopmostWindowFinder finder; return finder.FindLocalProcessWindowAt(gfx::Point(screen_x, screen_y), std::set<aura::Window*>()); } // Returns the topmost aura::Window at the passed in screen position ignoring // |ignore_window|. Returns NULL if the topmost window does not have an // associated aura::Window. aura::Window* FindTopmostLocalProcessWindowWithIgnore( int screen_x, int screen_y, aura::Window* ignore_window) { std::set<aura::Window*> ignore; ignore.insert(ignore_window); X11TopmostWindowFinder finder; return finder.FindLocalProcessWindowAt(gfx::Point(screen_x, screen_y), ignore); } static void SetUpTestCase() { gfx::GLSurfaceTestSupport::InitializeOneOff(); ui::RegisterPathProvider(); base::FilePath ui_test_pak_path; ASSERT_TRUE(PathService::Get(ui::UI_TEST_PAK, &ui_test_pak_path)); ui::ResourceBundle::InitSharedInstanceWithPakPath(ui_test_pak_path); } // ViewsTestBase: void SetUp() override { ViewsTestBase::SetUp(); // Make X11 synchronous for our display connection. This does not force the // window manager to behave synchronously. XSynchronize(xdisplay(), True); // Ensure that the X11DesktopHandler exists. 
The X11DesktopHandler is // necessary to properly track menu windows. X11DesktopHandler::get(); } void TearDown() override { XSynchronize(xdisplay(), False); ViewsTestBase::TearDown(); } private: DISALLOW_COPY_AND_ASSIGN(X11TopmostWindowFinderTest); }; TEST_F(X11TopmostWindowFinderTest, Basic) { // Avoid positioning test windows at 0x0 because window managers often have a // panel/launcher along one of the screen edges and do not allow windows to // position themselves to overlap the panel/launcher. scoped_ptr<Widget> widget1( CreateAndShowWidget(gfx::Rect(100, 100, 200, 100))); aura::Window* window1 = widget1->GetNativeWindow(); XID xid1 = window1->GetHost()->GetAcceleratedWidget(); XID xid2 = CreateAndShowXWindow(gfx::Rect(200, 100, 100, 200)); scoped_ptr<Widget> widget3( CreateAndShowWidget(gfx::Rect(100, 190, 200, 110))); aura::Window* window3 = widget3->GetNativeWindow(); XID xid3 = window3->GetHost()->GetAcceleratedWidget(); XID xids[] = { xid1, xid2, xid3 }; StackingClientListWaiter waiter(xids, arraysize(xids)); waiter.Wait(); ui::X11EventSource::GetInstance()->DispatchXEvents(); EXPECT_EQ(xid1, FindTopmostXWindowAt(150, 150)); EXPECT_EQ(window1, FindTopmostLocalProcessWindowAt(150, 150)); EXPECT_EQ(xid2, FindTopmostXWindowAt(250, 150)); EXPECT_EQ(NULL, FindTopmostLocalProcessWindowAt(250, 150)); EXPECT_EQ(xid3, FindTopmostXWindowAt(250, 250)); EXPECT_EQ(window3, FindTopmostLocalProcessWindowAt(250, 250)); EXPECT_EQ(xid3, FindTopmostXWindowAt(150, 250)); EXPECT_EQ(window3, FindTopmostLocalProcessWindowAt(150, 250)); EXPECT_EQ(xid3, FindTopmostXWindowAt(150, 195)); EXPECT_EQ(window3, FindTopmostLocalProcessWindowAt(150, 195)); EXPECT_NE(xid1, FindTopmostXWindowAt(1000, 1000)); EXPECT_NE(xid2, FindTopmostXWindowAt(1000, 1000)); EXPECT_NE(xid3, FindTopmostXWindowAt(1000, 1000)); EXPECT_EQ(NULL, FindTopmostLocalProcessWindowAt(1000, 1000)); EXPECT_EQ(window1, FindTopmostLocalProcessWindowWithIgnore(150, 150, window3)); EXPECT_EQ(NULL, 
FindTopmostLocalProcessWindowWithIgnore(250, 250, window3)); EXPECT_EQ(NULL, FindTopmostLocalProcessWindowWithIgnore(150, 250, window3)); EXPECT_EQ(window1, FindTopmostLocalProcessWindowWithIgnore(150, 195, window3)); XDestroyWindow(xdisplay(), xid2); } // Test that the minimized state is properly handled. TEST_F(X11TopmostWindowFinderTest, Minimized) { scoped_ptr<Widget> widget1( CreateAndShowWidget(gfx::Rect(100, 100, 100, 100))); aura::Window* window1 = widget1->GetNativeWindow(); XID xid1 = window1->GetHost()->GetAcceleratedWidget(); XID xid2 = CreateAndShowXWindow(gfx::Rect(300, 100, 100, 100)); XID xids[] = { xid1, xid2 }; StackingClientListWaiter stack_waiter(xids, arraysize(xids)); stack_waiter.Wait(); ui::X11EventSource::GetInstance()->DispatchXEvents(); EXPECT_EQ(xid1, FindTopmostXWindowAt(150, 150)); { MinimizeWaiter minimize_waiter(xid1); XIconifyWindow(xdisplay(), xid1, 0); minimize_waiter.Wait(); } EXPECT_NE(xid1, FindTopmostXWindowAt(150, 150)); EXPECT_NE(xid2, FindTopmostXWindowAt(150, 150)); // Repeat test for an X window which does not belong to a views::Widget // because the code path is different. EXPECT_EQ(xid2, FindTopmostXWindowAt(350, 150)); { MinimizeWaiter minimize_waiter(xid2); XIconifyWindow(xdisplay(), xid2, 0); minimize_waiter.Wait(); } EXPECT_NE(xid1, FindTopmostXWindowAt(350, 150)); EXPECT_NE(xid2, FindTopmostXWindowAt(350, 150)); XDestroyWindow(xdisplay(), xid2); } // Test that non-rectangular windows are properly handled. TEST_F(X11TopmostWindowFinderTest, NonRectangular) { if (!ui::IsShapeExtensionAvailable()) return; scoped_ptr<Widget> widget1( CreateAndShowWidget(gfx::Rect(100, 100, 100, 100))); XID xid1 = widget1->GetNativeWindow()->GetHost()->GetAcceleratedWidget(); SkRegion* skregion1 = new SkRegion; skregion1->op(SkIRect::MakeXYWH(0, 10, 10, 90), SkRegion::kUnion_Op); skregion1->op(SkIRect::MakeXYWH(10, 0, 90, 100), SkRegion::kUnion_Op); // Widget takes ownership of |skregion1|. 
widget1->SetShape(skregion1); SkRegion skregion2; skregion2.op(SkIRect::MakeXYWH(0, 10, 10, 90), SkRegion::kUnion_Op); skregion2.op(SkIRect::MakeXYWH(10, 0, 90, 100), SkRegion::kUnion_Op); XID xid2 = CreateAndShowXWindow(gfx::Rect(300, 100, 100, 100)); gfx::XScopedPtr<REGION, gfx::XObjectDeleter<REGION, int, XDestroyRegion>> region2(gfx::CreateRegionFromSkRegion(skregion2)); XShapeCombineRegion(xdisplay(), xid2, ShapeBounding, 0, 0, region2.get(), false); XID xids[] = { xid1, xid2 }; StackingClientListWaiter stack_waiter(xids, arraysize(xids)); stack_waiter.Wait(); ui::X11EventSource::GetInstance()->DispatchXEvents(); EXPECT_EQ(xid1, FindTopmostXWindowAt(105, 120)); EXPECT_NE(xid1, FindTopmostXWindowAt(105, 105)); EXPECT_NE(xid2, FindTopmostXWindowAt(105, 105)); // Repeat test for an X window which does not belong to a views::Widget // because the code path is different. EXPECT_EQ(xid2, FindTopmostXWindowAt(305, 120)); EXPECT_NE(xid1, FindTopmostXWindowAt(305, 105)); EXPECT_NE(xid2, FindTopmostXWindowAt(305, 105)); XDestroyWindow(xdisplay(), xid2); } // Test that a window with an empty shape are properly handled. TEST_F(X11TopmostWindowFinderTest, NonRectangularEmptyShape) { if (!ui::IsShapeExtensionAvailable()) return; scoped_ptr<Widget> widget1( CreateAndShowWidget(gfx::Rect(100, 100, 100, 100))); XID xid1 = widget1->GetNativeWindow()->GetHost()->GetAcceleratedWidget(); SkRegion* skregion1 = new SkRegion; skregion1->op(SkIRect::MakeXYWH(0, 0, 0, 0), SkRegion::kUnion_Op); // Widget takes ownership of |skregion1|. widget1->SetShape(skregion1); XID xids[] = { xid1 }; StackingClientListWaiter stack_waiter(xids, arraysize(xids)); stack_waiter.Wait(); ui::X11EventSource::GetInstance()->DispatchXEvents(); EXPECT_NE(xid1, FindTopmostXWindowAt(105, 105)); } // Test that setting a Null shape removes the shape. 
TEST_F(X11TopmostWindowFinderTest, NonRectangularNullShape) { if (!ui::IsShapeExtensionAvailable()) return; scoped_ptr<Widget> widget1( CreateAndShowWidget(gfx::Rect(100, 100, 100, 100))); XID xid1 = widget1->GetNativeWindow()->GetHost()->GetAcceleratedWidget(); SkRegion* skregion1 = new SkRegion; skregion1->op(SkIRect::MakeXYWH(0, 0, 0, 0), SkRegion::kUnion_Op); // Widget takes ownership of |skregion1|. widget1->SetShape(skregion1); // Remove the shape - this is now just a normal window. widget1->SetShape(NULL); XID xids[] = { xid1 }; StackingClientListWaiter stack_waiter(xids, arraysize(xids)); stack_waiter.Wait(); ui::X11EventSource::GetInstance()->DispatchXEvents(); EXPECT_EQ(xid1, FindTopmostXWindowAt(105, 105)); } // Test that the TopmostWindowFinder finds windows which belong to menus // (which may or may not belong to Chrome). TEST_F(X11TopmostWindowFinderTest, Menu) { XID xid = CreateAndShowXWindow(gfx::Rect(100, 100, 100, 100)); XID root = DefaultRootWindow(xdisplay()); XSetWindowAttributes swa; swa.override_redirect = True; XID menu_xid = XCreateWindow(xdisplay(), root, 0, 0, 1, 1, 0, // border width CopyFromParent, // depth InputOutput, CopyFromParent, // visual CWOverrideRedirect, &swa); { const char* kAtomsToCache[] = { "_NET_WM_WINDOW_TYPE_MENU", NULL }; ui::X11AtomCache atom_cache(gfx::GetXDisplay(), kAtomsToCache); ui::SetAtomProperty(menu_xid, "_NET_WM_WINDOW_TYPE", "ATOM", atom_cache.GetAtom("_NET_WM_WINDOW_TYPE_MENU")); } ui::SetUseOSWindowFrame(menu_xid, false); ShowAndSetXWindowBounds(menu_xid, gfx::Rect(140, 110, 100, 100)); ui::X11EventSource::GetInstance()->DispatchXEvents(); // |menu_xid| is never added to _NET_CLIENT_LIST_STACKING. 
XID xids[] = { xid }; StackingClientListWaiter stack_waiter(xids, arraysize(xids)); stack_waiter.Wait(); EXPECT_EQ(xid, FindTopmostXWindowAt(110, 110)); EXPECT_EQ(menu_xid, FindTopmostXWindowAt(150, 120)); EXPECT_EQ(menu_xid, FindTopmostXWindowAt(210, 120)); XDestroyWindow(xdisplay(), xid); XDestroyWindow(xdisplay(), menu_xid); } } // namespace views
CapOM/ChromiumGStreamerBackend
ui/views/widget/desktop_aura/x11_topmost_window_finder_interactive_uitest.cc
C++
bsd-3-clause
15,664
/* eslint strict:0 */ var modules = Object.create(null); var inGuard = false; function define(id, factory) { modules[id] = { factory, module: {exports: {}}, isInitialized: false, hasError: false, }; if (__DEV__) { // HMR Object.assign(modules[id].module, { hot: { acceptCallback: null, accept: function(callback) { modules[id].module.hot.acceptCallback = callback; } } }); } } function require(id) { var mod = modules[id]; if (mod && mod.isInitialized) { return mod.module.exports; } return requireImpl(id); } function requireImpl(id) { if (global.ErrorUtils && !inGuard) { inGuard = true; var returnValue; try { returnValue = requireImpl.apply(this, arguments); } catch (e) { global.ErrorUtils.reportFatalError(e); } inGuard = false; return returnValue; } var mod = modules[id]; if (!mod) { var msg = 'Requiring unknown module "' + id + '"'; if (__DEV__) { msg += '. If you are sure the module is there, try restarting the packager.'; } throw new Error(msg); } if (mod.hasError) { throw new Error( 'Requiring module "' + id + '" which threw an exception' ); } try { // We must optimistically mark mod as initialized before running the factory to keep any // require cycles inside the factory from causing an infinite require loop. mod.isInitialized = true; __DEV__ && Systrace().beginEvent('JS_require_' + id); // keep args in sync with with defineModuleCode in // packager/react-packager/src/Resolver/index.js mod.factory.call(global, global, require, mod.module, mod.module.exports); __DEV__ && Systrace().endEvent(); } catch (e) { mod.hasError = true; mod.isInitialized = false; throw e; } return mod.module.exports; } const Systrace = __DEV__ && (() => { var _Systrace; try { _Systrace = require('Systrace'); } catch (e) {} return _Systrace && _Systrace.beginEvent ? 
_Systrace : { beginEvent: () => {}, endEvent: () => {} }; }); global.__d = define; global.require = require; if (__DEV__) { // HMR function accept(id, factory) { var mod = modules[id]; if (!mod) { define(id, factory); return; // new modules don't need to be accepted } if (!mod.module.hot) { console.warn( 'Cannot accept module because Hot Module Replacement ' + 'API was not installed.' ); return; } if (mod.module.hot.acceptCallback) { mod.factory = factory; mod.isInitialized = false; require(id); mod.module.hot.acceptCallback(); } else { console.warn( '[HMR] Module `' + id + '` can\'t be hot reloaded because it ' + 'doesn\'t provide accept callback hook. Reload the app to get the updates.' ); } } global.__accept = accept; }
shinate/react-native
packager/react-packager/src/Resolver/polyfills/require.js
JavaScript
bsd-3-clause
2,910
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

/**
 * @fileoverview
 *
 * It2MeHelpeeChannel relays messages between the Hangouts web page (Hangouts)
 * and the It2Me Native Messaging Host (It2MeHost) for the helpee (the Hangouts
 * participant who is receiving remoting assistance).
 *
 * It runs in the background page and owns two connections: a
 * chrome.runtime.Port representing the connection to Hangouts, and a
 * remoting.It2MeHostFacade representing the connection to the native
 * messaging host.
 *
 * Connection flow: Hangouts connects and exchanges hello/helloResponse, then
 * sends a connect message. This class shows a confirmation dialog, starts the
 * host, and replies with the generated access code once the host reports
 * RECEIVED_ACCESS_CODE. Hangouts passes the access code to the helper side,
 * which uses it to connect. Subsequent host state changes (CONNECTED,
 * DISCONNECTED) are forwarded to Hangouts, and a disconnect message tears the
 * session down.
 *
 * The channel also answers isHostInstalled queries (polled by Hangouts) and
 * starts host downloads on request (downloadHost message).
 */

'use strict';

/** @suppress {duplicate} */
var remoting = remoting || {};

/**
 * @param {chrome.runtime.Port} hangoutPort
 * @param {remoting.It2MeHostFacade} host
 * @param {remoting.HostInstaller} hostInstaller
 * @param {function()} onDisposedCallback Callback to notify the client when
 *     the connection is torn down.
 *
 * @constructor
 * @implements {base.Disposable}
 */
remoting.It2MeHelpeeChannel =
    function(hangoutPort, host, hostInstaller, onDisposedCallback) {
  /**
   * @type {chrome.runtime.Port}
   * @private
   */
  this.hangoutPort_ = hangoutPort;

  /**
   * @type {remoting.It2MeHostFacade}
   * @private
   */
  this.host_ = host;

  /**
   * @type {?remoting.HostInstaller}
   * @private
   */
  this.hostInstaller_ = hostInstaller;

  /**
   * @type {remoting.HostSession.State}
   * @private
   */
  this.hostState_ = remoting.HostSession.State.UNKNOWN;

  /**
   * @type {?function()}
   * @private
   */
  this.onDisposedCallback_ = onDisposedCallback;

  // Bound listener references are kept so they can be removed in dispose().
  this.onHangoutMessageRef_ = this.onHangoutMessage_.bind(this);
  this.onHangoutDisconnectRef_ = this.onHangoutDisconnect_.bind(this);
};

/** @enum {string} */
remoting.It2MeHelpeeChannel.HangoutMessageTypes = {
  CONNECT: 'connect',
  CONNECT_RESPONSE: 'connectResponse',
  DISCONNECT: 'disconnect',
  DOWNLOAD_HOST: 'downloadHost',
  ERROR: 'error',
  HELLO: 'hello',
  HELLO_RESPONSE: 'helloResponse',
  HOST_STATE_CHANGED: 'hostStateChanged',
  IS_HOST_INSTALLED: 'isHostInstalled',
  IS_HOST_INSTALLED_RESPONSE: 'isHostInstalledResponse'
};

/** @enum {string} */
remoting.It2MeHelpeeChannel.Features = {
  REMOTE_ASSISTANCE: 'remoteAssistance'
};

/** Starts listening for messages and disconnects from Hangouts. */
remoting.It2MeHelpeeChannel.prototype.init = function() {
  this.hangoutPort_.onMessage.addListener(this.onHangoutMessageRef_);
  this.hangoutPort_.onDisconnect.addListener(this.onHangoutDisconnectRef_);
};

/**
 * Tears down both connections, notifies Hangouts of the DISCONNECTED state
 * (best effort) and fires the disposal callback. Safe to call repeatedly.
 */
remoting.It2MeHelpeeChannel.prototype.dispose = function() {
  if (this.host_ !== null) {
    this.host_.unhookCallbacks();
    this.host_.disconnect();
    this.host_ = null;
  }

  if (this.hangoutPort_ !== null) {
    this.hangoutPort_.onMessage.removeListener(this.onHangoutMessageRef_);
    this.hangoutPort_.onDisconnect.removeListener(this.onHangoutDisconnectRef_);
    this.hostState_ = remoting.HostSession.State.DISCONNECTED;

    try {
      var Types = remoting.It2MeHelpeeChannel.HangoutMessageTypes;
      this.hangoutPort_.postMessage({
        method: Types.HOST_STATE_CHANGED,
        state: this.hostState_
      });
    } catch (e) {
      // |postMessage| throws if |this.hangoutPort_| is disconnected
      // It is safe to ignore the exception.
    }

    this.hangoutPort_.disconnect();
    this.hangoutPort_ = null;
  }

  if (this.onDisposedCallback_ !== null) {
    this.onDisposedCallback_();
    this.onDisposedCallback_ = null;
  }
};

/**
 * Message Handler for incoming runtime messages from Hangouts.
 *
 * @param {{method:string, data:Object.<string,*>}} message
 * @private
 */
remoting.It2MeHelpeeChannel.prototype.onHangoutMessage_ = function(message) {
  try {
    var Types = remoting.It2MeHelpeeChannel.HangoutMessageTypes;
    switch (message.method) {
      case Types.HELLO:
        this.hangoutPort_.postMessage({
          method: Types.HELLO_RESPONSE,
          supportedFeatures: base.values(remoting.It2MeHelpeeChannel.Features)
        });
        return true;
      case Types.IS_HOST_INSTALLED:
        this.handleIsHostInstalled_(message);
        return true;
      case Types.DOWNLOAD_HOST:
        this.handleDownloadHost_(message);
        return true;
      case Types.CONNECT:
        this.handleConnect_(message);
        return true;
      case Types.DISCONNECT:
        this.dispose();
        return true;
    }
    throw new Error('Unsupported message method=' + message.method);
  } catch (e) {
    // Any failure while handling a message is reported back to Hangouts.
    var error = /** @type {Error} */ e;
    this.sendErrorResponse_(message, error.message);
  }
  return false;
};

/**
 * Queries the |hostInstaller| for the installation status.
 *
 * @param {{method:string, data:Object.<string,*>}} message
 * @private
 */
remoting.It2MeHelpeeChannel.prototype.handleIsHostInstalled_ =
    function(message) {
  /** @type {remoting.It2MeHelpeeChannel} */
  var self = this;

  /** @param {boolean} installed */
  function sendResponse(installed) {
    var Types = remoting.It2MeHelpeeChannel.HangoutMessageTypes;
    self.hangoutPort_.postMessage({
      method: Types.IS_HOST_INSTALLED_RESPONSE,
      result: installed
    });
  }

  this.hostInstaller_.isInstalled().then(
      sendResponse,
      this.sendErrorResponse_.bind(this, message));
};

/**
 * Kicks off a host download; failures are reported back to Hangouts.
 *
 * @param {{method:string, data:Object.<string,*>}} message
 * @private
 */
remoting.It2MeHelpeeChannel.prototype.handleDownloadHost_ = function(message) {
  try {
    this.hostInstaller_.download();
  } catch (e) {
    var error = /** @type {Error} */ e;
    this.sendErrorResponse_(message, error.message);
  }
};

/**
 * Disconnect the session if the |hangoutPort| gets disconnected.
 * @private
 */
remoting.It2MeHelpeeChannel.prototype.onHangoutDisconnect_ = function() {
  this.dispose();
};

/**
 * Connects to the It2Me Native messaging Host and retrieves the access code.
 *
 * @param {{method:string, data:Object.<string,*>}} message
 * @private
 */
remoting.It2MeHelpeeChannel.prototype.handleConnect_ = function(message) {
  var email = getStringAttr(message, 'email');
  if (!email) {
    throw new Error('Missing required parameter: email');
  }

  if (this.hostState_ !== remoting.HostSession.State.UNKNOWN) {
    throw new Error('An existing connection is in progress.');
  }

  this.showConfirmDialog_().then(
      this.initializeHost_.bind(this)
  ).then(
      this.fetchOAuthToken_.bind(this)
  ).then(
      this.connectToHost_.bind(this, email),
      this.sendErrorResponse_.bind(this, message));
};

/**
 * Prompts the user before starting the It2Me Native Messaging Host. This
 * ensures that even if Hangouts is compromised, an attacker cannot start the
 * host without explicit user confirmation.
 *
 * @return {Promise} A promise that resolves to a boolean value, indicating
 *     whether the user accepts the remote assistance or not.
 * @private
 */
remoting.It2MeHelpeeChannel.prototype.showConfirmDialog_ = function() {
  if (base.isAppsV2()) {
    return this.showConfirmDialogV2_();
  } else {
    return this.showConfirmDialogV1_();
  }
};

/**
 * Apps-V1 confirmation, using a plain window.confirm() prompt.
 *
 * @return {Promise} A promise that resolves to a boolean value, indicating
 *     whether the user accepts the remote assistance or not.
 * @private
 */
remoting.It2MeHelpeeChannel.prototype.showConfirmDialogV1_ = function() {
  var messageHeader = l10n.getTranslationOrError(
      /*i18n-content*/'HANGOUTS_CONFIRM_DIALOG_MESSAGE_1');
  var message1 = l10n.getTranslationOrError(
      /*i18n-content*/'HANGOUTS_CONFIRM_DIALOG_MESSAGE_2');
  var message2 = l10n.getTranslationOrError(
      /*i18n-content*/'HANGOUTS_CONFIRM_DIALOG_MESSAGE_3');
  var message = base.escapeHTML(messageHeader) + '\n' +
      '- ' + base.escapeHTML(message1) + '\n' +
      '- ' + base.escapeHTML(message2) + '\n';

  if (window.confirm(message)) {
    return Promise.resolve();
  } else {
    return Promise.reject(new Error(remoting.Error.CANCELLED));
  }
};

/**
 * Apps-V2 confirmation, using the MessageWindow confirm dialog.
 *
 * @return {Promise} A promise that resolves to a boolean value, indicating
 *     whether the user accepts the remote assistance or not.
 * @private
 */
remoting.It2MeHelpeeChannel.prototype.showConfirmDialogV2_ = function() {
  var messageHeader = l10n.getTranslationOrError(
      /*i18n-content*/'HANGOUTS_CONFIRM_DIALOG_MESSAGE_1');
  var message1 = l10n.getTranslationOrError(
      /*i18n-content*/'HANGOUTS_CONFIRM_DIALOG_MESSAGE_2');
  var message2 = l10n.getTranslationOrError(
      /*i18n-content*/'HANGOUTS_CONFIRM_DIALOG_MESSAGE_3');
  var message = '<div>' + base.escapeHTML(messageHeader) + '</div>' +
      '<ul class="insetList">' +
      '<li>' + base.escapeHTML(message1) + '</li>' +
      '<li>' + base.escapeHTML(message2) + '</li>' +
      '</ul>';

  /**
   * @param {function(*=):void} resolve
   * @param {function(*=):void} reject
   */
  return new Promise(function(resolve, reject) {
    /** @param {number} result */
    function confirmDialogCallback(result) {
      if (result === 1) {
        resolve();
      } else {
        reject(new Error(remoting.Error.CANCELLED));
      }
    }
    remoting.MessageWindow.showConfirmWindow(
        '',  // Empty string to use the package name as the dialog title.
        message,
        l10n.getTranslationOrError(
            /*i18n-content*/'HANGOUTS_CONFIRM_DIALOG_ACCEPT'),
        l10n.getTranslationOrError(
            /*i18n-content*/'HANGOUTS_CONFIRM_DIALOG_DECLINE'),
        confirmDialogCallback);
  });
};

/**
 * @return {Promise} A promise that resolves when the host is initialized.
 * @private
 */
remoting.It2MeHelpeeChannel.prototype.initializeHost_ = function() {
  /** @type {remoting.It2MeHostFacade} */
  var host = this.host_;

  /**
   * @param {function(*=):void} resolve
   * @param {function(*=):void} reject
   */
  return new Promise(function(resolve, reject) {
    if (host.initialized()) {
      resolve();
    } else {
      host.initialize(resolve, reject);
    }
  });
};

/**
 * @return {Promise} Promise that resolves with the OAuth token as the value.
 */
remoting.It2MeHelpeeChannel.prototype.fetchOAuthToken_ = function() {
  if (base.isAppsV2()) {
    /**
     * @param {function(*=):void} resolve
     */
    return new Promise(function(resolve) {
      // TODO(jamiewalch): Make this work with {interactive: true} as well.
      chrome.identity.getAuthToken({ 'interactive': false }, resolve);
    });
  } else {
    /**
     * @param {function(*=):void} resolve
     */
    return new Promise(function(resolve) {
      /** @type {remoting.OAuth2} */
      var oauth2 = new remoting.OAuth2();
      var onAuthenticated = function() {
        oauth2.callWithToken(
            resolve,
            function() { throw new Error('Authentication failed.'); });
      };
      /** @param {remoting.Error} error */
      var onError = function(error) {
        if (error != remoting.Error.NOT_AUTHENTICATED) {
          throw new Error('Unexpected error fetch auth token: ' + error);
        }
        oauth2.doAuthRedirect(onAuthenticated);
      };
      oauth2.callWithToken(resolve, onError);
    });
  }
};

/**
 * Connects to the It2Me Native Messaging Host and retrieves the access code
 * in the |onHostStateChanged_| callback.
 *
 * @param {string} email
 * @param {string} accessToken
 * @private
 */
remoting.It2MeHelpeeChannel.prototype.connectToHost_ =
    function(email, accessToken) {
  base.debug.assert(this.host_.initialized());
  // NOTE(review): |onHostConnectError_| is passed unbound here, matching the
  // original code — confirm the facade invokes it with the right context.
  this.host_.connect(
      email,
      'oauth2:' + accessToken,
      this.onHostStateChanged_.bind(this),
      base.doNothing,  // Ignore |onNatPolicyChanged|.
      console.log.bind(console),  // Forward logDebugInfo to console.log.
      remoting.settings.XMPP_SERVER_FOR_IT2ME_HOST,
      remoting.settings.XMPP_SERVER_USE_TLS,
      remoting.settings.DIRECTORY_BOT_JID,
      this.onHostConnectError_);
};

/**
 * @param {remoting.Error} error
 * @private
 */
remoting.It2MeHelpeeChannel.prototype.onHostConnectError_ = function(error) {
  this.sendErrorResponse_(null, error);
};

/**
 * Forwards host state transitions to Hangouts; sends the access code on
 * RECEIVED_ACCESS_CODE and error responses on failure states.
 *
 * @param {remoting.HostSession.State} state
 * @private
 */
remoting.It2MeHelpeeChannel.prototype.onHostStateChanged_ = function(state) {
  this.hostState_ = state;
  var Types = remoting.It2MeHelpeeChannel.HangoutMessageTypes;
  var HostState = remoting.HostSession.State;

  switch (state) {
    case HostState.RECEIVED_ACCESS_CODE:
      var accessCode = this.host_.getAccessCode();
      this.hangoutPort_.postMessage({
        method: Types.CONNECT_RESPONSE,
        accessCode: accessCode
      });
      break;
    case HostState.CONNECTED:
    case HostState.DISCONNECTED:
      this.hangoutPort_.postMessage({
        method: Types.HOST_STATE_CHANGED,
        state: state
      });
      break;
    case HostState.ERROR:
      this.sendErrorResponse_(null, remoting.Error.UNEXPECTED);
      break;
    case HostState.INVALID_DOMAIN_ERROR:
      this.sendErrorResponse_(null, remoting.Error.INVALID_HOST_DOMAIN);
      break;
    default:
      // It is safe to ignore other state changes.
  }
};

/**
 * @param {?{method:string, data:Object.<string,*>}} incomingMessage
 * @param {string|Error} error
 * @private
 */
remoting.It2MeHelpeeChannel.prototype.sendErrorResponse_ =
    function(incomingMessage, error) {
  if (error instanceof Error) {
    error = error.message;
  }
  console.error('Error responding to message method:' +
      (incomingMessage ? incomingMessage.method : 'null') +
      ' error:' + error);
  this.hangoutPort_.postMessage({
    method: remoting.It2MeHelpeeChannel.HangoutMessageTypes.ERROR,
    message: error,
    request: incomingMessage
  });
};
mohamed--abdel-maksoud/chromium.src
remoting/webapp/crd/js/it2me_helpee_channel.js
JavaScript
bsd-3-clause
15,736
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "skia/ext/skia_trace_memory_dump_impl.h" #include "base/trace_event/memory_allocator_dump.h" #include "base/trace_event/memory_dump_manager.h" #include "base/trace_event/process_memory_dump.h" #include "skia/ext/SkDiscardableMemory_chrome.h" namespace skia { namespace { const char kMallocBackingType[] = "malloc"; } SkiaTraceMemoryDumpImpl::SkiaTraceMemoryDumpImpl( base::trace_event::MemoryDumpLevelOfDetail level_of_detail, base::trace_event::ProcessMemoryDump* process_memory_dump) : SkiaTraceMemoryDumpImpl("", level_of_detail, process_memory_dump) {} SkiaTraceMemoryDumpImpl::SkiaTraceMemoryDumpImpl( const std::string& dump_name_prefix, base::trace_event::MemoryDumpLevelOfDetail level_of_detail, base::trace_event::ProcessMemoryDump* process_memory_dump) : dump_name_prefix_(dump_name_prefix), process_memory_dump_(process_memory_dump), request_level_( level_of_detail == base::trace_event::MemoryDumpLevelOfDetail::LIGHT ? 
SkTraceMemoryDump::kLight_LevelOfDetail : SkTraceMemoryDump::kObjectsBreakdowns_LevelOfDetail) {} SkiaTraceMemoryDumpImpl::~SkiaTraceMemoryDumpImpl() = default; void SkiaTraceMemoryDumpImpl::dumpNumericValue(const char* dumpName, const char* valueName, const char* units, uint64_t value) { auto* dump = process_memory_dump_->GetOrCreateAllocatorDump(dumpName); dump->AddScalar(valueName, units, value); } void SkiaTraceMemoryDumpImpl::dumpStringValue(const char* dump_name, const char* value_name, const char* value) { auto* dump = process_memory_dump_->GetOrCreateAllocatorDump(dump_name); dump->AddString(value_name, "", value); } void SkiaTraceMemoryDumpImpl::setMemoryBacking(const char* dumpName, const char* backingType, const char* backingObjectId) { if (strcmp(backingType, kMallocBackingType) == 0) { auto* dump = process_memory_dump_->GetOrCreateAllocatorDump(dumpName); const char* system_allocator_name = base::trace_event::MemoryDumpManager::GetInstance() ->system_allocator_pool_name(); if (system_allocator_name) { process_memory_dump_->AddSuballocation(dump->guid(), system_allocator_name); } } else { NOTREACHED(); } } void SkiaTraceMemoryDumpImpl::setDiscardableMemoryBacking( const char* dumpName, const SkDiscardableMemory& discardableMemoryObject) { std::string name = dump_name_prefix_ + dumpName; DCHECK(!process_memory_dump_->GetAllocatorDump(name)); const SkDiscardableMemoryChrome& discardable_memory_obj = static_cast<const SkDiscardableMemoryChrome&>(discardableMemoryObject); auto* dump = discardable_memory_obj.CreateMemoryAllocatorDump( name.c_str(), process_memory_dump_); DCHECK(dump); } SkTraceMemoryDump::LevelOfDetail SkiaTraceMemoryDumpImpl::getRequestedDetails() const { return request_level_; } bool SkiaTraceMemoryDumpImpl::shouldDumpWrappedObjects() const { // Chrome already dumps objects it imports into Skia. Avoid duplicate dumps // by asking Skia not to dump them. return false; } } // namespace skia
scheib/chromium
skia/ext/skia_trace_memory_dump_impl.cc
C++
bsd-3-clause
3,620
/* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ #include "MissFailoverRoute.h" #include <folly/dynamic.h> #include "mcrouter/lib/config/RouteHandleFactory.h" #include "mcrouter/routes/McRouteHandleBuilder.h" #include "mcrouter/routes/McrouterRouteHandle.h" namespace facebook { namespace memcache { namespace mcrouter { McrouterRouteHandlePtr makeNullRoute(); McrouterRouteHandlePtr makeMissFailoverRoute( std::vector<McrouterRouteHandlePtr> targets) { if (targets.empty()) { return makeNullRoute(); } if (targets.size() == 1) { return std::move(targets[0]); } return makeMcrouterRouteHandle<MissFailoverRoute>(std::move(targets)); } McrouterRouteHandlePtr makeMissFailoverRoute( RouteHandleFactory<McrouterRouteHandleIf>& factory, const folly::dynamic& json) { std::vector<McrouterRouteHandlePtr> children; if (json.isObject()) { if (auto jchildren = json.get_ptr("children")) { children = factory.createList(*jchildren); } } else { children = factory.createList(json); } return makeMissFailoverRoute(std::move(children)); } }}} // facebook::memcache::mcrouter
is00hcw/mcrouter
mcrouter/routes/MissFailoverRoute.cpp
C++
bsd-3-clause
1,389
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ui/gfx/render_text.h" #include <algorithm> #include <climits> #include "base/command_line.h" #include "base/i18n/break_iterator.h" #include "base/logging.h" #include "base/stl_util.h" #include "base/strings/string_util.h" #include "base/strings/utf_string_conversions.h" #include "third_party/icu/source/common/unicode/rbbi.h" #include "third_party/icu/source/common/unicode/utf16.h" #include "third_party/skia/include/core/SkTypeface.h" #include "third_party/skia/include/effects/SkGradientShader.h" #include "ui/gfx/canvas.h" #include "ui/gfx/geometry/insets.h" #include "ui/gfx/geometry/safe_integer_conversions.h" #include "ui/gfx/render_text_harfbuzz.h" #include "ui/gfx/scoped_canvas.h" #include "ui/gfx/skia_util.h" #include "ui/gfx/switches.h" #include "ui/gfx/text_elider.h" #include "ui/gfx/text_utils.h" #include "ui/gfx/utf16_indexing.h" namespace gfx { namespace { // All chars are replaced by this char when the password style is set. // TODO(benrg): GTK uses the first of U+25CF, U+2022, U+2731, U+273A, '*' // that's available in the font (find_invisible_char() in gtkentry.c). const base::char16 kPasswordReplacementChar = '*'; // Default color used for the text and cursor. const SkColor kDefaultColor = SK_ColorBLACK; // Default color used for drawing selection background. const SkColor kDefaultSelectionBackgroundColor = SK_ColorGRAY; // Fraction of the text size to lower a strike through below the baseline. const SkScalar kStrikeThroughOffset = (-SK_Scalar1 * 6 / 21); // Fraction of the text size to lower an underline below the baseline. const SkScalar kUnderlineOffset = (SK_Scalar1 / 9); // Fraction of the text size to use for a strike through or under-line. const SkScalar kLineThickness = (SK_Scalar1 / 18); // Fraction of the text size to use for a top margin of a diagonal strike. 
const SkScalar kDiagonalStrikeMarginOffset = (SK_Scalar1 / 4); // Invalid value of baseline. Assigning this value to |baseline_| causes // re-calculation of baseline. const int kInvalidBaseline = INT_MAX; // Returns the baseline, with which the text best appears vertically centered. int DetermineBaselineCenteringText(const Rect& display_rect, const FontList& font_list) { const int display_height = display_rect.height(); const int font_height = font_list.GetHeight(); // Lower and upper bound of baseline shift as we try to show as much area of // text as possible. In particular case of |display_height| == |font_height|, // we do not want to shift the baseline. const int min_shift = std::min(0, display_height - font_height); const int max_shift = std::abs(display_height - font_height); const int baseline = font_list.GetBaseline(); const int cap_height = font_list.GetCapHeight(); const int internal_leading = baseline - cap_height; // Some platforms don't support getting the cap height, and simply return // the entire font ascent from GetCapHeight(). Centering the ascent makes // the font look too low, so if GetCapHeight() returns the ascent, center // the entire font height instead. const int space = display_height - ((internal_leading != 0) ? cap_height : font_height); const int baseline_shift = space / 2 - internal_leading; return baseline + std::max(min_shift, std::min(max_shift, baseline_shift)); } // Converts |Font::FontStyle| flags to |SkTypeface::Style| flags. SkTypeface::Style ConvertFontStyleToSkiaTypefaceStyle(int font_style) { int skia_style = SkTypeface::kNormal; skia_style |= (font_style & Font::BOLD) ? SkTypeface::kBold : 0; skia_style |= (font_style & Font::ITALIC) ? SkTypeface::kItalic : 0; return static_cast<SkTypeface::Style>(skia_style); } // Given |font| and |display_width|, returns the width of the fade gradient. 
int CalculateFadeGradientWidth(const FontList& font_list, int display_width) { // Fade in/out about 2.5 characters of the beginning/end of the string. // The .5 here is helpful if one of the characters is a space. // Use a quarter of the display width if the display width is very short. const int average_character_width = font_list.GetExpectedTextWidth(1); const double gradient_width = std::min(average_character_width * 2.5, display_width / 4.0); DCHECK_GE(gradient_width, 0.0); return static_cast<int>(floor(gradient_width + 0.5)); } // Appends to |positions| and |colors| values corresponding to the fade over // |fade_rect| from color |c0| to color |c1|. void AddFadeEffect(const Rect& text_rect, const Rect& fade_rect, SkColor c0, SkColor c1, std::vector<SkScalar>* positions, std::vector<SkColor>* colors) { const SkScalar left = static_cast<SkScalar>(fade_rect.x() - text_rect.x()); const SkScalar width = static_cast<SkScalar>(fade_rect.width()); const SkScalar p0 = left / text_rect.width(); const SkScalar p1 = (left + width) / text_rect.width(); // Prepend 0.0 to |positions|, as required by Skia. if (positions->empty() && p0 != 0.0) { positions->push_back(0.0); colors->push_back(c0); } positions->push_back(p0); colors->push_back(c0); positions->push_back(p1); colors->push_back(c1); } // Creates a SkShader to fade the text, with |left_part| specifying the left // fade effect, if any, and |right_part| specifying the right fade effect. skia::RefPtr<SkShader> CreateFadeShader(const Rect& text_rect, const Rect& left_part, const Rect& right_part, SkColor color) { // Fade alpha of 51/255 corresponds to a fade of 0.2 of the original color. 
const SkColor fade_color = SkColorSetA(color, 51); std::vector<SkScalar> positions; std::vector<SkColor> colors; if (!left_part.IsEmpty()) AddFadeEffect(text_rect, left_part, fade_color, color, &positions, &colors); if (!right_part.IsEmpty()) AddFadeEffect(text_rect, right_part, color, fade_color, &positions, &colors); DCHECK(!positions.empty()); // Terminate |positions| with 1.0, as required by Skia. if (positions.back() != 1.0) { positions.push_back(1.0); colors.push_back(colors.back()); } SkPoint points[2]; points[0].iset(text_rect.x(), text_rect.y()); points[1].iset(text_rect.right(), text_rect.y()); return skia::AdoptRef( SkGradientShader::CreateLinear(&points[0], &colors[0], &positions[0], colors.size(), SkShader::kClamp_TileMode)); } // Converts a FontRenderParams::Hinting value to the corresponding // SkPaint::Hinting value. SkPaint::Hinting FontRenderParamsHintingToSkPaintHinting( FontRenderParams::Hinting params_hinting) { switch (params_hinting) { case FontRenderParams::HINTING_NONE: return SkPaint::kNo_Hinting; case FontRenderParams::HINTING_SLIGHT: return SkPaint::kSlight_Hinting; case FontRenderParams::HINTING_MEDIUM: return SkPaint::kNormal_Hinting; case FontRenderParams::HINTING_FULL: return SkPaint::kFull_Hinting; } return SkPaint::kNo_Hinting; } } // namespace namespace internal { // Value of |underline_thickness_| that indicates that underline metrics have // not been set explicitly. 
const SkScalar kUnderlineMetricsNotSet = -1.0f; SkiaTextRenderer::SkiaTextRenderer(Canvas* canvas) : canvas_(canvas), canvas_skia_(canvas->sk_canvas()), underline_thickness_(kUnderlineMetricsNotSet), underline_position_(0.0f) { DCHECK(canvas_skia_); paint_.setTextEncoding(SkPaint::kGlyphID_TextEncoding); paint_.setStyle(SkPaint::kFill_Style); paint_.setAntiAlias(true); paint_.setSubpixelText(true); paint_.setLCDRenderText(true); paint_.setHinting(SkPaint::kNormal_Hinting); } SkiaTextRenderer::~SkiaTextRenderer() { } void SkiaTextRenderer::SetDrawLooper(SkDrawLooper* draw_looper) { paint_.setLooper(draw_looper); } void SkiaTextRenderer::SetFontRenderParams(const FontRenderParams& params, bool background_is_transparent) { ApplyRenderParams(params, background_is_transparent, &paint_); } void SkiaTextRenderer::SetTypeface(SkTypeface* typeface) { paint_.setTypeface(typeface); } void SkiaTextRenderer::SetTextSize(SkScalar size) { paint_.setTextSize(size); } void SkiaTextRenderer::SetFontFamilyWithStyle(const std::string& family, int style) { DCHECK(!family.empty()); skia::RefPtr<SkTypeface> typeface = CreateSkiaTypeface(family.c_str(), style); if (typeface) { // |paint_| adds its own ref. So don't |release()| it from the ref ptr here. SetTypeface(typeface.get()); // Enable fake bold text if bold style is needed but new typeface does not // have it. 
paint_.setFakeBoldText((style & Font::BOLD) && !typeface->isBold()); } } void SkiaTextRenderer::SetForegroundColor(SkColor foreground) { paint_.setColor(foreground); } void SkiaTextRenderer::SetShader(SkShader* shader) { paint_.setShader(shader); } void SkiaTextRenderer::SetUnderlineMetrics(SkScalar thickness, SkScalar position) { underline_thickness_ = thickness; underline_position_ = position; } void SkiaTextRenderer::DrawPosText(const SkPoint* pos, const uint16* glyphs, size_t glyph_count) { const size_t byte_length = glyph_count * sizeof(glyphs[0]); canvas_skia_->drawPosText(&glyphs[0], byte_length, &pos[0], paint_); } void SkiaTextRenderer::DrawDecorations(int x, int y, int width, bool underline, bool strike, bool diagonal_strike) { if (underline) DrawUnderline(x, y, width); if (strike) DrawStrike(x, y, width); if (diagonal_strike) { if (!diagonal_) diagonal_.reset(new DiagonalStrike(canvas_, Point(x, y), paint_)); diagonal_->AddPiece(width, paint_.getColor()); } else if (diagonal_) { EndDiagonalStrike(); } } void SkiaTextRenderer::EndDiagonalStrike() { if (diagonal_) { diagonal_->Draw(); diagonal_.reset(); } } void SkiaTextRenderer::DrawUnderline(int x, int y, int width) { SkScalar x_scalar = SkIntToScalar(x); SkRect r = SkRect::MakeLTRB( x_scalar, y + underline_position_, x_scalar + width, y + underline_position_ + underline_thickness_); if (underline_thickness_ == kUnderlineMetricsNotSet) { const SkScalar text_size = paint_.getTextSize(); r.fTop = SkScalarMulAdd(text_size, kUnderlineOffset, y); r.fBottom = r.fTop + SkScalarMul(text_size, kLineThickness); } canvas_skia_->drawRect(r, paint_); } void SkiaTextRenderer::DrawStrike(int x, int y, int width) const { const SkScalar text_size = paint_.getTextSize(); const SkScalar height = SkScalarMul(text_size, kLineThickness); const SkScalar offset = SkScalarMulAdd(text_size, kStrikeThroughOffset, y); SkScalar x_scalar = SkIntToScalar(x); const SkRect r = SkRect::MakeLTRB(x_scalar, offset, x_scalar + width, offset 
+ height); canvas_skia_->drawRect(r, paint_); } SkiaTextRenderer::DiagonalStrike::DiagonalStrike(Canvas* canvas, Point start, const SkPaint& paint) : canvas_(canvas), start_(start), paint_(paint), total_length_(0) { } SkiaTextRenderer::DiagonalStrike::~DiagonalStrike() { } void SkiaTextRenderer::DiagonalStrike::AddPiece(int length, SkColor color) { pieces_.push_back(Piece(length, color)); total_length_ += length; } void SkiaTextRenderer::DiagonalStrike::Draw() { const SkScalar text_size = paint_.getTextSize(); const SkScalar offset = SkScalarMul(text_size, kDiagonalStrikeMarginOffset); const int thickness = SkScalarCeilToInt(SkScalarMul(text_size, kLineThickness) * 2); const int height = SkScalarCeilToInt(text_size - offset); const Point end = start_ + Vector2d(total_length_, -height); const int clip_height = height + 2 * thickness; paint_.setAntiAlias(true); paint_.setStrokeWidth(SkIntToScalar(thickness)); const bool clipped = pieces_.size() > 1; SkCanvas* sk_canvas = canvas_->sk_canvas(); int x = start_.x(); for (size_t i = 0; i < pieces_.size(); ++i) { paint_.setColor(pieces_[i].second); if (clipped) { canvas_->Save(); sk_canvas->clipRect(RectToSkRect( Rect(x, end.y() - thickness, pieces_[i].first, clip_height))); } canvas_->DrawLine(start_, end, paint_); if (clipped) canvas_->Restore(); x += pieces_[i].first; } } StyleIterator::StyleIterator(const BreakList<SkColor>& colors, const std::vector<BreakList<bool> >& styles) : colors_(colors), styles_(styles) { color_ = colors_.breaks().begin(); for (size_t i = 0; i < styles_.size(); ++i) style_.push_back(styles_[i].breaks().begin()); } StyleIterator::~StyleIterator() {} Range StyleIterator::GetRange() const { Range range(colors_.GetRange(color_)); for (size_t i = 0; i < NUM_TEXT_STYLES; ++i) range = range.Intersect(styles_[i].GetRange(style_[i])); return range; } void StyleIterator::UpdatePosition(size_t position) { color_ = colors_.GetBreak(position); for (size_t i = 0; i < NUM_TEXT_STYLES; ++i) style_[i] = 
styles_[i].GetBreak(position); } LineSegment::LineSegment() : run(0) {} LineSegment::~LineSegment() {} Line::Line() : preceding_heights(0), baseline(0) {} Line::~Line() {} skia::RefPtr<SkTypeface> CreateSkiaTypeface(const std::string& family, int style) { SkTypeface::Style skia_style = ConvertFontStyleToSkiaTypefaceStyle(style); return skia::AdoptRef(SkTypeface::CreateFromName(family.c_str(), skia_style)); } void ApplyRenderParams(const FontRenderParams& params, bool background_is_transparent, SkPaint* paint) { paint->setAntiAlias(params.antialiasing); paint->setLCDRenderText(!background_is_transparent && params.subpixel_rendering != FontRenderParams::SUBPIXEL_RENDERING_NONE); paint->setSubpixelText(params.subpixel_positioning); paint->setAutohinted(params.autohinter); paint->setHinting(FontRenderParamsHintingToSkPaintHinting(params.hinting)); } } // namespace internal RenderText::~RenderText() { } RenderText* RenderText::CreateInstance() { #if defined(OS_MACOSX) static const bool use_harfbuzz = base::CommandLine::ForCurrentProcess()->HasSwitch( switches::kEnableHarfBuzzRenderText); #else static const bool use_harfbuzz = !base::CommandLine::ForCurrentProcess()->HasSwitch( switches::kDisableHarfBuzzRenderText); #endif return use_harfbuzz ? new RenderTextHarfBuzz : CreateNativeInstance(); } RenderText* RenderText::CreateInstanceForEditing() { static const bool use_harfbuzz = !base::CommandLine::ForCurrentProcess()->HasSwitch( switches::kDisableHarfBuzzRenderText); return use_harfbuzz ? new RenderTextHarfBuzz : CreateNativeInstance(); } void RenderText::SetText(const base::string16& text) { DCHECK(!composition_range_.IsValid()); if (text_ == text) return; text_ = text; // Adjust ranged styles and colors to accommodate a new text length. // Clear style ranges as they might break new text graphemes and apply // the first style to the whole text instead. 
const size_t text_length = text_.length();
  colors_.SetMax(text_length);
  for (size_t style = 0; style < NUM_TEXT_STYLES; ++style) {
    BreakList<bool>& break_list = styles_[style];
    break_list.SetValue(break_list.breaks().begin()->second);
    break_list.SetMax(text_length);
  }
  cached_bounds_and_offset_valid_ = false;

  // Reset selection model. SetText should always followed by SetSelectionModel
  // or SetCursorPosition in upper layer.
  SetSelectionModel(SelectionModel());

  // Invalidate the cached text direction if it depends on the text contents.
  if (directionality_mode_ == DIRECTIONALITY_FROM_TEXT)
    text_direction_ = base::i18n::UNKNOWN_DIRECTION;

  obscured_reveal_index_ = -1;
  UpdateLayoutText();
}

void RenderText::SetHorizontalAlignment(HorizontalAlignment alignment) {
  if (horizontal_alignment_ != alignment) {
    horizontal_alignment_ = alignment;
    display_offset_ = Vector2d();
    cached_bounds_and_offset_valid_ = false;
  }
}

// Adopts |font_list| and mirrors its font style bits into the BOLD/ITALIC/
// UNDERLINE style BreakLists; invalidates the cached baseline and layout.
void RenderText::SetFontList(const FontList& font_list) {
  font_list_ = font_list;
  const int font_style = font_list.GetFontStyle();
  SetStyle(BOLD, (font_style & gfx::Font::BOLD) != 0);
  SetStyle(ITALIC, (font_style & gfx::Font::ITALIC) != 0);
  SetStyle(UNDERLINE, (font_style & gfx::Font::UNDERLINE) != 0);
  baseline_ = kInvalidBaseline;
  cached_bounds_and_offset_valid_ = false;
  ResetLayout();
}

void RenderText::SetCursorEnabled(bool cursor_enabled) {
  cursor_enabled_ = cursor_enabled;
  cached_bounds_and_offset_valid_ = false;
}

void RenderText::ToggleInsertMode() {
  insert_mode_ = !insert_mode_;
  cached_bounds_and_offset_valid_ = false;
}

// Toggles password-style obscuring; any previously revealed character is
// forgotten.
void RenderText::SetObscured(bool obscured) {
  if (obscured != obscured_) {
    obscured_ = obscured;
    obscured_reveal_index_ = -1;
    cached_bounds_and_offset_valid_ = false;
    UpdateLayoutText();
  }
}

// Reveals the obscured character at |index| in the layout text; -1 reveals
// none.
void RenderText::SetObscuredRevealIndex(int index) {
  if (obscured_reveal_index_ == index)
    return;

  obscured_reveal_index_ = index;
  cached_bounds_and_offset_valid_ = false;
  UpdateLayoutText();
}

void RenderText::SetReplaceNewlineCharsWithSymbols(bool replace) {
  replace_newline_chars_with_symbols_ = replace;
  cached_bounds_and_offset_valid_ = false;
  UpdateLayoutText();
}

void RenderText::SetMultiline(bool multiline) {
  if (multiline != multiline_) {
    multiline_ = multiline;
    cached_bounds_and_offset_valid_ = false;
    lines_.clear();
  }
}

void RenderText::SetElideBehavior(ElideBehavior elide_behavior) {
  // TODO(skanuj) : Add a test for triggering layout change.
  if (elide_behavior_ != elide_behavior) {
    elide_behavior_ = elide_behavior;
    UpdateLayoutText();
  }
}

// Updates the display rect; the layout text is re-elided only when an elide
// behavior is active.
void RenderText::SetDisplayRect(const Rect& r) {
  if (r != display_rect_) {
    display_rect_ = r;
    baseline_ = kInvalidBaseline;
    cached_bounds_and_offset_valid_ = false;
    lines_.clear();
    if (elide_behavior_ != NO_ELIDE)
      UpdateLayoutText();
  }
}

void RenderText::SetCursorPosition(size_t position) {
  MoveCursorTo(position, false);
}

// Moves the cursor by |break_type| granularity in the visual |direction|,
// optionally extending the selection.
void RenderText::MoveCursor(BreakType break_type,
                            VisualCursorDirection direction,
                            bool select) {
  SelectionModel cursor(cursor_position(), selection_model_.caret_affinity());
  // Cancelling a selection moves to the edge of the selection.
  if (break_type != LINE_BREAK && !selection().is_empty() && !select) {
    SelectionModel selection_start = GetSelectionModelForSelectionStart();
    int start_x = GetCursorBounds(selection_start, true).x();
    int cursor_x = GetCursorBounds(cursor, true).x();
    // Use the selection start if it is left (when |direction| is CURSOR_LEFT)
    // or right (when |direction| is CURSOR_RIGHT) of the selection end.
    if (direction == CURSOR_RIGHT ? start_x > cursor_x : start_x < cursor_x)
      cursor = selection_start;
    // Use the nearest word boundary in the proper |direction| for word breaks.
    if (break_type == WORD_BREAK)
      cursor = GetAdjacentSelectionModel(cursor, break_type, direction);
    // Use an adjacent selection model if the cursor is not at a valid position.
if (!IsValidCursorIndex(cursor.caret_pos()))
      cursor = GetAdjacentSelectionModel(cursor, CHARACTER_BREAK, direction);
  } else {
    cursor = GetAdjacentSelectionModel(cursor, break_type, direction);
  }
  if (select)
    cursor.set_selection_start(selection().start());
  MoveCursorTo(cursor);
}

// Moves the cursor/selection to |model|, clamping both ends to the text
// length. Returns true if the selection model actually changed.
bool RenderText::MoveCursorTo(const SelectionModel& model) {
  // Enforce valid selection model components.
  size_t text_length = text().length();
  Range range(std::min(model.selection().start(), text_length),
              std::min(model.caret_pos(), text_length));
  // The current model only supports caret positions at valid cursor indices.
  if (!IsValidCursorIndex(range.start()) || !IsValidCursorIndex(range.end()))
    return false;
  SelectionModel sel(range, model.caret_affinity());
  bool changed = sel != selection_model_;
  SetSelectionModel(sel);
  return changed;
}

// Selects |range| (clamped to the text length). Bounds may fall mid-grapheme
// but must be valid logical indices; returns false otherwise.
bool RenderText::SelectRange(const Range& range) {
  Range sel(std::min(range.start(), text().length()),
            std::min(range.end(), text().length()));
  // Allow selection bounds at valid indicies amid multi-character graphemes.
  if (!IsValidLogicalIndex(sel.start()) || !IsValidLogicalIndex(sel.end()))
    return false;
  LogicalCursorDirection affinity =
      (sel.is_reversed() || sel.is_empty()) ? CURSOR_FORWARD : CURSOR_BACKWARD;
  SetSelectionModel(SelectionModel(sel, affinity));
  return true;
}

// Returns true if |point| hits the current (non-empty) selection.
bool RenderText::IsPointInSelection(const Point& point) {
  if (selection().is_empty())
    return false;
  SelectionModel cursor = FindCursorPosition(point);
  return RangeContainsCaret(
      selection(), cursor.caret_pos(), cursor.caret_affinity());
}

// Collapses the selection to the current cursor position.
void RenderText::ClearSelection() {
  SetSelectionModel(SelectionModel(cursor_position(),
                                   selection_model_.caret_affinity()));
}

void RenderText::SelectAll(bool reversed) {
  const size_t length = text().length();
  const Range all = reversed ? Range(length, 0) : Range(0, length);
  const bool success = SelectRange(all);
  DCHECK(success);
}

// Expands the selection outward to word boundaries around the current
// selection. Obscured (password) text selects everything instead, since word
// boundaries would leak information about the hidden content.
void RenderText::SelectWord() {
  if (obscured_) {
    SelectAll(false);
    return;
  }

  size_t selection_max = selection().GetMax();

  base::i18n::BreakIterator iter(text(), base::i18n::BreakIterator::BREAK_WORD);
  bool success = iter.Init();
  DCHECK(success);
  if (!success)
    return;

  size_t selection_min = selection().GetMin();
  if (selection_min == text().length() && selection_min != 0)
    --selection_min;

  // Walk backwards to the nearest word boundary at or before the selection.
  for (; selection_min != 0; --selection_min) {
    if (iter.IsStartOfWord(selection_min) ||
        iter.IsEndOfWord(selection_min))
      break;
  }

  if (selection_min == selection_max && selection_max != text().length())
    ++selection_max;

  // Walk forwards to the nearest word boundary at or after the selection.
  for (; selection_max < text().length(); ++selection_max)
    if (iter.IsEndOfWord(selection_max) || iter.IsStartOfWord(selection_max))
      break;

  const bool reversed = selection().is_reversed();
  MoveCursorTo(reversed ? selection_max : selection_min, false);
  MoveCursorTo(reversed ? selection_min : selection_max, true);
}

const Range& RenderText::GetCompositionRange() const {
  return composition_range_;
}

// Sets the IME composition range; it must be invalid (no composition) or lie
// entirely within the text.
void RenderText::SetCompositionRange(const Range& composition_range) {
  CHECK(!composition_range.IsValid() ||
        Range(0, text_.length()).Contains(composition_range));
  composition_range_.set_end(composition_range.end());
  composition_range_.set_start(composition_range.start());
  ResetLayout();
}

void RenderText::SetColor(SkColor value) {
  colors_.SetValue(value);
}

void RenderText::ApplyColor(SkColor value, const Range& range) {
  colors_.ApplyValue(value, range);
}

void RenderText::SetStyle(TextStyle style, bool value) {
  styles_[style].SetValue(value);

  cached_bounds_and_offset_valid_ = false;
  ResetLayout();
}

// Applies |style| over |range|, snapping the bounds outward to grapheme
// boundaries so ligatures are not split mid-glyph.
void RenderText::ApplyStyle(TextStyle style, bool value, const Range& range) {
  // Do not change styles mid-grapheme to avoid breaking ligatures.
  const size_t start = IsValidCursorIndex(range.start()) ?
range.start() : IndexOfAdjacentGrapheme(range.start(), CURSOR_BACKWARD);
  const size_t end = IsValidCursorIndex(range.end()) ?
      range.end() : IndexOfAdjacentGrapheme(range.end(), CURSOR_FORWARD);
  styles_[style].ApplyValue(value, Range(start, end));

  cached_bounds_and_offset_valid_ = false;
  ResetLayout();
}

// Returns true only if |style| is applied uniformly over the whole text.
bool RenderText::GetStyle(TextStyle style) const {
  return (styles_[style].breaks().size() == 1) &&
      styles_[style].breaks().front().second;
}

void RenderText::SetDirectionalityMode(DirectionalityMode mode) {
  if (mode == directionality_mode_)
    return;

  directionality_mode_ = mode;
  text_direction_ = base::i18n::UNKNOWN_DIRECTION;
  cached_bounds_and_offset_valid_ = false;
  ResetLayout();
}

// Lazily resolves and caches the text direction according to the current
// directionality mode.
base::i18n::TextDirection RenderText::GetTextDirection() {
  if (text_direction_ == base::i18n::UNKNOWN_DIRECTION) {
    switch (directionality_mode_) {
      case DIRECTIONALITY_FROM_TEXT:
        // Derive the direction from the display text, which differs from text()
        // in the case of obscured (password) textfields.
        text_direction_ =
            base::i18n::GetFirstStrongCharacterDirection(GetLayoutText());
        break;
      case DIRECTIONALITY_FROM_UI:
        text_direction_ = base::i18n::IsRTL() ? base::i18n::RIGHT_TO_LEFT :
                                                base::i18n::LEFT_TO_RIGHT;
        break;
      case DIRECTIONALITY_FORCE_LTR:
        text_direction_ = base::i18n::LEFT_TO_RIGHT;
        break;
      case DIRECTIONALITY_FORCE_RTL:
        text_direction_ = base::i18n::RIGHT_TO_LEFT;
        break;
      default:
        NOTREACHED();
    }
  }

  return text_direction_;
}

VisualCursorDirection RenderText::GetVisualDirectionOfLogicalEnd() {
  return GetTextDirection() == base::i18n::LEFT_TO_RIGHT ?
      CURSOR_RIGHT : CURSOR_LEFT;
}

SizeF RenderText::GetStringSizeF() {
  return GetStringSize();
}

// Content width, including the one-pixel cursor allowance when enabled.
float RenderText::GetContentWidthF() {
  const float string_size = GetStringSizeF().width();
  // The cursor is drawn one pixel beyond the int-enclosed text bounds.
  return cursor_enabled_ ? std::ceil(string_size) + 1 : string_size;
}

int RenderText::GetContentWidth() {
  return ToCeiledInt(GetContentWidthF());
}

// Lazily computes and caches the text baseline for the display rect.
int RenderText::GetBaseline() {
  if (baseline_ == kInvalidBaseline)
    baseline_ = DetermineBaselineCenteringText(display_rect(), font_list());
  DCHECK_NE(kInvalidBaseline, baseline_);
  return baseline_;
}

// Paints selection, cursor and glyphs, clipped to the display rect (inset by
// the shadow margins) when clipping is enabled.
void RenderText::Draw(Canvas* canvas) {
  EnsureLayout();

  if (clip_to_display_rect()) {
    Rect clip_rect(display_rect());
    clip_rect.Inset(ShadowValue::GetMargin(shadows_));

    canvas->Save();
    canvas->ClipRect(clip_rect);
  }

  if (!text().empty() && focused())
    DrawSelection(canvas);

  if (cursor_enabled() && cursor_visible() && focused())
    DrawCursor(canvas, selection_model_);

  if (!text().empty())
    DrawVisualText(canvas);

  if (clip_to_display_rect())
    canvas->Restore();
}

void RenderText::DrawCursor(Canvas* canvas, const SelectionModel& position) {
  // Paint cursor. Replace cursor is drawn as rectangle for now.
  // TODO(msw): Draw a better cursor with a better indication of association.
  canvas->FillRect(GetCursorBounds(position, true), cursor_color_);
}

// A logical index is valid when it sits on a code point boundary and is not
// hidden inside the truncated (ellipsized) tail of the layout text.
bool RenderText::IsValidLogicalIndex(size_t index) {
  // Check that the index is at a valid code point (not mid-surrgate-pair) and
  // that it's not truncated from the layout text (its glyph may be shown).
  //
  // Indices within truncated text are disallowed so users can easily interact
  // with the underlying truncated text using the ellipsis as a proxy. This lets
  // users select all text, select the truncated text, and transition from the
  // last rendered glyph to the end of the text without getting invisible cursor
  // positions nor needing unbounded arrow key presses to traverse the ellipsis.
  return index == 0 || index == text().length() ||
      (index < text().length() &&
       (truncate_length_ == 0 || index < truncate_length_) &&
       IsValidCodePointIndex(text(), index));
}

// Returns the cursor rect for |caret| in view coordinates; in overtype mode
// the rect spans the glyph that would be replaced.
Rect RenderText::GetCursorBounds(const SelectionModel& caret,
                                 bool insert_mode) {
  // TODO(ckocagil): Support multiline.
// This function should return the height of the line the cursor is on.
  // |GetStringSize()| now returns the multiline size, eliminate its use here.
  EnsureLayout();
  size_t caret_pos = caret.caret_pos();
  DCHECK(IsValidLogicalIndex(caret_pos));
  // In overtype mode, ignore the affinity and always indicate that we will
  // overtype the next character.
  LogicalCursorDirection caret_affinity =
      insert_mode ? caret.caret_affinity() : CURSOR_FORWARD;
  int x = 0, width = 1;
  Size size = GetStringSize();
  if (caret_pos == (caret_affinity == CURSOR_BACKWARD ? 0 : text().length())) {
    // The caret is attached to the boundary. Always return a 1-dip width caret,
    // since there is nothing to overtype.
    if ((GetTextDirection() == base::i18n::RIGHT_TO_LEFT) == (caret_pos == 0))
      x = size.width();
  } else {
    size_t grapheme_start = (caret_affinity == CURSOR_FORWARD) ?
        caret_pos : IndexOfAdjacentGrapheme(caret_pos, CURSOR_BACKWARD);
    Range xspan(GetGlyphBounds(grapheme_start));
    if (insert_mode) {
      x = (caret_affinity == CURSOR_BACKWARD) ? xspan.end() : xspan.start();
    } else {  // overtype mode
      x = xspan.GetMin();
      width = xspan.length();
    }
  }
  return Rect(ToViewPoint(Point(x, 0)), Size(width, size.height()));
}

const Rect& RenderText::GetUpdatedCursorBounds() {
  UpdateCachedBoundsAndOffset();
  return cursor_bounds_;
}

// Returns the nearest valid cursor index adjacent to |index| in the given
// logical |direction|, clamped to [0, text length].
size_t RenderText::IndexOfAdjacentGrapheme(size_t index,
                                           LogicalCursorDirection direction) {
  if (index > text().length())
    return text().length();

  EnsureLayout();

  if (direction == CURSOR_FORWARD) {
    while (index < text().length()) {
      index++;
      if (IsValidCursorIndex(index))
        return index;
    }
    return text().length();
  }

  while (index > 0) {
    index--;
    if (IsValidCursorIndex(index))
      return index;
  }
  return 0;
}

// Returns a selection model anchored at the selection start (the fixed end of
// the selection); the affinity points into the selected range.
SelectionModel RenderText::GetSelectionModelForSelectionStart() {
  const Range& sel = selection();
  if (sel.is_empty())
    return selection_model_;
  return SelectionModel(sel.start(),
                        sel.is_reversed() ? CURSOR_BACKWARD : CURSOR_FORWARD);
}

const Vector2d& RenderText::GetUpdatedDisplayOffset() {
  UpdateCachedBoundsAndOffset();
  return display_offset_;
}

// Sets the horizontal scroll offset, clamped so the text cannot be scrolled
// past what the current alignment allows; marks the cached bounds valid and
// recomputes the cursor bounds with the new offset.
void RenderText::SetDisplayOffset(int horizontal_offset) {
  const int extra_content = GetContentWidth() - display_rect_.width();
  const int cursor_width = cursor_enabled_ ? 1 : 0;

  int min_offset = 0;
  int max_offset = 0;
  if (extra_content > 0) {
    switch (GetCurrentHorizontalAlignment()) {
      case ALIGN_LEFT:
        min_offset = -extra_content;
        break;
      case ALIGN_RIGHT:
        max_offset = extra_content;
        break;
      case ALIGN_CENTER:
        // The extra space reserved for cursor at the end of the text is ignored
        // when centering text. So, to calculate the valid range for offset, we
        // exclude that extra space, calculate the range, and add it back to the
        // range (if cursor is enabled).
        min_offset = -(extra_content - cursor_width + 1) / 2 - cursor_width;
        max_offset = (extra_content - cursor_width) / 2;
        break;
      default:
        break;
    }
  }
  if (horizontal_offset < min_offset)
    horizontal_offset = min_offset;
  else if (horizontal_offset > max_offset)
    horizontal_offset = max_offset;

  cached_bounds_and_offset_valid_ = true;
  display_offset_.set_x(horizontal_offset);
  cursor_bounds_ = GetCursorBounds(selection_model_, insert_mode_);
}

RenderText::RenderText()
    : horizontal_alignment_(base::i18n::IsRTL() ?
ALIGN_RIGHT : ALIGN_LEFT),
      directionality_mode_(DIRECTIONALITY_FROM_TEXT),
      text_direction_(base::i18n::UNKNOWN_DIRECTION),
      cursor_enabled_(true),
      cursor_visible_(false),
      insert_mode_(true),
      cursor_color_(kDefaultColor),
      selection_color_(kDefaultColor),
      selection_background_focused_color_(kDefaultSelectionBackgroundColor),
      focused_(false),
      composition_range_(Range::InvalidRange()),
      colors_(kDefaultColor),
      styles_(NUM_TEXT_STYLES),
      composition_and_selection_styles_applied_(false),
      obscured_(false),
      obscured_reveal_index_(-1),
      truncate_length_(0),
      elide_behavior_(NO_ELIDE),
      replace_newline_chars_with_symbols_(true),
      multiline_(false),
      background_is_transparent_(false),
      clip_to_display_rect_(true),
      baseline_(kInvalidBaseline),
      cached_bounds_and_offset_valid_(false) {
}

// Dispatches to the per-break-type adjacent-selection-model helpers.
SelectionModel RenderText::GetAdjacentSelectionModel(
    const SelectionModel& current,
    BreakType break_type,
    VisualCursorDirection direction) {
  EnsureLayout();

  if (break_type == LINE_BREAK || text().empty())
    return EdgeSelectionModel(direction);
  if (break_type == CHARACTER_BREAK)
    return AdjacentCharSelectionModel(current, direction);
  DCHECK(break_type == WORD_BREAK);
  return AdjacentWordSelectionModel(current, direction);
}

// Returns the selection model at the visual edge of the text in |direction|.
SelectionModel RenderText::EdgeSelectionModel(
    VisualCursorDirection direction) {
  if (direction == GetVisualDirectionOfLogicalEnd())
    return SelectionModel(text().length(), CURSOR_FORWARD);
  return SelectionModel(0, CURSOR_BACKWARD);
}

void RenderText::SetSelectionModel(const SelectionModel& model) {
  DCHECK_LE(model.selection().GetMax(), text().length());
  selection_model_ = model;

  cached_bounds_and_offset_valid_ = false;
}

const base::string16& RenderText::GetLayoutText() const {
  return layout_text_;
}

// Lazily computes the line break positions over the layout text. The cache is
// reused until its max is reset to 0 (done when the layout text changes).
const BreakList<size_t>& RenderText::GetLineBreaks() {
  if (line_breaks_.max() != 0)
    return line_breaks_;

  const base::string16& layout_text = GetLayoutText();
  const size_t text_length = layout_text.length();
  line_breaks_.SetValue(0);
  line_breaks_.SetMax(text_length);
  base::i18n::BreakIterator iter(layout_text,
                                 base::i18n::BreakIterator::BREAK_LINE);
  const bool success = iter.Init();
  DCHECK(success);
  if (success) {
    do {
      line_breaks_.ApplyValue(iter.pos(), Range(iter.pos(), text_length));
    } while (iter.Advance());
  }
  return line_breaks_;
}

// Temporarily overlays the composition underline and selection color onto the
// style/color BreakLists; the saved state is restored by
// UndoCompositionAndSelectionStyles().
void RenderText::ApplyCompositionAndSelectionStyles() {
  // Save the underline and color breaks to undo the temporary styles later.
  DCHECK(!composition_and_selection_styles_applied_);
  saved_colors_ = colors_;
  saved_underlines_ = styles_[UNDERLINE];

  // Apply an underline to the composition range in |underlines|.
  if (composition_range_.IsValid() && !composition_range_.is_empty())
    styles_[UNDERLINE].ApplyValue(true, composition_range_);

  // Apply the selected text color to the [un-reversed] selection range.
  if (!selection().is_empty() && focused()) {
    const Range range(selection().GetMin(), selection().GetMax());
    colors_.ApplyValue(selection_color_, range);
  }
  composition_and_selection_styles_applied_ = true;
}

void RenderText::UndoCompositionAndSelectionStyles() {
  // Restore the underline and color breaks to undo the temporary styles.
  DCHECK(composition_and_selection_styles_applied_);
  colors_ = saved_colors_;
  styles_[UNDERLINE] = saved_underlines_;
  composition_and_selection_styles_applied_ = false;
}

// Offset from the view origin to the origin of |line_number| in text space:
// display rect origin + scroll offset (single line) or stacked line heights
// (multiline) + alignment offset.
Vector2d RenderText::GetLineOffset(size_t line_number) {
  Vector2d offset = display_rect().OffsetFromOrigin();
  // TODO(ckocagil): Apply the display offset for multiline scrolling.
  if (!multiline())
    offset.Add(GetUpdatedDisplayOffset());
  else
    offset.Add(Vector2d(0, lines_[line_number].preceding_heights));
  offset.Add(GetAlignmentOffset(line_number));
  return offset;
}

Point RenderText::ToTextPoint(const Point& point) {
  return point - GetLineOffset(0);
  // TODO(ckocagil): Convert multiline view space points to text space.
}

// Converts a text-space point to view space; in multiline mode the x
// coordinate is walked across the accumulated line widths to find the line.
Point RenderText::ToViewPoint(const Point& point) {
  if (!multiline())
    return point + GetLineOffset(0);

  // TODO(ckocagil): Traverse individual line segments for RTL support.
DCHECK(!lines_.empty());
  int x = point.x();
  size_t line = 0;
  for (; line < lines_.size() && x > lines_[line].size.width(); ++line)
    x -= lines_[line].size.width();
  return Point(x, point.y()) + GetLineOffset(line);
}

// Maps a horizontal text-space range |x| to view-space rects, one rect per
// intersected line segment in multiline mode.
std::vector<Rect> RenderText::TextBoundsToViewBounds(const Range& x) {
  std::vector<Rect> rects;

  if (!multiline()) {
    rects.push_back(Rect(ToViewPoint(Point(x.GetMin(), 0)),
                         Size(x.length(), GetStringSize().height())));
    return rects;
  }

  EnsureLayout();

  // Each line segment keeps its position in text coordinates. Traverse all line
  // segments and if the segment intersects with the given range, add the view
  // rect corresponding to the intersection to |rects|.
  for (size_t line = 0; line < lines_.size(); ++line) {
    int line_x = 0;
    const Vector2d offset = GetLineOffset(line);
    for (size_t i = 0; i < lines_[line].segments.size(); ++i) {
      const internal::LineSegment* segment = &lines_[line].segments[i];
      const Range intersection = segment->x_range.Intersect(x);
      if (!intersection.is_empty()) {
        Rect rect(line_x + intersection.start() - segment->x_range.start(),
                  0, intersection.length(), lines_[line].size.height());
        rects.push_back(rect + offset);
      }
      line_x += segment->x_range.length();
    }
  }

  return rects;
}

// Resolves ALIGN_TO_HEAD to ALIGN_LEFT/ALIGN_RIGHT using the text direction.
HorizontalAlignment RenderText::GetCurrentHorizontalAlignment() {
  if (horizontal_alignment_ != ALIGN_TO_HEAD)
    return horizontal_alignment_;
  return GetTextDirection() == base::i18n::RIGHT_TO_LEFT ?
      ALIGN_RIGHT : ALIGN_LEFT;
}

// Returns the horizontal alignment offset for |line_number| plus the vertical
// offset that centers the text in the display rect.
Vector2d RenderText::GetAlignmentOffset(size_t line_number) {
  // TODO(ckocagil): Enable |lines_| usage in other platforms.
#if defined(OS_WIN)
  DCHECK_LT(line_number, lines_.size());
#endif
  Vector2d offset;
  HorizontalAlignment horizontal_alignment = GetCurrentHorizontalAlignment();
  if (horizontal_alignment != ALIGN_LEFT) {
#if defined(OS_WIN)
    const int width = std::ceil(lines_[line_number].size.width()) +
        (cursor_enabled_ ? 1 : 0);
#else
    const int width = GetContentWidth();
#endif
    offset.set_x(display_rect().width() - width);
    // Put any extra margin pixel on the left to match legacy behavior.
    if (horizontal_alignment == ALIGN_CENTER)
      offset.set_x((offset.x() + 1) / 2);
  }

  // Vertically center the text.
  if (multiline_) {
    const int text_height = lines_.back().preceding_heights +
        lines_.back().size.height();
    offset.set_y((display_rect_.height() - text_height) / 2);
  } else {
    offset.set_y(GetBaseline() - GetLayoutTextBaseline());
  }

  return offset;
}

// Installs a gradient fade shader on |renderer| when FADE_TAIL eliding is
// active and the content overflows the display rect; which side(s) fade
// depends on the current alignment.
void RenderText::ApplyFadeEffects(internal::SkiaTextRenderer* renderer) {
  const int width = display_rect().width();
  if (multiline() || elide_behavior_ != FADE_TAIL || GetContentWidth() <= width)
    return;

  const int gradient_width = CalculateFadeGradientWidth(font_list(), width);
  if (gradient_width == 0)
    return;

  HorizontalAlignment horizontal_alignment = GetCurrentHorizontalAlignment();
  Rect solid_part = display_rect();
  Rect left_part;
  Rect right_part;
  if (horizontal_alignment != ALIGN_LEFT) {
    left_part = solid_part;
    left_part.Inset(0, 0, solid_part.width() - gradient_width, 0);
    solid_part.Inset(gradient_width, 0, 0, 0);
  }
  if (horizontal_alignment != ALIGN_RIGHT) {
    right_part = solid_part;
    right_part.Inset(solid_part.width() - gradient_width, 0, 0, 0);
    solid_part.Inset(0, 0, gradient_width, 0);
  }

  Rect text_rect = display_rect();
  text_rect.Inset(GetAlignmentOffset(0).x(), 0, 0, 0);

  // TODO(msw): Use the actual text colors corresponding to each faded part.
skia::RefPtr<SkShader> shader = CreateFadeShader(
      text_rect, left_part, right_part, colors_.breaks().front().second);
  if (shader)
    renderer->SetShader(shader.get());
}

void RenderText::ApplyTextShadows(internal::SkiaTextRenderer* renderer) {
  skia::RefPtr<SkDrawLooper> looper = CreateShadowDrawLooper(shadows_);
  renderer->SetDrawLooper(looper.get());
}

// static
// Returns true if the caret at |caret_pos| with |caret_affinity| lies within
// |range| (the caret "occupies" the character it is attached to).
bool RenderText::RangeContainsCaret(const Range& range,
                                    size_t caret_pos,
                                    LogicalCursorDirection caret_affinity) {
  // NB: exploits unsigned wraparound (WG14/N1124 section 6.2.5 paragraph 9).
  size_t adjacent = (caret_affinity == CURSOR_BACKWARD) ?
      caret_pos - 1 : caret_pos + 1;
  return range.Contains(Range(caret_pos, adjacent));
}

// Moves the cursor to |position| (clamped to the text length), optionally
// keeping the existing selection anchor. No-op when the clamped position is
// not a valid cursor index.
void RenderText::MoveCursorTo(size_t position, bool select) {
  size_t cursor = std::min(position, text().length());
  if (IsValidCursorIndex(cursor))
    SetSelectionModel(SelectionModel(
        Range(select ? selection().start() : cursor, cursor),
        (cursor == 0) ? CURSOR_FORWARD : CURSOR_BACKWARD));
}

// Rebuilds |layout_text_| from |text_|: obscuring (with optional single
// revealed character), truncation with an ellipsis, width-based eliding, and
// newline-symbol replacement are all applied here.
void RenderText::UpdateLayoutText() {
  layout_text_.clear();
  line_breaks_.SetMax(0);

  if (obscured_) {
    size_t obscured_text_length =
        static_cast<size_t>(UTF16IndexToOffset(text_, 0, text_.length()));
    layout_text_.assign(obscured_text_length, kPasswordReplacementChar);

    if (obscured_reveal_index_ >= 0 &&
        obscured_reveal_index_ < static_cast<int>(text_.length())) {
      // Gets the index range in |text_| to be revealed.
      size_t start = obscured_reveal_index_;
      U16_SET_CP_START(text_.data(), 0, start);
      size_t end = start;
      UChar32 unused_char;
      U16_NEXT(text_.data(), end, text_.length(), unused_char);

      // Gets the index in |layout_text_| to be replaced.
      const size_t cp_start =
          static_cast<size_t>(UTF16IndexToOffset(text_, 0, start));
      if (layout_text_.length() > cp_start)
        layout_text_.replace(cp_start, 1, text_.substr(start, end - start));
    }
  } else {
    layout_text_ = text_;
  }

  const base::string16& text = layout_text_;
  if (truncate_length_ > 0 && truncate_length_ < text.length()) {
    // Truncate the text at a valid character break and append an ellipsis.
    icu::StringCharacterIterator iter(text.c_str());
    // Respect ELIDE_HEAD and ELIDE_MIDDLE preferences during truncation.
    if (elide_behavior_ == ELIDE_HEAD) {
      iter.setIndex32(text.length() - truncate_length_ + 1);
      layout_text_.assign(kEllipsisUTF16 + text.substr(iter.getIndex()));
    } else if (elide_behavior_ == ELIDE_MIDDLE) {
      iter.setIndex32(truncate_length_ / 2);
      const size_t ellipsis_start = iter.getIndex();
      iter.setIndex32(text.length() - (truncate_length_ / 2));
      const size_t ellipsis_end = iter.getIndex();
      DCHECK_LE(ellipsis_start, ellipsis_end);
      layout_text_.assign(text.substr(0, ellipsis_start) + kEllipsisUTF16 +
                          text.substr(ellipsis_end));
    } else {
      iter.setIndex32(truncate_length_ - 1);
      layout_text_.assign(text.substr(0, iter.getIndex()) + kEllipsisUTF16);
    }
  }

  if (elide_behavior_ != NO_ELIDE && elide_behavior_ != FADE_TAIL &&
      !layout_text_.empty() && GetContentWidth() > display_rect_.width()) {
    // This doesn't trim styles so ellipsis may get rendered as a different
    // style than the preceding text. See crbug.com/327850.
    layout_text_.assign(
        Elide(layout_text_, static_cast<float>(display_rect_.width()),
              elide_behavior_));
  }

  // Replace the newline character with a newline symbol in single line mode.
static const base::char16 kNewline[] = { '\n', 0 };
  static const base::char16 kNewlineSymbol[] = { 0x2424, 0 };
  if (!multiline_ && replace_newline_chars_with_symbols_)
    base::ReplaceChars(layout_text_, kNewline, kNewlineSymbol, &layout_text_);

  ResetLayout();
}

// Elides |text| to fit |available_width| per |behavior|, by binary searching
// cut positions on a styled RenderText copy so the measurement accounts for
// fonts, styles, truncation and the cursor allowance.
base::string16 RenderText::Elide(const base::string16& text,
                                 float available_width,
                                 ElideBehavior behavior) {
  if (available_width <= 0 || text.empty())
    return base::string16();
  if (behavior == ELIDE_EMAIL)
    return ElideEmail(text, available_width);

  // Create a RenderText copy with attributes that affect the rendering width.
  scoped_ptr<RenderText> render_text = CreateInstanceOfSameType();
  render_text->SetFontList(font_list_);
  render_text->SetDirectionalityMode(directionality_mode_);
  render_text->SetCursorEnabled(cursor_enabled_);
  render_text->set_truncate_length(truncate_length_);
  render_text->styles_ = styles_;
  render_text->colors_ = colors_;
  render_text->SetText(text);
  // Fast path: the text already fits, nothing to elide.
  if (render_text->GetContentWidthF() <= available_width)
    return text;

  const base::string16 ellipsis = base::string16(kEllipsisUTF16);
  const bool insert_ellipsis = (behavior != TRUNCATE);
  const bool elide_in_middle = (behavior == ELIDE_MIDDLE);
  const bool elide_at_beginning = (behavior == ELIDE_HEAD);
  StringSlicer slicer(text, ellipsis, elide_in_middle, elide_at_beginning);

  // If even the bare ellipsis doesn't fit, give up and return empty.
  render_text->SetText(ellipsis);
  const float ellipsis_width = render_text->GetContentWidthF();
  if (insert_ellipsis && (ellipsis_width > available_width))
    return base::string16();

  // Use binary search to compute the elided text.
  // NOTE(review): if |guess| reaches 0 while the result is still too wide,
  // |hi| = guess - 1 underflows (size_t) below — confirm the loop and
  // CutString tolerate that path.
  size_t lo = 0;
  size_t hi = text.length() - 1;
  const base::i18n::TextDirection text_direction = GetTextDirection();
  for (size_t guess = (lo + hi) / 2; lo <= hi; guess = (lo + hi) / 2) {
    // Restore colors. They will be truncated to size by SetText.
    render_text->colors_ = colors_;
    base::string16 new_text =
        slicer.CutString(guess, insert_ellipsis && behavior != ELIDE_TAIL);
    render_text->SetText(new_text);

    // This has to be an additional step so that the ellipsis is rendered with
    // same style as trailing part of the text.
    if (insert_ellipsis && behavior == ELIDE_TAIL) {
      // When ellipsis follows text whose directionality is not the same as that
      // of the whole text, it will be rendered with the directionality of the
      // whole text. Since we want ellipsis to indicate continuation of the
      // preceding text, we force the directionality of ellipsis to be same as
      // the preceding text using LTR or RTL markers.
      base::i18n::TextDirection trailing_text_direction =
          base::i18n::GetLastStrongCharacterDirection(new_text);
      new_text.append(ellipsis);
      if (trailing_text_direction != text_direction) {
        if (trailing_text_direction == base::i18n::LEFT_TO_RIGHT)
          new_text += base::i18n::kLeftToRightMark;
        else
          new_text += base::i18n::kRightToLeftMark;
      }
      render_text->SetText(new_text);
    }

    // Restore styles. Make sure style ranges don't break new text graphemes.
    render_text->styles_ = styles_;
    for (size_t style = 0; style < NUM_TEXT_STYLES; ++style) {
      BreakList<bool>& break_list = render_text->styles_[style];
      break_list.SetMax(render_text->text_.length());
      Range range;
      while (range.end() < break_list.max()) {
        BreakList<bool>::const_iterator current_break =
            break_list.GetBreak(range.end());
        range = break_list.GetRange(current_break);
        // Push any break that lands mid-grapheme forward to the next valid
        // cursor index so the style covers whole graphemes.
        if (range.end() < break_list.max() &&
            !render_text->IsValidCursorIndex(range.end())) {
          range.set_end(render_text->IndexOfAdjacentGrapheme(range.end(),
                                                             CURSOR_FORWARD));
          break_list.ApplyValue(current_break->second, range);
        }
      }
    }

    // We check the width of the whole desired string at once to ensure we
    // handle kerning/ligatures/etc. correctly.
const float guess_width = render_text->GetContentWidthF();
    if (guess_width == available_width)
      break;
    if (guess_width > available_width) {
      hi = guess - 1;
      // Move back on the loop terminating condition when the guess is too wide.
      if (hi < lo)
        lo = hi;
    } else {
      lo = guess + 1;
    }
  }

  return render_text->text();
}

// Elides an email address. Only the username is elided when possible;
// otherwise the domain is also elided in the middle. Returns a single
// ellipsis when even the minimal "x…@…y" form cannot fit.
base::string16 RenderText::ElideEmail(const base::string16& email,
                                      float available_width) {
  // The returned string will have at least one character besides the ellipsis
  // on either side of '@'; if that's impossible, a single ellipsis is returned.
  // If possible, only the username is elided. Otherwise, the domain is elided
  // in the middle, splitting available width equally with the elided username.
  // If the username is short enough that it doesn't need half the available
  // width, the elided domain will occupy that extra width.

  // Split the email into its local-part (username) and domain-part. The email
  // spec allows for @ symbols in the username under some special requirements,
  // but not in the domain part, so splitting at the last @ symbol is safe.
  const size_t split_index = email.find_last_of('@');
  DCHECK_NE(split_index, base::string16::npos);
  base::string16 username = email.substr(0, split_index);
  base::string16 domain = email.substr(split_index + 1);
  DCHECK(!username.empty());
  DCHECK(!domain.empty());

  // Subtract the @ symbol from the available width as it is mandatory.
  const base::string16 kAtSignUTF16 = base::ASCIIToUTF16("@");
  available_width -= GetStringWidthF(kAtSignUTF16, font_list());

  // Check whether eliding the domain is necessary: if eliding the username
  // is sufficient, the domain will not be elided.
  const float full_username_width = GetStringWidthF(username, font_list());
  const float available_domain_width =
      available_width -
      std::min(full_username_width,
               GetStringWidthF(username.substr(0, 1) + kEllipsisUTF16,
                               font_list()));
  if (GetStringWidthF(domain, font_list()) > available_domain_width) {
    // Elide the domain so that it only takes half of the available width.
    // Should the username not need all the width available in its half, the
    // domain will occupy the leftover width.
    // If |desired_domain_width| is greater than |available_domain_width|: the
    // minimal username elision allowed by the specifications will not fit; thus
    // |desired_domain_width| must be <= |available_domain_width| at all cost.
    const float desired_domain_width =
        std::min<float>(available_domain_width,
            std::max<float>(available_width - full_username_width,
                            available_width / 2));
    domain = Elide(domain, desired_domain_width, ELIDE_MIDDLE);
    // Failing to elide the domain such that at least one character remains
    // (other than the ellipsis itself) remains: return a single ellipsis.
    if (domain.length() <= 1U)
      return base::string16(kEllipsisUTF16);
  }

  // Fit the username in the remaining width (at this point the elided username
  // is guaranteed to fit with at least one character remaining given all the
  // precautions taken earlier).
  available_width -= GetStringWidthF(domain, font_list());
  username = Elide(username, available_width, ELIDE_TAIL);
  return username + kAtSignUTF16 + domain;
}

// Recomputes the cursor bounds when stale and scrolls the display offset just
// enough to keep the cursor inside the display rect.
void RenderText::UpdateCachedBoundsAndOffset() {
  if (cached_bounds_and_offset_valid_)
    return;

  // TODO(ckocagil): Add support for scrolling multiline text.

  int delta_x = 0;

  if (cursor_enabled()) {
    // When cursor is enabled, ensure it is visible. For this, set the valid
    // flag true and calculate the current cursor bounds using the stale
    // |display_offset_|. Then calculate the change in offset needed to move the
    // cursor into the visible area.
    cached_bounds_and_offset_valid_ = true;
    cursor_bounds_ = GetCursorBounds(selection_model_, insert_mode_);

    // TODO(bidi): Show RTL glyphs at the cursor position for ALIGN_LEFT, etc.
    if (cursor_bounds_.right() > display_rect_.right())
      delta_x = display_rect_.right() - cursor_bounds_.right();
    else if (cursor_bounds_.x() < display_rect_.x())
      delta_x = display_rect_.x() - cursor_bounds_.x();
  }

  SetDisplayOffset(display_offset_.x() + delta_x);
}

// Fills each selected span's bounds with the focused selection background
// color.
void RenderText::DrawSelection(Canvas* canvas) {
  const std::vector<Rect> sel = GetSubstringBounds(selection());
  for (std::vector<Rect>::const_iterator i = sel.begin(); i < sel.end(); ++i)
    canvas->FillRect(*i, selection_background_focused_color_);
}

}  // namespace gfx
mohamed--abdel-maksoud/chromium.src
ui/gfx/render_text.cc
C++
bsd-3-clause
52,158
////////////////////////////////////////////////////////////////////////// // // Copyright (c) 2007-2013, Image Engine Design Inc. All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // * Neither the name of Image Engine Design nor the names of any // other contributors to this software may be used to endorse or // promote products derived from this software without specific prior // written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
// ////////////////////////////////////////////////////////////////////////// #include "IECore/NURBSPrimitive.h" #include "IECore/Renderer.h" #include "IECore/MurmurHash.h" using namespace std; using namespace IECore; using namespace Imath; using namespace boost; static IndexedIO::EntryID g_uOrderEntry("uOrder"); static IndexedIO::EntryID g_uKnotEntry("uKnot"); static IndexedIO::EntryID g_uMinEntry("uMin"); static IndexedIO::EntryID g_uMaxEntry("uMax"); static IndexedIO::EntryID g_vOrderEntry("vOrder"); static IndexedIO::EntryID g_vKnotEntry("vKnot"); static IndexedIO::EntryID g_vMinEntry("vMin"); static IndexedIO::EntryID g_vMaxEntry("vMax"); const unsigned int NURBSPrimitive::m_ioVersion = 0; IE_CORE_DEFINEOBJECTTYPEDESCRIPTION(NURBSPrimitive); NURBSPrimitive::NURBSPrimitive() { vector<float> knot; knot.push_back( 0 ); knot.push_back( 0 ); knot.push_back( 0 ); knot.push_back( 0.333 ); knot.push_back( 0.666 ); knot.push_back( 1 ); knot.push_back( 1 ); knot.push_back( 1 ); setTopology( 4, new FloatVectorData( knot ), 0, 1, 4, new FloatVectorData( knot ), 0, 1 ); } NURBSPrimitive::NURBSPrimitive( int uOrder, ConstFloatVectorDataPtr uKnot, float uMin, float uMax, int vOrder, ConstFloatVectorDataPtr vKnot, float vMin, float vMax, ConstV3fVectorDataPtr p ) { setTopology( uOrder, uKnot, uMin, uMax, vOrder, vKnot, vMin, vMax ); if( p ) { V3fVectorDataPtr pData = p->copy(); pData->setInterpretation( GeometricData::Point ); variables.insert( PrimitiveVariableMap::value_type( "P", PrimitiveVariable( PrimitiveVariable::Vertex, pData ) ) ); } } int NURBSPrimitive::uOrder() const { return m_uOrder; } const FloatVectorData *NURBSPrimitive::uKnot() const { return m_uKnot.get(); } float NURBSPrimitive::uMin() const { return m_uMin; } float NURBSPrimitive::uMax() const { return m_uMax; } int NURBSPrimitive::uVertices() const { return m_uKnot->readable().size() - m_uOrder; } int NURBSPrimitive::uSegments() const { return 1 + uVertices() - m_uOrder; } int NURBSPrimitive::vOrder() 
const { return m_vOrder; } const FloatVectorData *NURBSPrimitive::vKnot() const { return m_vKnot.get(); } float NURBSPrimitive::vMin() const { return m_vMin; } float NURBSPrimitive::vMax() const { return m_vMax; } int NURBSPrimitive::vVertices() const { return m_vKnot->readable().size() - m_vOrder; } int NURBSPrimitive::vSegments() const { return 1 + vVertices() - m_vOrder; } void NURBSPrimitive::setTopology( int uOrder, ConstFloatVectorDataPtr uKnot, float uMin, float uMax, int vOrder, ConstFloatVectorDataPtr vKnot, float vMin, float vMax ) { // check order isn't too small if( uOrder<2 ) { throw Exception( "Order in u direction too small." ); } if( vOrder<2 ) { throw Exception( "Order in v direction too small." ); } // check knots have enough entries for the order. // an order of N demands at least N control points // and numKnots==numControlPoints + order // so we need numKnots>=2*order if( (int)uKnot->readable().size() < uOrder * 2 ) { throw Exception( "Not enough knot values in u direction." ); } if( (int)vKnot->readable().size() < vOrder * 2 ) { throw Exception( "Not enough knot values in v direction." ); } // check knots are monotonically increasing const vector<float> &u = uKnot->readable(); float previous = u[0]; for( unsigned int i=0; i<u.size(); i++ ) { if( u[i]<previous ) { throw Exception( "Knots not monotonically increasing in u direction." ); } previous = u[i]; } const vector<float> &v = vKnot->readable(); previous = v[0]; for( unsigned int i=0; i<v.size(); i++ ) { if( v[i]<previous ) { throw Exception( "Knots not monotonically increasing in v direction." ); } previous = v[i]; } // check min and max parametric values are in range if( uMin > uMax ) { throw Exception( "uMin greater than uMax." ); } if( vMin > vMax ) { throw Exception( "vMin greater than vMax." ); } if( uMin < u[uOrder-2] ) { throw Exception( "uMin too small." ); } if( uMax > u[u.size()-uOrder+1] ) { throw Exception( "uMax too great." 
); } if( vMin < v[vOrder-2] ) { throw Exception( "vMin too small." ); } if( vMax > v[v.size()-vOrder+1] ) { throw Exception( "vMax too great." ); } // set everything (taking copies of the data) m_uOrder = uOrder; m_uKnot = uKnot->copy(); m_uMin = uMin; m_uMax = uMax; m_vOrder = vOrder; m_vKnot = vKnot->copy(); m_vMin = vMin; m_vMax = vMax; } size_t NURBSPrimitive::variableSize( PrimitiveVariable::Interpolation interpolation ) const { switch( interpolation ) { case PrimitiveVariable::Constant : return 1; case PrimitiveVariable::Uniform : return uSegments() * vSegments(); case PrimitiveVariable::Vertex : return uVertices() * vVertices(); case PrimitiveVariable::Varying: case PrimitiveVariable::FaceVarying: return (uSegments()+1) * (vSegments()+1); default : return 0; } } void NURBSPrimitive::render( Renderer *renderer ) const { renderer->nurbs( m_uOrder, m_uKnot, m_uMin, m_uMax, m_vOrder, m_vKnot, m_vMin, m_vMax, variables ); } void NURBSPrimitive::copyFrom( const Object *other, IECore::Object::CopyContext *context ) { Primitive::copyFrom( other, context ); const NURBSPrimitive *tOther = static_cast<const NURBSPrimitive *>( other ); m_uOrder = tOther->m_uOrder; m_uKnot = context->copy<FloatVectorData>( tOther->m_uKnot ); m_uMin = tOther->m_uMin; m_uMax = tOther->m_uMax; m_vOrder = tOther->m_vOrder; m_vKnot = context->copy<FloatVectorData>( tOther->m_vKnot ); m_vMin = tOther->m_vMin; m_vMax = tOther->m_vMax; } void NURBSPrimitive::save( IECore::Object::SaveContext *context ) const { Primitive::save(context); IndexedIOPtr container = context->container( staticTypeName(), m_ioVersion ); container->write( g_uOrderEntry, m_uOrder ); context->save( m_uKnot, container, g_uKnotEntry ); container->write( g_uMinEntry, m_uMin ); container->write( g_uMaxEntry, m_uMax ); container->write( g_vOrderEntry, m_vOrder ); context->save( m_vKnot, container, g_vKnotEntry ); container->write( g_vMinEntry, m_vMin ); container->write( g_vMaxEntry, m_vMax ); } void NURBSPrimitive::load( 
IECore::Object::LoadContextPtr context ) { Primitive::load(context); unsigned int v = m_ioVersion; ConstIndexedIOPtr container = context->container( staticTypeName(), v ); container->read( g_uOrderEntry, m_uOrder ); m_uKnot = context->load<FloatVectorData>( container, g_uKnotEntry ); container->read( g_uMinEntry, m_uMin ); container->read( g_uMaxEntry, m_uMax ); container->read( g_vOrderEntry, m_vOrder ); m_vKnot = context->load<FloatVectorData>( container, g_vKnotEntry ); container->read( g_vMinEntry, m_vMin ); container->read( g_vMaxEntry, m_vMax ); } bool NURBSPrimitive::isEqualTo( const Object *other ) const { if( !Primitive::isEqualTo( other ) ) { return false; } const NURBSPrimitive *tOther = static_cast<const NURBSPrimitive *>( other ); if( m_uOrder!=tOther->m_uOrder ) { return false; } if( m_vOrder!=tOther->m_vOrder ) { return false; } if( m_uMin!=tOther->m_uMin ) { return false; } if( m_vMin!=tOther->m_vMin ) { return false; } if( m_uMax!=tOther->m_uMax ) { return false; } if( m_vMax!=tOther->m_vMax ) { return false; } if( !m_uKnot->isEqualTo( tOther->m_uKnot ) ) { return false; } if( !m_vKnot->isEqualTo( tOther->m_vKnot ) ) { return false; } return true; } void NURBSPrimitive::memoryUsage( Object::MemoryAccumulator &a ) const { Primitive::memoryUsage( a ); a.accumulate( sizeof( m_uOrder ) * 2 ); a.accumulate( sizeof( m_uMin ) * 4 ); a.accumulate( m_uKnot ); a.accumulate( m_vKnot ); } void NURBSPrimitive::hash( MurmurHash &h ) const { Primitive::hash( h ); } void NURBSPrimitive::topologyHash( MurmurHash &h ) const { h.append( m_uOrder ); m_uKnot->hash( h ); h.append( m_uMin ); h.append( m_uMax ); h.append( m_vOrder ); m_vKnot->hash( h ); h.append( m_vMin ); h.append( m_vMax ); }
code-google-com/cortex-vfx
src/IECore/NURBSPrimitive.cpp
C++
bsd-3-clause
9,702
--TEST-- Test array_merge_recursive() function : usage variations - common key and value(Bug#43559) --FILE-- <?php /* Prototype : array array_merge_recursive(array $arr1[, array $...]) * Description: Recursively merges elements from passed arrays into one array * Source code: ext/standard/array.c */ /* * Testing the functionality of array_merge_recursive() by passing * arrays having common key and value. */ echo "*** Testing array_merge_recursive() : arrays with common key and value ***\n"; /* initialize the array having duplicate values */ // integer values $arr1 = array("a" => 1, "b" => 2); $arr2 = array("b" => 2, "c" => 4); echo "-- Integer values --\n"; var_dump( array_merge_recursive($arr1, $arr2) ); // float values $arr1 = array("a" => 1.1, "b" => 2.2); $arr2 = array("b" => 2.2, "c" => 3.3); echo "-- Float values --\n"; var_dump( array_merge_recursive($arr1, $arr2) ); // string values $arr1 = array("a" => "hello", "b" => "world"); $arr2 = array("b" => "world", "c" => "string"); echo "-- String values --\n"; var_dump( array_merge_recursive($arr1, $arr2) ); // boolean values $arr1 = array("a" => true, "b" => false); $arr2 = array("b" => false); echo "-- Boolean values --\n"; var_dump( array_merge_recursive($arr1, $arr2) ); // null values $arr1 = array( "a" => NULL); $arr2 = array( "a" => NULL); echo "-- Null values --\n"; var_dump( array_merge_recursive($arr1, $arr2) ); echo "Done"; ?> --EXPECTF-- *** Testing array_merge_recursive() : arrays with common key and value *** -- Integer values -- array(3) { ["a"]=> int(1) ["b"]=> array(2) { [0]=> int(2) [1]=> int(2) } ["c"]=> int(4) } -- Float values -- array(3) { ["a"]=> float(1.1) ["b"]=> array(2) { [0]=> float(2.2) [1]=> float(2.2) } ["c"]=> float(3.3) } -- String values -- array(3) { ["a"]=> string(5) "hello" ["b"]=> array(2) { [0]=> string(5) "world" [1]=> string(5) "world" } ["c"]=> string(6) "string" } -- Boolean values -- array(2) { ["a"]=> bool(true) ["b"]=> array(2) { [0]=> bool(false) [1]=> bool(false) } 
} -- Null values -- array(1) { ["a"]=> array(2) { [0]=> NULL [1]=> NULL } } Done
ericpp/hippyvm
test_phpt/ext/standard/array/array_merge_recursive_variation9.phpt
PHP
mit
2,233
require 'optparse' require 'uri' require 'puma/server' require 'puma/const' require 'puma/configuration' require 'puma/binder' require 'puma/detect' require 'puma/daemon_ext' require 'puma/util' require 'puma/single' require 'puma/cluster' require 'puma/commonlogger' module Puma class << self # The CLI exports its Puma::Configuration object here to allow # apps to pick it up. An app needs to use it conditionally though # since it is not set if the app is launched via another # mechanism than the CLI class. attr_accessor :cli_config end # Handles invoke a Puma::Server in a command line style. # class CLI # Create a new CLI object using +argv+ as the command line # arguments. # # +stdout+ and +stderr+ can be set to IO-like objects which # this object will report status on. # def initialize(argv, events=Events.stdio) @debug = false @argv = argv @events = events @status = nil @runner = nil @config = nil ENV['NEWRELIC_DISPATCHER'] ||= "Puma" setup_options generate_restart_data @binder = Binder.new(@events) @binder.import_from_env end # The Binder object containing the sockets bound to. attr_reader :binder # The Configuration object used. attr_reader :config # The Hash of options used to configure puma. attr_reader :options # The Events object used to output information. attr_reader :events # Delegate +log+ to +@events+ # def log(str) @events.log str end # Delegate +error+ to +@events+ # def error(str) @events.error str end def debug(str) @events.log "- #{str}" if @options[:debug] end def clustered? @options[:workers] > 0 end def prune_bundler? @options[:prune_bundler] && clustered? && !@options[:preload_app] end def jruby? IS_JRUBY end def windows? 
RUBY_PLATFORM =~ /mswin32|ming32/ end def env @options[:environment] || ENV['RACK_ENV'] || 'development' end def write_state write_pid path = @options[:state] return unless path state = { 'pid' => Process.pid } cfg = @config.dup [ :logger, :before_worker_shutdown, :before_worker_boot, :before_worker_fork, :after_worker_boot, :on_restart, :lowlevel_error_handler ].each { |k| cfg.options.delete(k) } state['config'] = cfg require 'yaml' File.open(path, 'w') { |f| f.write state.to_yaml } end # If configured, write the pid of the current process out # to a file. # def write_pid path = @options[:pidfile] return unless path File.open(path, 'w') { |f| f.puts Process.pid } cur = Process.pid at_exit do delete_pidfile if cur == Process.pid end end def delete_pidfile path = @options[:pidfile] File.unlink(path) if path && File.exist?(path) end def graceful_stop @runner.stop_blocked log "=== puma shutdown: #{Time.now} ===" log "- Goodbye!" end def jruby_daemon_start require 'puma/jruby_restart' JRubyRestart.daemon_start(@restart_dir, restart_args) end def restart! @options[:on_restart].each do |block| block.call self end if jruby? close_binder_listeners require 'puma/jruby_restart' JRubyRestart.chdir_exec(@restart_dir, restart_args) elsif windows? close_binder_listeners argv = restart_args Dir.chdir(@restart_dir) argv += [redirects] if RUBY_VERSION >= '1.9' Kernel.exec(*argv) else redirects = {:close_others => true} @binder.listeners.each_with_index do |(l, io), i| ENV["PUMA_INHERIT_#{i}"] = "#{io.to_i}:#{l}" redirects[io.to_i] = io.to_i end argv = restart_args Dir.chdir(@restart_dir) argv += [redirects] if RUBY_VERSION >= '1.9' Kernel.exec(*argv) end end # Parse the options, load the rackup, start the server and wait # for it to finish. # def run begin parse_options rescue UnsupportedOption exit 1 end dir = @options[:directory] Dir.chdir(dir) if dir prune_bundler if prune_bundler? set_rack_environment if clustered? 
@events.formatter = Events::PidFormatter.new @options[:logger] = @events @runner = Cluster.new(self) else @runner = Single.new(self) end setup_signals set_process_title @status = :run @runner.run case @status when :halt log "* Stopping immediately!" when :run, :stop graceful_stop when :restart log "* Restarting..." @runner.before_restart restart! when :exit # nothing end end def stop @status = :stop @runner.stop end def restart @status = :restart @runner.restart end def reload_worker_directory @runner.reload_worker_directory if @runner.respond_to?(:reload_worker_directory) end def phased_restart unless @runner.respond_to?(:phased_restart) and @runner.phased_restart log "* phased-restart called but not available, restarting normally." return restart end true end def redirect_io @runner.redirect_io end def stats @runner.stats end def halt @status = :halt @runner.halt end private def title buffer = "puma #{Puma::Const::VERSION} (#{@options[:binds].join(',')})" buffer << " [#{@options[:tag]}]" if @options[:tag] buffer end def unsupported(str) @events.error(str) raise UnsupportedOption end def restart_args cmd = @options[:restart_cmd] if cmd cmd.split(' ') + @original_argv else @restart_argv end end def set_process_title Process.respond_to?(:setproctitle) ? Process.setproctitle(title) : $0 = title end def find_config if @options[:config_file] == '-' @options[:config_file] = nil else @options[:config_file] ||= %W(config/puma/#{env}.rb config/puma.rb).find { |f| File.exist?(f) } end end # Build the OptionParser object to handle the available options. 
# def setup_options @options = { :min_threads => 0, :max_threads => 16, :quiet => false, :debug => false, :binds => [], :workers => 0, :daemon => false, :before_worker_shutdown => [], :before_worker_boot => [], :before_worker_fork => [], :after_worker_boot => [] } @parser = OptionParser.new do |o| o.on "-b", "--bind URI", "URI to bind to (tcp://, unix://, ssl://)" do |arg| @options[:binds] << arg end o.on "-C", "--config PATH", "Load PATH as a config file" do |arg| @options[:config_file] = arg end o.on "--control URL", "The bind url to use for the control server", "Use 'auto' to use temp unix server" do |arg| if arg @options[:control_url] = arg elsif jruby? unsupported "No default url available on JRuby" end end o.on "--control-token TOKEN", "The token to use as authentication for the control server" do |arg| @options[:control_auth_token] = arg end o.on "-d", "--daemon", "Daemonize the server into the background" do @options[:daemon] = true @options[:quiet] = true end o.on "--debug", "Log lowlevel debugging information" do @options[:debug] = true end o.on "--dir DIR", "Change to DIR before starting" do |d| @options[:directory] = d.to_s @options[:worker_directory] = d.to_s end o.on "-e", "--environment ENVIRONMENT", "The environment to run the Rack app on (default development)" do |arg| @options[:environment] = arg end o.on "-I", "--include PATH", "Specify $LOAD_PATH directories" do |arg| $LOAD_PATH.unshift(*arg.split(':')) end o.on "-p", "--port PORT", "Define the TCP port to bind to", "Use -b for more advanced options" do |arg| @options[:binds] << "tcp://#{Configuration::DefaultTCPHost}:#{arg}" end o.on "--pidfile PATH", "Use PATH as a pidfile" do |arg| @options[:pidfile] = arg end o.on "--preload", "Preload the app. 
Cluster mode only" do @options[:preload_app] = true end o.on "--prune-bundler", "Prune out the bundler env if possible" do @options[:prune_bundler] = true end o.on "-q", "--quiet", "Quiet down the output" do @options[:quiet] = true end o.on "-R", "--restart-cmd CMD", "The puma command to run during a hot restart", "Default: inferred" do |cmd| @options[:restart_cmd] = cmd end o.on "-S", "--state PATH", "Where to store the state details" do |arg| @options[:state] = arg end o.on '-t', '--threads INT', "min:max threads to use (default 0:16)" do |arg| min, max = arg.split(":") if max @options[:min_threads] = min @options[:max_threads] = max else @options[:min_threads] = 0 @options[:max_threads] = arg end end o.on "--tcp-mode", "Run the app in raw TCP mode instead of HTTP mode" do @options[:mode] = :tcp end o.on "-V", "--version", "Print the version information" do puts "puma version #{Puma::Const::VERSION}" exit 0 end o.on "-w", "--workers COUNT", "Activate cluster mode: How many worker processes to create" do |arg| @options[:workers] = arg.to_i end o.on "--tag NAME", "Additional text to display in process listing" do |arg| @options[:tag] = arg end o.banner = "puma <options> <rackup file>" o.on_tail "-h", "--help", "Show help" do log o exit 0 end end end def generate_restart_data # Use the same trick as unicorn, namely favor PWD because # it will contain an unresolved symlink, useful for when # the pwd is /data/releases/current. if dir = ENV['PWD'] s_env = File.stat(dir) s_pwd = File.stat(Dir.pwd) if s_env.ino == s_pwd.ino and (jruby? or s_env.dev == s_pwd.dev) @restart_dir = dir @options[:worker_directory] = dir end end @restart_dir ||= Dir.pwd @original_argv = @argv.dup require 'rubygems' # if $0 is a file in the current directory, then restart # it the same, otherwise add -S on there because it was # picked up in PATH. 
# if File.exist?($0) arg0 = [Gem.ruby, $0] else arg0 = [Gem.ruby, "-S", $0] end # Detect and reinject -Ilib from the command line lib = File.expand_path "lib" arg0[1,0] = ["-I", lib] if $:[0] == lib if defined? Puma::WILD_ARGS @restart_argv = arg0 + Puma::WILD_ARGS + @original_argv else @restart_argv = arg0 + @original_argv end end def set_rack_environment @options[:environment] = env ENV['RACK_ENV'] = env end def setup_signals begin Signal.trap "SIGUSR2" do restart end rescue Exception log "*** SIGUSR2 not implemented, signal based restart unavailable!" end begin Signal.trap "SIGUSR1" do phased_restart end rescue Exception log "*** SIGUSR1 not implemented, signal based restart unavailable!" end begin Signal.trap "SIGTERM" do stop end rescue Exception log "*** SIGTERM not implemented, signal based gracefully stopping unavailable!" end begin Signal.trap "SIGHUP" do redirect_io end rescue Exception log "*** SIGHUP not implemented, signal based logs reopening unavailable!" end if jruby? Signal.trap("INT") do @status = :exit graceful_stop exit end end end def close_binder_listeners @binder.listeners.each do |l, io| io.close uri = URI.parse(l) next unless uri.scheme == 'unix' File.unlink("#{uri.host}#{uri.path}") end end def parse_options @parser.parse! @argv @options[:rackup] = @argv.shift if @argv.last find_config @config = Puma::Configuration.new @options # Advertise the Configuration Puma.cli_config = @config @config.load if clustered? && (jruby? || windows?) unsupported 'worker mode not supported on JRuby or Windows' end if @options[:daemon] && windows? unsupported 'daemon mode not supported on Windows' end end def prune_bundler return unless defined?(Bundler) puma = Bundler.rubygems.loaded_specs("puma") dirs = puma.require_paths.map { |x| File.join(puma.full_gem_path, x) } puma_lib_dir = dirs.detect { |x| File.exist? File.join(x, '../bin/puma-wild') } unless puma_lib_dir log "! 
Unable to prune Bundler environment, continuing" return end deps = puma.runtime_dependencies.map do |d| spec = Bundler.rubygems.loaded_specs(d.name) "#{d.name}:#{spec.version.to_s}" end log '* Pruning Bundler environment' home = ENV['GEM_HOME'] Bundler.with_clean_env do ENV['GEM_HOME'] = home wild = File.expand_path(File.join(puma_lib_dir, "../bin/puma-wild")) args = [Gem.ruby, wild, '-I', dirs.join(':'), deps.join(',')] + @original_argv Kernel.exec(*args) end end end end
praveenperera/qrideshare
vendor/cache/ruby/2.2.0/gems/puma-2.12.0/lib/puma/cli.rb
Ruby
mit
14,321
<?php /* * This file is part of the Symfony package. * * (c) Fabien Potencier <fabien@symfony.com> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Symfony\Component\Form\Extension\Core\Type; use Symfony\Component\Form\AbstractType; use Symfony\Component\Intl\Intl; use Symfony\Component\Locale\Locale; use Symfony\Component\OptionsResolver\OptionsResolverInterface; class LocaleType extends AbstractType { /** * {@inheritdoc} */ public function setDefaultOptions(OptionsResolverInterface $resolver) { $resolver->setDefaults(array( 'choices' => Intl::getLocaleBundle()->getLocaleNames(), )); } /** * {@inheritdoc} */ public function getParent() { return 'choice'; } /** * {@inheritdoc} */ public function getName() { return 'locale'; } }
Clempops/prenomea
vendor/symfony/symfony/src/Symfony/Component/Form/Extension/Core/Type/LocaleType.php
PHP
mit
1,010
KB.onClick('.accordion-toggle', function (e) { var sectionElement = KB.dom(e.target).parent('.accordion-section'); if (sectionElement) { KB.dom(sectionElement).toggleClass('accordion-collapsed'); } });
Shaxine/kanboard
assets/js/components/accordion.js
JavaScript
mit
223
alert("foo!");
beni55/unfiltered
netty-server/src/test/resources/files/foo.js
JavaScript
mit
14
/* Copyright (C) 2011 by Ivan Safrin Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ #include "PolyCollisionScene.h" #include "PolyCollisionSceneEntity.h" #include "PolyEntity.h" using namespace Polycode; CollisionScene::CollisionScene(Vector3 size, bool virtualScene, bool deferInitCollision) : Scene(Scene::SCENE_3D, virtualScene), world(NULL), collisionConfiguration(NULL), dispatcher(NULL), axisSweep(NULL) { if(!deferInitCollision) { initCollisionScene(size); } } void CollisionScene::initCollisionScene(Vector3 size) { btVector3 worldAabbMin(-size.x * 0.5, -size.y * 0.5, -size.z * 0.5); btVector3 worldAabbMax(size.x * 0.5, size.y * 0.5, size.z * 0.5); collisionConfiguration = new btDefaultCollisionConfiguration(); dispatcher = new btCollisionDispatcher(collisionConfiguration); // dispatcher->setNearCallback(customNearCallback); axisSweep = new btAxisSweep3(worldAabbMin,worldAabbMax); world = new btCollisionWorld(dispatcher,axisSweep,collisionConfiguration); } void CollisionScene::fixedUpdate() { for(int i=0; i < collisionChildren.size(); i++) { if(collisionChildren[i]->enabled) collisionChildren[i]->Update(); } world->performDiscreteCollisionDetection(); for(int i=0; i < collisionChildren.size(); i++) { if(collisionChildren[i]->enabled) collisionChildren[i]->lastPosition = collisionChildren[i]->getEntity()->getPosition(); } Scene::fixedUpdate(); } void CollisionScene::enableCollision(Entity *entity, bool val) { CollisionEntity *cEnt = getCollisionByScreenEntity(entity); if(cEnt) { cEnt->enabled = val; } } void CollisionScene::adjustForCollision(CollisionEntity *collisionEntity) { CollisionResult result; // Number elapsed = CoreServices::getInstance()->getCore()->getElapsed(); result.collided = false; for(int i=0; i < collisionChildren.size(); i++) { if(collisionChildren[i] != collisionEntity) { result = testCollisionOnCollisionChild(collisionEntity, collisionChildren[i]); if(result.collided) { if(result.setOldPosition) { collisionEntity->getEntity()->setPosition(result.newPos); } else { 
collisionEntity->getEntity()->Translate(result.colNormal.x*result.colDist, result.colNormal.y*result.colDist, result.colNormal.z*result.colDist); } } } } } CollisionEntity *CollisionScene::getCollisionByScreenEntity(Entity *ent) { for(int i=0; i<collisionChildren.size();i++) { if(collisionChildren[i]->getEntity() == ent) return collisionChildren[i]; } return NULL; } bool CollisionScene::isColliding(Entity *ent1) { CollisionEntity *cEnt1 = getCollisionByScreenEntity(ent1); if(cEnt1) { int numManifolds = world->getDispatcher()->getNumManifolds(); for (int i=0;i<numManifolds;i++) { btPersistentManifold* contactManifold = world->getDispatcher()->getManifoldByIndexInternal(i); btCollisionObject* obA = (btCollisionObject*)contactManifold->getBody0(); btCollisionObject* obB = (btCollisionObject*)contactManifold->getBody1(); if(obA == cEnt1->collisionObject || obB == cEnt1->collisionObject) { return true; } } } else { return false; } return false; } CollisionResult CollisionScene::testCollisionOnCollisionChild_Convex(CollisionEntity *cEnt1, CollisionEntity *cEnt2) { CollisionResult result; result.collided = false; result.setOldPosition = false; Vector3 collNormal; result.colNormal.set(0,0,0); result.colDist = 0; int numAdds = 0; int numManifolds = world->getDispatcher()->getNumManifolds(); for (int i=0;i<numManifolds;i++) { btPersistentManifold* contactManifold = world->getDispatcher()->getManifoldByIndexInternal(i); btCollisionObject* obA = (btCollisionObject*)contactManifold->getBody0(); btCollisionObject* obB = (btCollisionObject*)contactManifold->getBody1(); if((obA == cEnt1->collisionObject && obB == cEnt2->collisionObject) || (obA == cEnt2->collisionObject && obB == cEnt1->collisionObject)) { // contactManifold->refreshContactPoints(obA->getWorldTransform(), obB->getWorldTransform()); if(contactManifold->getNumContacts() > 0) { for(int j=0; j < contactManifold->getNumContacts(); j++) { if(contactManifold->getContactPoint(j).getDistance() <= btScalar(0.0)) { btVector3 
vec = contactManifold->getContactPoint(j).m_normalWorldOnB; result.colNormal += Vector3(vec.getX(), vec.getY(), vec.getZ()); result.colDist += contactManifold->getContactPoint(j).getDistance(); numAdds++; } } // btVector3 vec = contactManifold->getContactPoint(0).m_normalWorldOnB; // result.colNormal.set(vec.getX(), vec.getY(), vec.getZ()); // result.colDist = contactManifold->getContactPoint(0).getDistance(); result.collided = true; } } } if(numAdds > 0) { result.colNormal = result.colNormal / (Number)numAdds; // result.colNormal = Vector3(0,1,0); // result.colNormal.Normalize(); result.colDist = result.colDist / (Number)numAdds; } return result; // return cEnt1->collisionObject->checkCollideWith(cEnt2->collisionObject); } RayTestResult CollisionScene::getFirstEntityInRay(const Vector3 &origin, const Vector3 &dest) { RayTestResult ret; ret.entity = NULL; btVector3 fromVec(origin.x, origin.y, origin.z); btVector3 toVec(dest.x, dest.y, dest.z); btCollisionWorld::ClosestRayResultCallback cb(fromVec, toVec); world->rayTest (fromVec, toVec, cb); if (cb.hasHit ()) { CollisionEntity *retEnt = getCollisionEntityByObject((btCollisionObject*)cb.m_collisionObject); if(retEnt) { ret.entity = retEnt->getEntity(); ret.position = Vector3(cb.m_hitPointWorld.getX(), cb.m_hitPointWorld.getY(), cb.m_hitPointWorld.getZ()); ret.normal = Vector3(cb.m_hitNormalWorld.getX(), cb.m_hitNormalWorld.getY(), cb.m_hitNormalWorld.getZ()); return ret; } } return ret; } CollisionEntity *CollisionScene::getCollisionEntityByObject(btCollisionObject *collisionObject) { return (CollisionEntity*)collisionObject->getUserPointer(); } CollisionResult CollisionScene::testCollisionOnCollisionChild(CollisionEntity *cEnt1, CollisionEntity *cEnt2) { return testCollisionOnCollisionChild_Convex(cEnt1, cEnt2); } CollisionResult CollisionScene::testCollision(Entity *ent1, Entity *ent2) { CollisionEntity *cEnt1 = getCollisionByScreenEntity(ent1); CollisionEntity *cEnt2 = getCollisionByScreenEntity(ent2); 
CollisionResult result; result.collided = false; if(cEnt1 == NULL || cEnt2 == NULL) return result; return testCollisionOnCollisionChild(cEnt1, cEnt2); } CollisionScene::~CollisionScene() { for(int i=0; i < collisionChildren.size(); i++) { delete collisionChildren[i]; } delete world; delete axisSweep; delete dispatcher; delete collisionConfiguration; } void CollisionScene::removeCollision(Entity *entity) { CollisionEntity *cEnt = getCollisionByScreenEntity(entity); if(cEnt) { world->removeCollisionObject(cEnt->collisionObject); for(int i=0; i < collisionChildren.size(); i++) { if(collisionChildren[i] == cEnt) { std::vector<CollisionEntity*>::iterator target = collisionChildren.begin()+i; delete *target; collisionChildren.erase(target); } } } } void CollisionScene::removeEntity(Entity *entity) { if(getCollisionByScreenEntity(entity)) { removeCollision(entity); } Scene::removeEntity(entity); } CollisionEntity *CollisionScene::trackCollision(Entity *newEntity, int type, int group) { CollisionEntity *newCollisionEntity = new CollisionEntity(newEntity, type); // if(type == CollisionEntity::CHARACTER_CONTROLLER) { // world->addCollisionObject(newCollisionEntity->collisionObject,btBroadphaseProxy::CharacterFilter, btBroadphaseProxy::StaticFilter|btBroadphaseProxy::DefaultFilter); // } else { newCollisionEntity->collisionObject->setCollisionFlags(btCollisionObject::CF_NO_CONTACT_RESPONSE); world->addCollisionObject(newCollisionEntity->collisionObject, group); // } collisionChildren.push_back(newCollisionEntity); return newCollisionEntity; } CollisionEntity *CollisionScene::addCollisionChild(Entity *newEntity, int type, int group) { addEntity(newEntity); return trackCollision(newEntity, type, group); }
carlosmarti/Polycode
src/modules/physics3D/PolyCollisionScene.cpp
C++
mit
9,249
# Shared spec placeholder for Rational#quo. It is "shared" so it can be
# run for every class that exposes the method; currently it carries no
# assertions and only flags itself as needing review.
require File.expand_path('../../../spec_helper', __FILE__)

describe :rational_quo, shared: true do
  # Pending example: no block given, so the runner reports it as unimplemented.
  it "needs to be reviewed for spec completeness"
end
askl56/rubyspec
shared/rational/quo.rb
Ruby
mit
154
<?php

/*
 * This file is part of the Symfony package.
 *
 * (c) Fabien Potencier <fabien@symfony.com>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

namespace Symfony\Component\Security\Core\Authorization\Voter;

/**
 * Let voters expose the attributes and types they care about.
 *
 * By returning false to either `supportsAttribute` or `supportsType`, the
 * voter will never be called for the specified attribute or subject.
 *
 * @author Jérémy Derussé <jeremy@derusse.com>
 */
interface CacheableVoterInterface extends VoterInterface
{
    /**
     * Whether this voter can vote on the given attribute. Returning false
     * allows the caller to cache the result and skip this voter entirely
     * for that attribute.
     */
    public function supportsAttribute(string $attribute): bool;

    /**
     * Whether this voter can vote on subjects of the given type. Returning
     * false allows the caller to cache the result and skip this voter
     * entirely for that subject type.
     *
     * @param string $subjectType The type of the subject inferred by `get_class` or `get_debug_type`
     */
    public function supportsType(string $subjectType): bool;
}
Tobion/symfony
src/Symfony/Component/Security/Core/Authorization/Voter/CacheableVoterInterface.php
PHP
mit
879
})(); Clazz._coreLoaded = true;
xavierprat/chemEdData
libs/jsmol/jmol-14.8.0/jsmol/js/core/corebottom.js
JavaScript
mit
44
// Test-environment adapter: maps the Underscore.js API onto Sugar's
// equivalent methods so Underscore's own test suite can be run against
// Sugar. QUnit-style globals are also shimmed at the top. Underscore
// methods with no direct Sugar equivalent are noted in comments below.

// Don't need this for our purposes
module = function(){};

if(typeof equal != 'undefined') {
  equals = equal;
}

// --- QUnit shims ---------------------------------------------------------

ok = function(actual, message) {
  equal(actual, true, message);
}

raises = function(fn, expected, message) {
  raisesError(fn, message);
};

// NOTE: the async delay is intentionally dropped; tests run synchronously.
asyncTest = function(name, delay, fn) {
  test(name, fn);
}

start = function() {
  // Just pass through...
}

notStrictEqual = function(a, b, message) {
  equal(a === b, false, message);
}

// Normalizes null and array-like arguments into plain arrays so that
// Sugar's Array methods can be applied to them.
var ensureArray = function(obj) {
  if(obj === null) {
    return [];
  } else if(Object.isArray(obj) && (!obj.indexOf || !obj.lastIndexOf)) {
    return obj.concat();
  } else if(!Object.isArray(obj) && typeof obj == 'object') {
    return Array.prototype.slice.call(obj);
  } else {
    return obj;
  }
}

// Each entry maps an Underscore method name to a wrapper that delegates
// to the corresponding Sugar implementation.
var CompatibleMethods = [
  {
    module: Array.prototype,
    methods: [
      {
        name: 'first',
        method: function(arr, n, guard) {
          if(guard) {
            return arr[0];
          }
          return ensureArray(arr).first(n);
        }
      },
      {
        name: 'last',
        method: function(arr, n, third) {
          // This is the same check that Underscore makes to hack
          // _.last to work with _.map
          if(third) n = 1;
          return ensureArray(arr).last(n);
        }
      },
      {
        name: 'rest',
        method: function(arr, n, guard) {
          if(n === undefined) n = 1;
          if(guard) {
            return arr.slice(1);
          }
          return ensureArray(arr).from(n);
        }
      },
      {
        name: 'compact',
        method: function(arr) {
          return ensureArray(arr).compact(true);
        }
      },
      /* Object.extend is no longer compatible as it has conflict resolution now.
      {
        name: 'extend',
        method: function() {
          return Object.SugarMethods['merge'].method.apply(this, arguments);
        }
      },
      */
      /* Array#flatten is no longer compatible as it has levels of flattening (not just deep/shallow)
      {
        name: 'flatten',
        method: function(arr) {
          return ensureArray(arr).flatten();
        }
      },
      */
      {
        name: 'uniq',
        method: function(arr) {
          return ensureArray(arr).unique();
        }
      },
      {
        name: 'intersection',
        method: function(arr) {
          arr = ensureArray(arr);
          var args = Array.prototype.slice.call(arguments, 1);
          return Array.prototype.intersect.apply(arr, args);
        }
      },
      {
        name: 'union',
        method: function(arr, a) {
          arr = ensureArray(arr);
          var args = Array.prototype.slice.call(arguments, 1);
          return Array.prototype.union.apply(arr, args);
        }
      },
      /*
      {
        name: 'difference',
        method: function(arr, a) {
          arr = ensureArray(arr);
          var args = Array.prototype.slice.call(arguments, 1);
          return Array.prototype.subtract.apply(arr, args);
        }
      },
      */
      {
        name: 'indexOf',
        method: function(arr, a) {
          return ensureArray(arr).indexOf(a);
        }
      },
      {
        name: 'lastIndexOf',
        method: function(arr, a) {
          return ensureArray(arr).lastIndexOf(a);
        }
      },
      {
        name: 'range',
        method: function(start, stop, step) {
          if(arguments.length == 1) {
            stop = arguments[0];
            start = 0;
          }
          // upto is inclusive, so shift the endpoint by one step direction.
          var shift = step < 0 ? 1 : -1;
          return start.upto(stop + shift, null, step);
        }
      },

      // Collections
      // _.each -> Array#forEach OR Object.each
      // _.map -> Array#map
      // _.reduce -> Array#reduce
      // _.reduceRight -> Array#reduceRight
      // _.invoke is doing some strange tapdancing for passing methods directly...
      // _.sortedIndex ... no direct equivalent
      // _.toArray ... no direct equivalent for arguments... Array.create?
      // _.size ... no direct equivalent for objects... obj.keys().length?
      {
        name: 'detect',
        method: function(arr, fn, context) {
          return Array.SugarMethods['find'].method.call(arr, fn.bind(context));
        }
      },
      {
        name: 'select',
        method: function(arr, fn, context) {
          return Array.SugarMethods['findAll'].method.call(arr, fn.bind(context));
        }
      },
      {
        name: 'reject',
        method: function(arr, fn, context) {
          return Array.SugarMethods['exclude'].method.call(arr, fn.bind(context));
        }
      },
      {
        name: 'all',
        method: function(arr, fn, context) {
          return Array.SugarMethods['all'].method.call(arr, fn.bind(context));
        }
      },
      {
        name: 'any',
        method: function(arr, fn, context) {
          if(!fn) fn = function(a){ return a; };
          return Array.SugarMethods['some'].method.call(arr, fn.bind(context));
        }
      },
      /*
      {
        name: 'include',
        method: function(arr, val) {
          return Array.SugarMethods['has'].method.call(arr, val);
        }
      },
      */
      {
        name: 'pluck',
        method: function(arr, prop) {
          return Array.SugarMethods['map'].method.call(arr, prop);
        }
      },
      {
        name: 'max',
        method: function(arr, fn, context) {
          if(!fn) fn = function(a){ return a; };
          return Array.SugarMethods['max'].method.call(arr, fn.bind(context))[0];
        }
      },
      {
        name: 'min',
        method: function(arr, fn, context) {
          if(!fn) fn = function(a){ return a; };
          return Array.SugarMethods['min'].method.call(arr, fn.bind(context))[0];
        }
      },
      {
        name: 'sortBy',
        method: function(arr, fn, context) {
          return Array.SugarMethods['sortBy'].method.call(arr, fn.bind(context));
        }
      },
      {
        name: 'groupBy',
        method: function(arr, fn) {
          return Array.SugarMethods['groupBy'].method.call(arr, fn);
        }
      },

      // Objects
      // _.functions ... no direct equivalent
      // _.defaults ... no direct equivalent
      // _.tap ... no direct equivalent
      // _.isElement ... no direct equivalent
      // _.isArguments ... no direct equivalent
      // _.isNaN ... no direct equivalent
      // _.isNull ... no direct equivalent
      // _.isUndefined ... no direct equivalent
      {
        name: 'keys',
        method: function() {
          return Object.SugarMethods['keys'].method.apply(this, arguments);
        }
      },
      {
        name: 'values',
        method: function() {
          return Object.SugarMethods['values'].method.apply(this, arguments);
        }
      },
      {
        name: 'clone',
        method: function() {
          return Object.SugarMethods['clone'].method.apply(this, arguments);
        }
      },
      {
        name: 'isEqual',
        method: function(a, b) {
          // Unwrap chained Underscore objects and defer to their own
          // isEqual implementations when present.
          if (a && a._chain) a = a._wrapped;
          if (b && b._chain) b = b._wrapped;
          if (a && a.isEqual) return a.isEqual(b);
          if (b && b.isEqual) return b.isEqual(a);
          return Object.SugarMethods['equal'].method.apply(this, arguments);
        }
      },
      {
        name: 'isEmpty',
        method: function() {
          return Object.SugarMethods['isEmpty'].method.apply(this, arguments);
        }
      },
      {
        name: 'isArray',
        method: function(arr) {
          return Array.isArray(arr);
        }
      },
      {
        name: 'isFunction',
        method: function() {
          return Object.SugarMethods['isFunction'].method.apply(this, arguments);
        }
      },
      {
        name: 'isString',
        method: function() {
          return Object.SugarMethods['isString'].method.apply(this, arguments);
        }
      },
      {
        name: 'isNumber',
        method: function() {
          if(isNaN(arguments[0])) {
            // Sugar differs here as it's trying to stay aligned with Javascript and is
            // checking types only.
            return false;
          }
          return Object.SugarMethods['isNumber'].method.apply(this, arguments);
        }
      },
      {
        name: 'isBoolean',
        method: function() {
          return Object.SugarMethods['isBoolean'].method.apply(this, arguments);
        }
      },
      {
        name: 'isDate',
        method: function() {
          return Object.SugarMethods['isDate'].method.apply(this, arguments);
        }
      },
      {
        name: 'isRegExp',
        method: function() {
          return Object.SugarMethods['isRegExp'].method.apply(this, arguments);
        }
      },

      // Functions
      // _.bindAll ... no direct equivalent (similar to bindAsEventListener??)
      // _.memoize ... no direct equivalent
      // _.debounce ... no direct equivalent
      // _.once ... no direct equivalent.. is this not similar to memoize?
      // _.wrap ... no direct equivalent..
      // _.compose ... no direct equivalent.. math stuff
      {
        name: 'bind',
        method: function(fn) {
          var args = Array.prototype.slice.call(arguments, 1);
          return Function.prototype.bind.apply(fn, args);
        }
      },
      {
        name: 'after',
        method: function(num, fn) {
          return Function.prototype.after.apply(fn, [num]);
        }
      },
      {
        name: 'delay',
        method: function(fn) {
          var args = Array.prototype.slice.call(arguments, 1);
          return Function.prototype.delay.apply(fn, args);
        }
      },
      {
        name: 'defer',
        method: function(fn) {
          var args = Array.prototype.slice.call(arguments, 1);
          return Function.prototype.delay.apply(fn, [1].concat(args));
        }
      },
      {
        name: 'throttle',
        method: function(fn, wait) {
          return Function.prototype.lazy.apply(fn, [wait]);
        }
      },

      // Utility
      // _.noConflict ... no direct equivalent
      // _.identity ... no direct equivalent
      // _.mixin ... no direct equivalent
      // _.uniqueId ... no direct equivalent
      // _.template ... no direct equivalent
      // _.chain ... no direct equivalent
      // _.value ... no direct equivalent
      {
        name: 'times',
        method: function(n, fn) {
          return n.times(fn);
        }
      }
    ]
  }
];

// Installs every compatible wrapper onto the Underscore namespace.
// (Removed an unused `proto` local that was never assigned or read.)
var mapMethods = function() {
  CompatibleMethods.forEach(function(cm) {
    cm.methods.forEach(function(m) {
      _[m.name] = m.method;
    });
  });
}

mapMethods();
D1plo1d/Sugar
unit_tests/environments/underscore/adapter.js
JavaScript
mit
10,615
package aima.core.probability.util;

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import aima.core.probability.RandomVariable;
import aima.core.probability.domain.Domain;
import aima.core.probability.proposition.TermProposition;

/**
 * Default implementation of the RandomVariable interface.
 * 
 * Note: Also implements the TermProposition interface so its easy to use
 * RandomVariables in conjunction with propositions about them in the
 * Probability Model APIs.
 * 
 * @author Ciaran O'Reilly
 */
public class RandVar implements RandomVariable, TermProposition {
	// Fields are final: a RandVar's identity (its name) and its domain are
	// fixed at construction and the name is used for equals/hashCode, so
	// mutation would corrupt hash-based collections.
	private final String name;
	private final Domain domain;
	// As a TermProposition, the variable's scope is just itself.
	private final Set<RandomVariable> scope = new HashSet<RandomVariable>();

	/**
	 * Constructs a random variable with the given name and domain.
	 * 
	 * @param name
	 *            the variable's name; validated by
	 *            ProbUtil.checkValidRandomVariableName.
	 * @param domain
	 *            the set of possible values; must be non-null.
	 * @throws IllegalArgumentException
	 *             if the name is invalid or the domain is null.
	 */
	public RandVar(String name, Domain domain) {
		ProbUtil.checkValidRandomVariableName(name);
		if (null == domain) {
			throw new IllegalArgumentException(
					"Domain of RandomVariable must be specified.");
		}

		this.name = name;
		this.domain = domain;
		this.scope.add(this);
	}

	//
	// START-RandomVariable
	@Override
	public String getName() {
		return name;
	}

	@Override
	public Domain getDomain() {
		return domain;
	}

	// END-RandomVariable
	//

	//
	// START-TermProposition
	@Override
	public RandomVariable getTermVariable() {
		return this;
	}

	@Override
	public Set<RandomVariable> getScope() {
		return scope;
	}

	@Override
	public Set<RandomVariable> getUnboundScope() {
		return scope;
	}

	@Override
	public boolean holds(Map<RandomVariable, Object> possibleWorld) {
		// The proposition holds in a possible world simply if that world
		// assigns some value to this variable.
		return possibleWorld.containsKey(getTermVariable());
	}

	// END-TermProposition
	//

	@Override
	public boolean equals(Object o) {

		if (this == o) {
			return true;
		}
		if (!(o instanceof RandomVariable)) {
			return false;
		}

		// The name (not the name:domain combination) uniquely identifies a
		// Random Variable
		RandomVariable other = (RandomVariable) o;

		return this.name.equals(other.getName());
	}

	@Override
	public int hashCode() {
		// Consistent with equals: based on name only.
		return name.hashCode();
	}

	@Override
	public String toString() {
		return getName();
	}
}
aima-java/aima-java
aima-core/src/main/java/aima/core/probability/util/RandVar.java
Java
mit
2,171
package aima.core.probability.bayes.exact;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import aima.core.probability.CategoricalDistribution;
import aima.core.probability.Factor;
import aima.core.probability.RandomVariable;
import aima.core.probability.bayes.BayesInference;
import aima.core.probability.bayes.BayesianNetwork;
import aima.core.probability.bayes.FiniteNode;
import aima.core.probability.bayes.Node;
import aima.core.probability.proposition.AssignmentProposition;
import aima.core.probability.util.ProbabilityTable;

/**
 * Artificial Intelligence A Modern Approach (3rd Edition): Figure 14.11, page
 * 528.<br>
 * <br>
 * 
 * <pre>
 * function ELIMINATION-ASK(X, e, bn) returns a distribution over X
 *   inputs: X, the query variable
 *           e, observed values for variables E
 *           bn, a Bayesian network specifying joint distribution P(X<sub>1</sub>, ..., X<sub>n</sub>)
 *   
 *   factors <- []
 *   for each var in ORDER(bn.VARS) do
 *       factors <- [MAKE-FACTOR(var, e) | factors]
 *       if var is hidden variable the factors <- SUM-OUT(var, factors)
 *   return NORMALIZE(POINTWISE-PRODUCT(factors))
 * </pre>
 * 
 * Figure 14.11 The variable elimination algorithm for inference in Bayesian
 * networks. <br>
 * <br>
 * <b>Note:</b> The implementation has been extended to handle queries with
 * multiple variables. <br>
 * 
 * @author Ciaran O'Reilly
 */
public class EliminationAsk implements BayesInference {
	//
	// Identity factor (a single cell of probability 1.0) used purely to force
	// the final pointwise product into the order of the query variables X.
	private static final ProbabilityTable _identity = new ProbabilityTable(
			new double[] { 1.0 });

	public EliminationAsk() {

	}

	// function ELIMINATION-ASK(X, e, bn) returns a distribution over X
	/**
	 * The ELIMINATION-ASK algorithm in Figure 14.11.
	 * 
	 * @param X
	 *            the query variables.
	 * @param e
	 *            observed values for variables E.
	 * @param bn
	 *            a Bayes net with variables {X} &cup; E &cup; Y, where Y are
	 *            the hidden variables.
	 * @return a distribution over the query variables.
	 */
	public CategoricalDistribution eliminationAsk(final RandomVariable[] X,
			final AssignmentProposition[] e, final BayesianNetwork bn) {

		Set<RandomVariable> hidden = new HashSet<RandomVariable>();
		List<RandomVariable> VARS = new ArrayList<RandomVariable>();
		calculateVariables(X, e, bn, hidden, VARS);

		// factors <- []
		List<Factor> factors = new ArrayList<Factor>();
		// for each var in ORDER(bn.VARS) do
		for (RandomVariable var : order(bn, VARS)) {
			// factors <- [MAKE-FACTOR(var, e) | factors]
			// Note: prepend (index 0) to match the pseudocode's list cons.
			factors.add(0, makeFactor(var, e, bn));
			// if var is hidden variable then factors <- SUM-OUT(var, factors)
			if (hidden.contains(var)) {
				factors = sumOut(var, factors, bn);
			}
		}
		// return NORMALIZE(POINTWISE-PRODUCT(factors))
		Factor product = pointwiseProduct(factors);
		// Note: Want to ensure the order of the product matches the
		// query variables
		return ((ProbabilityTable) product.pointwiseProductPOS(_identity, X))
				.normalize();
	}

	//
	// START-BayesInference
	public CategoricalDistribution ask(final RandomVariable[] X,
			final AssignmentProposition[] observedEvidence,
			final BayesianNetwork bn) {
		return this.eliminationAsk(X, observedEvidence, bn);
	}

	// END-BayesInference
	//

	//
	// PROTECTED METHODS
	//
	/**
	 * <b>Note:</b>Override this method for a more efficient implementation as
	 * outlined in AIMA3e pgs. 527-28. Calculate the hidden variables from the
	 * Bayesian Network. The default implementation does not perform any of
	 * these.<br>
	 * <br>
	 * Two calculations to be performed here in order to optimize iteration over
	 * the Bayesian Network:<br>
	 * 1. Calculate the hidden variables to be enumerated over. An optimization
	 * (AIMA3e pg. 528) is to remove 'every variable that is not an ancestor of
	 * a query variable or evidence variable as it is irrelevant to the query'
	 * (i.e. sums to 1). 2. The subset of variables from the Bayesian Network to
	 * be retained after irrelevant hidden variables have been removed.
	 * 
	 * @param X
	 *            the query variables.
	 * @param e
	 *            observed values for variables E.
	 * @param bn
	 *            a Bayes net with variables {X} &cup; E &cup; Y, where Y are
	 *            the hidden variables.
	 * @param hidden
	 *            to be populated with the relevant hidden variables Y.
	 * @param bnVARS
	 *            to be populated with the subset of the random variables
	 *            comprising the Bayesian Network with any irrelevant hidden
	 *            variables removed.
	 */
	protected void calculateVariables(final RandomVariable[] X,
			final AssignmentProposition[] e, final BayesianNetwork bn,
			Set<RandomVariable> hidden, Collection<RandomVariable> bnVARS) {

		// Default: keep every network variable; hidden = all vars minus the
		// query variables and the evidence variables' scopes.
		bnVARS.addAll(bn.getVariablesInTopologicalOrder());
		hidden.addAll(bnVARS);
		for (RandomVariable x : X) {
			hidden.remove(x);
		}
		for (AssignmentProposition ap : e) {
			hidden.removeAll(ap.getScope());
		}

		return;
	}

	/**
	 * <b>Note:</b>Override this method for a more efficient implementation as
	 * outlined in AIMA3e pgs. 527-28. The default implementation does not
	 * perform any of these.<br>
	 * 
	 * @param bn
	 *            the Bayesian Network over which the query is being made. Note,
	 *            is necessary to provide this in order to be able to determine
	 *            the dependencies between variables.
	 * @param vars
	 *            a subset of the RandomVariables making up the Bayesian
	 *            Network, with any irrelevant hidden variables already removed.
	 * @return a possibly optimal ordering for the random variables to be
	 *         iterated over by the algorithm. For example, one fairly effective
	 *         ordering is a greedy one: eliminate whichever variable minimizes
	 *         the size of the next factor to be constructed.
	 */
	protected List<RandomVariable> order(BayesianNetwork bn,
			Collection<RandomVariable> vars) {
		// Note: Trivial Approach:
		// For simplicity just return in the reverse order received,
		// i.e. received will be the default topological order for
		// the Bayesian Network and we want to ensure the network
		// is iterated from bottom up to ensure when hidden variables
		// are come across all the factors dependent on them have
		// been seen so far.
		List<RandomVariable> order = new ArrayList<RandomVariable>(vars);
		Collections.reverse(order);

		return order;
	}

	//
	// PRIVATE METHODS
	//
	// Builds the factor for var's CPT, restricted by whatever evidence
	// assignments mention variables in that CPT.
	private Factor makeFactor(RandomVariable var, AssignmentProposition[] e,
			BayesianNetwork bn) {

		Node n = bn.getNode(var);
		if (!(n instanceof FiniteNode)) {
			throw new IllegalArgumentException(
					"Elimination-Ask only works with finite Nodes.");
		}
		FiniteNode fn = (FiniteNode) n;
		List<AssignmentProposition> evidence = new ArrayList<AssignmentProposition>();
		for (AssignmentProposition ap : e) {
			if (fn.getCPT().contains(ap.getTermVariable())) {
				evidence.add(ap);
			}
		}

		return fn.getCPT().getFactorFor(
				evidence.toArray(new AssignmentProposition[evidence.size()]));
	}

	// Multiplies together all factors mentioning var, sums var out of the
	// product, and returns that result alongside the untouched factors.
	private List<Factor> sumOut(RandomVariable var, List<Factor> factors,
			BayesianNetwork bn) {
		List<Factor> summedOutFactors = new ArrayList<Factor>();
		List<Factor> toMultiply = new ArrayList<Factor>();
		for (Factor f : factors) {
			if (f.contains(var)) {
				toMultiply.add(f);
			} else {
				// This factor does not contain the variable
				// so no need to sum out - see AIMA3e pg. 527.
				summedOutFactors.add(f);
			}
		}

		summedOutFactors.add(pointwiseProduct(toMultiply).sumOut(var));

		return summedOutFactors;
	}

	// Left-folds the pointwise product across the list of factors.
	// Precondition: factors is non-empty (guaranteed by the callers, which
	// always add at least one factor before invoking this).
	private Factor pointwiseProduct(List<Factor> factors) {

		Factor product = factors.get(0);
		for (int i = 1; i < factors.size(); i++) {
			product = product.pointwiseProduct(factors.get(i));
		}

		return product;
	}
}
aima-java/aima-java
aima-core/src/main/java/aima/core/probability/bayes/exact/EliminationAsk.java
Java
mit
8,132
//---------------------------------------------------------------------
// <copyright file="MessageReaderSettingsArgs.cs" company="Microsoft">
//      Copyright (C) Microsoft Corporation. All rights reserved. See License.txt in the project root for license information.
// </copyright>
//---------------------------------------------------------------------

namespace Microsoft.OData.Client
{
    using Microsoft.OData.Core;

    /// <summary>
    /// Arguments used to configure the odata message reader settings.
    /// </summary>
    public class MessageReaderSettingsArgs
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="MessageReaderSettingsArgs"/> class.
        /// </summary>
        /// <param name="settings">The settings.</param>
        public MessageReaderSettingsArgs(ODataMessageReaderSettingsBase settings)
        {
            // Rejects a null settings argument via WebUtil.CheckArgumentNull;
            // NOTE(review): presumably this throws ArgumentNullException - confirm.
            WebUtil.CheckArgumentNull(settings, "settings");

            this.Settings = settings;
        }

        /// <summary>
        /// Gets the settings.
        /// </summary>
        // Set only in the constructor; instances are effectively immutable.
        public ODataMessageReaderSettingsBase Settings { get; private set; }
    }
}
hotchandanisagar/odata.net
src/Microsoft.OData.Client/MessageReaderSettingsArgs.cs
C#
mit
1,175
/*@preserve * Tempus Dominus Bootstrap4 v5.0.0-alpha13 (https://tempusdominus.github.io/bootstrap-4/) * Copyright 2016-2017 Jonathan Peterson * Licensed under MIT (https://github.com/tempusdominus/bootstrap-3/blob/master/LICENSE) */ if (typeof jQuery === 'undefined') { throw new Error('Tempus Dominus Bootstrap4\'s requires jQuery. jQuery must be included before Tempus Dominus Bootstrap4\'s JavaScript.'); } +function ($) { var version = $.fn.jquery.split(' ')[0].split('.'); if ((version[0] < 2 && version[1] < 9) || (version[0] === 1 && version[1] === 9 && version[2] < 1) || (version[0] >= 4)) { throw new Error('Tempus Dominus Bootstrap4\'s requires at least jQuery v1.9.1 but less than v4.0.0'); } }(jQuery); if (typeof moment === 'undefined') { throw new Error('Tempus Dominus Bootstrap4\'s requires moment.js. Moment.js must be included before Tempus Dominus Bootstrap4\'s JavaScript.'); } var version = moment.version.split('.') if ((version[0] <= 2 && version[1] < 17) || (version[0] >= 3)) { throw new Error('Tempus Dominus Bootstrap4\'s requires at least moment.js v2.17.0 but less than v3.0.0'); } +function () { var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }; var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } // ReSharper disable once InconsistentNaming var DateTimePicker = function ($, moment) { // ReSharper disable InconsistentNaming var NAME = 'datetimepicker', VERSION = '5.0.0-alpha7', DATA_KEY = '' + NAME, EVENT_KEY = '.' 
+ DATA_KEY, EMIT_EVENT_KEY = DATA_KEY + '.', DATA_API_KEY = '.data-api', Selector = { DATA_TOGGLE: '[data-toggle="' + DATA_KEY + '"]' }, ClassName = { INPUT: NAME + '-input' }, Event = { CHANGE: 'change' + EVENT_KEY, BLUR: 'blur' + EVENT_KEY, KEYUP: 'keyup' + EVENT_KEY, KEYDOWN: 'keydown' + EVENT_KEY, FOCUS: 'focus' + EVENT_KEY, CLICK_DATA_API: 'click' + EVENT_KEY + DATA_API_KEY, //emitted UPDATE: EMIT_EVENT_KEY + 'update', ERROR: EMIT_EVENT_KEY + 'error', HIDE: EMIT_EVENT_KEY + 'hide', SHOW: EMIT_EVENT_KEY + 'show' }, Default = { timeZone: '', format: false, dayViewHeaderFormat: 'MMMM YYYY', extraFormats: false, stepping: 1, minDate: false, maxDate: false, useCurrent: true, collapse: true, locale: moment.locale(), defaultDate: false, disabledDates: false, enabledDates: false, icons: { time: 'fa fa-clock-o', date: 'fa fa-calendar', up: 'fa fa-arrow-up', down: 'fa fa-arrow-down', previous: 'fa fa-chevron-left', next: 'fa fa-chevron-right', today: 'fa fa-calendar-check-o', clear: 'fa fa-delete', close: 'fa fa-times' }, tooltips: { today: 'Go to today', clear: 'Clear selection', close: 'Close the picker', selectMonth: 'Select Month', prevMonth: 'Previous Month', nextMonth: 'Next Month', selectYear: 'Select Year', prevYear: 'Previous Year', nextYear: 'Next Year', selectDecade: 'Select Decade', prevDecade: 'Previous Decade', nextDecade: 'Next Decade', prevCentury: 'Previous Century', nextCentury: 'Next Century', pickHour: 'Pick Hour', incrementHour: 'Increment Hour', decrementHour: 'Decrement Hour', pickMinute: 'Pick Minute', incrementMinute: 'Increment Minute', decrementMinute: 'Decrement Minute', pickSecond: 'Pick Second', incrementSecond: 'Increment Second', decrementSecond: 'Decrement Second', togglePeriod: 'Toggle Period', selectTime: 'Select Time', selectDate: 'Select Date' }, useStrict: false, sideBySide: false, daysOfWeekDisabled: false, calendarWeeks: false, viewMode: 'days', toolbarPlacement: 'default', buttons: { showToday: false, showClear: false, showClose: 
false }, widgetPositioning: { horizontal: 'auto', vertical: 'auto' }, widgetParent: null, ignoreReadonly: false, keepOpen: false, focusOnShow: true, inline: false, keepInvalid: false, keyBinds: { up: function up() { if (!this.widget) { return false; } var d = this._dates[0] || this.getMoment(); if (this.widget.find('.datepicker').is(':visible')) { this.date(d.clone().subtract(7, 'd')); } else { this.date(d.clone().add(this.stepping(), 'm')); } return true; }, down: function down() { if (!this.widget) { this.show(); return false; } var d = this._dates[0] || this.getMoment(); if (this.widget.find('.datepicker').is(':visible')) { this.date(d.clone().add(7, 'd')); } else { this.date(d.clone().subtract(this.stepping(), 'm')); } return true; }, 'control up': function controlUp() { if (!this.widget) { return false; } var d = this._dates[0] || this.getMoment(); if (this.widget.find('.datepicker').is(':visible')) { this.date(d.clone().subtract(1, 'y')); } else { this.date(d.clone().add(1, 'h')); } return true; }, 'control down': function controlDown() { if (!this.widget) { return false; } var d = this._dates[0] || this.getMoment(); if (this.widget.find('.datepicker').is(':visible')) { this.date(d.clone().add(1, 'y')); } else { this.date(d.clone().subtract(1, 'h')); } return true; }, left: function left() { if (!this.widget) { return false; } var d = this._dates[0] || this.getMoment(); if (this.widget.find('.datepicker').is(':visible')) { this.date(d.clone().subtract(1, 'd')); } return true; }, right: function right() { if (!this.widget) { return false; } var d = this._dates[0] || this.getMoment(); if (this.widget.find('.datepicker').is(':visible')) { this.date(d.clone().add(1, 'd')); } return true; }, pageUp: function pageUp() { if (!this.widget) { return false; } var d = this._dates[0] || this.getMoment(); if (this.widget.find('.datepicker').is(':visible')) { this.date(d.clone().subtract(1, 'M')); } return true; }, pageDown: function pageDown() { if (!this.widget) { return 
false; } var d = this._dates[0] || this.getMoment(); if (this.widget.find('.datepicker').is(':visible')) { this.date(d.clone().add(1, 'M')); } return true; }, enter: function enter() { this.hide(); return true; }, escape: function escape() { if (!this.widget) { return false; } this.hide(); return true; }, 'control space': function controlSpace() { if (!this.widget) { return false; } if (this.widget.find('.timepicker').is(':visible')) { this.widget.find('.btn[data-action="togglePeriod"]').click(); } return true; }, t: function t() { this.date(this.getMoment()); return true; }, 'delete': function _delete() { if (!this.widget) { return false; } this.clear(); return true; } }, debug: false, allowInputToggle: false, disabledTimeIntervals: false, disabledHours: false, enabledHours: false, viewDate: false, allowMultidate: false, multidateSeparator: ',' }, DatePickerModes = [{ CLASS_NAME: 'days', NAV_FUNCTION: 'M', NAV_STEP: 1 }, { CLASS_NAME: 'months', NAV_FUNCTION: 'y', NAV_STEP: 1 }, { CLASS_NAME: 'years', NAV_FUNCTION: 'y', NAV_STEP: 10 }, { CLASS_NAME: 'decades', NAV_FUNCTION: 'y', NAV_STEP: 100 }], KeyMap = { 'up': 38, 38: 'up', 'down': 40, 40: 'down', 'left': 37, 37: 'left', 'right': 39, 39: 'right', 'tab': 9, 9: 'tab', 'escape': 27, 27: 'escape', 'enter': 13, 13: 'enter', 'pageUp': 33, 33: 'pageUp', 'pageDown': 34, 34: 'pageDown', 'shift': 16, 16: 'shift', 'control': 17, 17: 'control', 'space': 32, 32: 'space', 't': 84, 84: 't', 'delete': 46, 46: 'delete' }, ViewModes = ['times', 'days', 'months', 'years', 'decades'], keyState = {}, keyPressHandled = {}; var MinViewModeNumber = 0; // ReSharper restore InconsistentNaming // ReSharper disable once DeclarationHides // ReSharper disable once InconsistentNaming var DateTimePicker = function () { /** @namespace eData.dateOptions */ /** @namespace moment.tz */ function DateTimePicker(element, options) { _classCallCheck(this, DateTimePicker); this._options = this._getOptions(options); this._element = element; this._dates = 
[]; this._datesFormatted = []; this._viewDate = null; this.unset = true; this.component = false; this.widget = false; this.use24Hours = null; this.actualFormat = null; this.parseFormats = null; this.currentViewMode = null; this._int(); } /** * @return {string} */ //private DateTimePicker.prototype._int = function _int() { var targetInput = this._element.data('target-input'); if (this._element.is('input')) { this.input = this._element; } else if (targetInput !== undefined) { if (targetInput === 'nearest') { this.input = this._element.find('input'); } else { this.input = $(targetInput); } } this._dates = []; this._dates[0] = this.getMoment(); this._viewDate = this.getMoment().clone(); $.extend(true, this._options, this._dataToOptions()); this.options(this._options); this._initFormatting(); if (this.input !== undefined && this.input.is('input') && this.input.val().trim().length !== 0) { this._setValue(this._parseInputDate(this.input.val().trim()), 0); } else if (this._options.defaultDate && this.input !== undefined && this.input.attr('placeholder') === undefined) { this._setValue(this._options.defaultDate, 0); } if (this._options.inline) { this.show(); } }; DateTimePicker.prototype._update = function _update() { if (!this.widget) { return; } this._fillDate(); this._fillTime(); }; DateTimePicker.prototype._setValue = function _setValue(targetMoment, index) { var oldDate = this.unset ? 
null : this._dates[index]; var outpValue = ''; // case of calling setValue(null or false) if (!targetMoment) { if (!this._options.allowMultidate || this._dates.length === 1) { this.unset = true; this._dates = []; this._datesFormatted = []; } else { outpValue = this._element.data('date') + ','; outpValue = outpValue.replace(oldDate.format(this.actualFormat) + ',', '').replace(',,', '').replace(/,\s*$/, ''); this._dates.splice(index, 1); this._datesFormatted.splice(index, 1); } if (this.input !== undefined) { this.input.val(outpValue); this.input.trigger('input'); } this._element.data('date', outpValue); this._notifyEvent({ type: DateTimePicker.Event.CHANGE, date: false, oldDate: oldDate }); this._update(); return; } targetMoment = targetMoment.clone().locale(this._options.locale); if (this._hasTimeZone()) { targetMoment.tz(this._options.timeZone); } if (this._options.stepping !== 1) { targetMoment.minutes(Math.round(targetMoment.minutes() / this._options.stepping) * this._options.stepping).seconds(0); } if (this._isValid(targetMoment)) { this._dates[index] = targetMoment; this._datesFormatted[index] = targetMoment.format('YYYY-MM-DD'); this._viewDate = targetMoment.clone(); if (this._options.allowMultidate && this._dates.length > 1) { for (var i = 0; i < this._dates.length; i++) { outpValue += '' + this._dates[i].format(this.actualFormat) + this._options.multidateSeparator; } outpValue = outpValue.replace(/,\s*$/, ''); } else { outpValue = this._dates[index].format(this.actualFormat); } if (this.input !== undefined) { this.input.val(outpValue); this.input.trigger('input'); } this._element.data('date', outpValue); this.unset = false; this._update(); this._notifyEvent({ type: DateTimePicker.Event.CHANGE, date: this._dates[index].clone(), oldDate: oldDate }); } else { if (!this._options.keepInvalid) { if (this.input !== undefined) { this.input.val('' + (this.unset ? 
'' : this._dates[index].format(this.actualFormat))); this.input.trigger('input'); } } else { this._notifyEvent({ type: DateTimePicker.Event.CHANGE, date: targetMoment, oldDate: oldDate }); } this._notifyEvent({ type: DateTimePicker.Event.ERROR, date: targetMoment, oldDate: oldDate }); } }; DateTimePicker.prototype._change = function _change(e) { var val = $(e.target).val().trim(), parsedDate = val ? this._parseInputDate(val) : null; this._setValue(parsedDate); e.stopImmediatePropagation(); return false; }; //noinspection JSMethodCanBeStatic DateTimePicker.prototype._getOptions = function _getOptions(options) { options = $.extend(true, {}, Default, options); return options; }; DateTimePicker.prototype._hasTimeZone = function _hasTimeZone() { return moment.tz !== undefined && this._options.timeZone !== undefined && this._options.timeZone !== null && this._options.timeZone !== ''; }; DateTimePicker.prototype._isEnabled = function _isEnabled(granularity) { if (typeof granularity !== 'string' || granularity.length > 1) { throw new TypeError('isEnabled expects a single character string parameter'); } switch (granularity) { case 'y': return this.actualFormat.indexOf('Y') !== -1; case 'M': return this.actualFormat.indexOf('M') !== -1; case 'd': return this.actualFormat.toLowerCase().indexOf('d') !== -1; case 'h': case 'H': return this.actualFormat.toLowerCase().indexOf('h') !== -1; case 'm': return this.actualFormat.indexOf('m') !== -1; case 's': return this.actualFormat.indexOf('s') !== -1; default: return false; } }; DateTimePicker.prototype._hasTime = function _hasTime() { return this._isEnabled('h') || this._isEnabled('m') || this._isEnabled('s'); }; DateTimePicker.prototype._hasDate = function _hasDate() { return this._isEnabled('y') || this._isEnabled('M') || this._isEnabled('d'); }; DateTimePicker.prototype._dataToOptions = function _dataToOptions() { var eData = this._element.data(); var dataOptions = {}; if (eData.dateOptions && eData.dateOptions instanceof 
Object) { dataOptions = $.extend(true, dataOptions, eData.dateOptions); } $.each(this._options, function (key) { var attributeName = 'date' + key.charAt(0).toUpperCase() + key.slice(1); //todo data api key if (eData[attributeName] !== undefined) { dataOptions[key] = eData[attributeName]; } else { delete dataOptions[key]; } }); return dataOptions; }; DateTimePicker.prototype._notifyEvent = function _notifyEvent(e) { if (e.type === DateTimePicker.Event.CHANGE && e.date && e.date.isSame(e.oldDate) || !e.date && !e.oldDate) { return; } this._element.trigger(e); }; DateTimePicker.prototype._viewUpdate = function _viewUpdate(e) { if (e === 'y') { e = 'YYYY'; } this._notifyEvent({ type: DateTimePicker.Event.UPDATE, change: e, viewDate: this._viewDate.clone() }); }; DateTimePicker.prototype._showMode = function _showMode(dir) { if (!this.widget) { return; } if (dir) { this.currentViewMode = Math.max(MinViewModeNumber, Math.min(3, this.currentViewMode + dir)); } this.widget.find('.datepicker > div').hide().filter('.datepicker-' + DatePickerModes[this.currentViewMode].CLASS_NAME).show(); }; DateTimePicker.prototype._isInDisabledDates = function _isInDisabledDates(testDate) { return this._options.disabledDates[testDate.format('YYYY-MM-DD')] === true; }; DateTimePicker.prototype._isInEnabledDates = function _isInEnabledDates(testDate) { return this._options.enabledDates[testDate.format('YYYY-MM-DD')] === true; }; DateTimePicker.prototype._isInDisabledHours = function _isInDisabledHours(testDate) { return this._options.disabledHours[testDate.format('H')] === true; }; DateTimePicker.prototype._isInEnabledHours = function _isInEnabledHours(testDate) { return this._options.enabledHours[testDate.format('H')] === true; }; DateTimePicker.prototype._isValid = function _isValid(targetMoment, granularity) { if (!targetMoment.isValid()) { return false; } if (this._options.disabledDates && granularity === 'd' && this._isInDisabledDates(targetMoment)) { return false; } if 
(this._options.enabledDates && granularity === 'd' && !this._isInEnabledDates(targetMoment)) { return false; } if (this._options.minDate && targetMoment.isBefore(this._options.minDate, granularity)) { return false; } if (this._options.maxDate && targetMoment.isAfter(this._options.maxDate, granularity)) { return false; } if (this._options.daysOfWeekDisabled && granularity === 'd' && this._options.daysOfWeekDisabled.indexOf(targetMoment.day()) !== -1) { return false; } if (this._options.disabledHours && (granularity === 'h' || granularity === 'm' || granularity === 's') && this._isInDisabledHours(targetMoment)) { return false; } if (this._options.enabledHours && (granularity === 'h' || granularity === 'm' || granularity === 's') && !this._isInEnabledHours(targetMoment)) { return false; } if (this._options.disabledTimeIntervals && (granularity === 'h' || granularity === 'm' || granularity === 's')) { var found = false; $.each(this._options.disabledTimeIntervals, function () { if (targetMoment.isBetween(this[0], this[1])) { found = true; return false; } }); if (found) { return false; } } return true; }; DateTimePicker.prototype._parseInputDate = function _parseInputDate(inputDate) { if (this._options.parseInputDate === undefined) { if (!moment.isMoment(inputDate)) { inputDate = this.getMoment(inputDate); } } else { inputDate = this._options.parseInputDate(inputDate); } //inputDate.locale(this.options.locale); return inputDate; }; DateTimePicker.prototype._keydown = function _keydown(e) { var handler = null, index = void 0, index2 = void 0, keyBindKeys = void 0, allModifiersPressed = void 0; var pressedKeys = [], pressedModifiers = {}, currentKey = e.which, pressed = 'p'; keyState[currentKey] = pressed; for (index in keyState) { if (keyState.hasOwnProperty(index) && keyState[index] === pressed) { pressedKeys.push(index); if (parseInt(index, 10) !== currentKey) { pressedModifiers[index] = true; } } } for (index in this._options.keyBinds) { if 
(this._options.keyBinds.hasOwnProperty(index) && typeof this._options.keyBinds[index] === 'function') { keyBindKeys = index.split(' '); if (keyBindKeys.length === pressedKeys.length && KeyMap[currentKey] === keyBindKeys[keyBindKeys.length - 1]) { allModifiersPressed = true; for (index2 = keyBindKeys.length - 2; index2 >= 0; index2--) { if (!(KeyMap[keyBindKeys[index2]] in pressedModifiers)) { allModifiersPressed = false; break; } } if (allModifiersPressed) { handler = this._options.keyBinds[index]; break; } } } } if (handler) { if (handler.call(this.widget)) { e.stopPropagation(); e.preventDefault(); } } }; //noinspection JSMethodCanBeStatic,SpellCheckingInspection DateTimePicker.prototype._keyup = function _keyup(e) { keyState[e.which] = 'r'; if (keyPressHandled[e.which]) { keyPressHandled[e.which] = false; e.stopPropagation(); e.preventDefault(); } }; DateTimePicker.prototype._indexGivenDates = function _indexGivenDates(givenDatesArray) { // Store given enabledDates and disabledDates as keys. // This way we can check their existence in O(1) time instead of looping through whole array. // (for example: options.enabledDates['2014-02-27'] === true) var givenDatesIndexed = {}, self = this; $.each(givenDatesArray, function () { var dDate = self._parseInputDate(this); if (dDate.isValid()) { givenDatesIndexed[dDate.format('YYYY-MM-DD')] = true; } }); return Object.keys(givenDatesIndexed).length ? givenDatesIndexed : false; }; DateTimePicker.prototype._indexGivenHours = function _indexGivenHours(givenHoursArray) { // Store given enabledHours and disabledHours as keys. // This way we can check their existence in O(1) time instead of looping through whole array. // (for example: options.enabledHours['2014-02-27'] === true) var givenHoursIndexed = {}; $.each(givenHoursArray, function () { givenHoursIndexed[this] = true; }); return Object.keys(givenHoursIndexed).length ? 
givenHoursIndexed : false; }; DateTimePicker.prototype._initFormatting = function _initFormatting() { var format = this._options.format || 'L LT', self = this; this.actualFormat = format.replace(/(\[[^\[]*])|(\\)?(LTS|LT|LL?L?L?|l{1,4})/g, function (formatInput) { return self._dates[0].localeData().longDateFormat(formatInput) || formatInput; //todo taking the first date should be ok }); this.parseFormats = this._options.extraFormats ? this._options.extraFormats.slice() : []; if (this.parseFormats.indexOf(format) < 0 && this.parseFormats.indexOf(this.actualFormat) < 0) { this.parseFormats.push(this.actualFormat); } this.use24Hours = this.actualFormat.toLowerCase().indexOf('a') < 1 && this.actualFormat.replace(/\[.*?]/g, '').indexOf('h') < 1; if (this._isEnabled('y')) { MinViewModeNumber = 2; } if (this._isEnabled('M')) { MinViewModeNumber = 1; } if (this._isEnabled('d')) { MinViewModeNumber = 0; } this.currentViewMode = Math.max(MinViewModeNumber, this.currentViewMode); if (!this.unset) { this._setValue(this._dates[0], 0); } }; DateTimePicker.prototype._getLastPickedDate = function _getLastPickedDate() { return this._dates[this._getLastPickedDateIndex()]; }; DateTimePicker.prototype._getLastPickedDateIndex = function _getLastPickedDateIndex() { return this._dates.length - 1; }; //public DateTimePicker.prototype.getMoment = function getMoment(d) { var returnMoment = void 0; if (d === undefined || d === null) { returnMoment = moment(); //TODO should this use format? and locale? 
} else if (this._hasTimeZone()) { // There is a string to parse and a default time zone // parse with the tz function which takes a default time zone if it is not in the format string returnMoment = moment.tz(d, this.parseFormats, this._options.useStrict, this._options.timeZone); } else { returnMoment = moment(d, this.parseFormats, this._options.useStrict); } if (this._hasTimeZone()) { returnMoment.tz(this._options.timeZone); } return returnMoment; }; DateTimePicker.prototype.toggle = function toggle() { return this.widget ? this.hide() : this.show(); }; DateTimePicker.prototype.ignoreReadonly = function ignoreReadonly(_ignoreReadonly) { if (arguments.length === 0) { return this._options.ignoreReadonly; } if (typeof _ignoreReadonly !== 'boolean') { throw new TypeError('ignoreReadonly () expects a boolean parameter'); } this._options.ignoreReadonly = _ignoreReadonly; }; DateTimePicker.prototype.options = function options(newOptions) { if (arguments.length === 0) { return $.extend(true, {}, this._options); } if (!(newOptions instanceof Object)) { throw new TypeError('options() this.options parameter should be an object'); } $.extend(true, this._options, newOptions); var self = this; $.each(this._options, function (key, value) { if (self[key] !== undefined) { self[key](value); } }); }; DateTimePicker.prototype.date = function date(newDate, index) { index = index || 0; if (arguments.length === 0) { if (this.unset) { return null; } if (this._options.allowMultidate) { return this._dates.join(this._options.multidateSeparator); } else { return this._dates[index].clone(); } } if (newDate !== null && typeof newDate !== 'string' && !moment.isMoment(newDate) && !(newDate instanceof Date)) { throw new TypeError('date() parameter must be one of [null, string, moment or Date]'); } this._setValue(newDate === null ? 
null : this._parseInputDate(newDate), index); }; DateTimePicker.prototype.format = function format(newFormat) { ///<summary>test su</summary> ///<param name="newFormat">info about para</param> ///<returns type="string|boolean">returns foo</returns> if (arguments.length === 0) { return this._options.format; } if (typeof newFormat !== 'string' && (typeof newFormat !== 'boolean' || newFormat !== false)) { throw new TypeError('format() expects a string or boolean:false parameter ' + newFormat); } this._options.format = newFormat; if (this.actualFormat) { this._initFormatting(); // reinitialize formatting } }; DateTimePicker.prototype.timeZone = function timeZone(newZone) { if (arguments.length === 0) { return this._options.timeZone; } if (typeof newZone !== 'string') { throw new TypeError('newZone() expects a string parameter'); } this._options.timeZone = newZone; }; DateTimePicker.prototype.dayViewHeaderFormat = function dayViewHeaderFormat(newFormat) { if (arguments.length === 0) { return this._options.dayViewHeaderFormat; } if (typeof newFormat !== 'string') { throw new TypeError('dayViewHeaderFormat() expects a string parameter'); } this._options.dayViewHeaderFormat = newFormat; }; DateTimePicker.prototype.extraFormats = function extraFormats(formats) { if (arguments.length === 0) { return this._options.extraFormats; } if (formats !== false && !(formats instanceof Array)) { throw new TypeError('extraFormats() expects an array or false parameter'); } this._options.extraFormats = formats; if (this.parseFormats) { this._initFormatting(); // reinit formatting } }; DateTimePicker.prototype.disabledDates = function disabledDates(dates) { if (arguments.length === 0) { return this._options.disabledDates ? 
$.extend({}, this._options.disabledDates) : this._options.disabledDates; } if (!dates) { this._options.disabledDates = false; this._update(); return true; } if (!(dates instanceof Array)) { throw new TypeError('disabledDates() expects an array parameter'); } this._options.disabledDates = this._indexGivenDates(dates); this._options.enabledDates = false; this._update(); }; DateTimePicker.prototype.enabledDates = function enabledDates(dates) { if (arguments.length === 0) { return this._options.enabledDates ? $.extend({}, this._options.enabledDates) : this._options.enabledDates; } if (!dates) { this._options.enabledDates = false; this._update(); return true; } if (!(dates instanceof Array)) { throw new TypeError('enabledDates() expects an array parameter'); } this._options.enabledDates = this._indexGivenDates(dates); this._options.disabledDates = false; this._update(); }; DateTimePicker.prototype.daysOfWeekDisabled = function daysOfWeekDisabled(_daysOfWeekDisabled) { if (arguments.length === 0) { return this._options.daysOfWeekDisabled.splice(0); } if (typeof _daysOfWeekDisabled === 'boolean' && !_daysOfWeekDisabled) { this._options.daysOfWeekDisabled = false; this._update(); return true; } if (!(_daysOfWeekDisabled instanceof Array)) { throw new TypeError('daysOfWeekDisabled() expects an array parameter'); } this._options.daysOfWeekDisabled = _daysOfWeekDisabled.reduce(function (previousValue, currentValue) { currentValue = parseInt(currentValue, 10); if (currentValue > 6 || currentValue < 0 || isNaN(currentValue)) { return previousValue; } if (previousValue.indexOf(currentValue) === -1) { previousValue.push(currentValue); } return previousValue; }, []).sort(); if (this._options.useCurrent && !this._options.keepInvalid) { for (var i = 0; i < this._dates.length; i++) { var tries = 0; while (!this._isValid(this._dates[i], 'd')) { this._dates[i].add(1, 'd'); if (tries === 31) { throw 'Tried 31 times to find a valid date'; } tries++; } this._setValue(this._dates[i], i); } 
} this._update(); }; DateTimePicker.prototype.maxDate = function maxDate(_maxDate) { if (arguments.length === 0) { return this._options.maxDate ? this._options.maxDate.clone() : this._options.maxDate; } if (typeof _maxDate === 'boolean' && _maxDate === false) { this._options.maxDate = false; this._update(); return true; } if (typeof _maxDate === 'string') { if (_maxDate === 'now' || _maxDate === 'moment') { _maxDate = this.getMoment(); } } var parsedDate = this._parseInputDate(_maxDate); if (!parsedDate.isValid()) { throw new TypeError('maxDate() Could not parse date parameter: ' + _maxDate); } if (this._options.minDate && parsedDate.isBefore(this._options.minDate)) { throw new TypeError('maxDate() date parameter is before this.options.minDate: ' + parsedDate.format(this.actualFormat)); } this._options.maxDate = parsedDate; for (var i = 0; i < this._dates.length; i++) { if (this._options.useCurrent && !this._options.keepInvalid && this._dates[i].isAfter(_maxDate)) { this._setValue(this._options.maxDate, i); } } if (this._viewDate.isAfter(parsedDate)) { this._viewDate = parsedDate.clone().subtract(this._options.stepping, 'm'); } this._update(); }; DateTimePicker.prototype.minDate = function minDate(_minDate) { if (arguments.length === 0) { return this._options.minDate ? 
this._options.minDate.clone() : this._options.minDate; } if (typeof _minDate === 'boolean' && _minDate === false) { this._options.minDate = false; this._update(); return true; } if (typeof _minDate === 'string') { if (_minDate === 'now' || _minDate === 'moment') { _minDate = this.getMoment(); } } var parsedDate = this._parseInputDate(_minDate); if (!parsedDate.isValid()) { throw new TypeError('minDate() Could not parse date parameter: ' + _minDate); } if (this._options.maxDate && parsedDate.isAfter(this._options.maxDate)) { throw new TypeError('minDate() date parameter is after this.options.maxDate: ' + parsedDate.format(this.actualFormat)); } this._options.minDate = parsedDate; for (var i = 0; i < this._dates.length; i++) { if (this._options.useCurrent && !this._options.keepInvalid && this._dates[i].isBefore(_minDate)) { this._setValue(this._options.minDate, i); } } if (this._viewDate.isBefore(parsedDate)) { this._viewDate = parsedDate.clone().add(this._options.stepping, 'm'); } this._update(); }; DateTimePicker.prototype.defaultDate = function defaultDate(_defaultDate) { if (arguments.length === 0) { return this._options.defaultDate ? 
this._options.defaultDate.clone() : this._options.defaultDate; } if (!_defaultDate) { this._options.defaultDate = false; return true; } if (typeof _defaultDate === 'string') { if (_defaultDate === 'now' || _defaultDate === 'moment') { _defaultDate = this.getMoment(); } else { _defaultDate = this.getMoment(_defaultDate); } } var parsedDate = this._parseInputDate(_defaultDate); if (!parsedDate.isValid()) { throw new TypeError('defaultDate() Could not parse date parameter: ' + _defaultDate); } if (!this._isValid(parsedDate)) { throw new TypeError('defaultDate() date passed is invalid according to component setup validations'); } this._options.defaultDate = parsedDate; if (this._options.defaultDate && this._options.inline || this.input !== undefined && this.input.val().trim() === '') { this._setValue(this._options.defaultDate, 0); } }; DateTimePicker.prototype.locale = function locale(_locale) { if (arguments.length === 0) { return this._options.locale; } if (!moment.localeData(_locale)) { throw new TypeError('locale() locale ' + _locale + ' is not loaded from moment locales!'); } for (var i = 0; i < this._dates.length; i++) { this._dates[i].locale(this._options.locale); } this._viewDate.locale(this._options.locale); if (this.actualFormat) { this._initFormatting(); // reinitialize formatting } if (this.widget) { this.hide(); this.show(); } }; DateTimePicker.prototype.stepping = function stepping(_stepping) { if (arguments.length === 0) { return this._options.stepping; } _stepping = parseInt(_stepping, 10); if (isNaN(_stepping) || _stepping < 1) { _stepping = 1; } this._options.stepping = _stepping; }; DateTimePicker.prototype.useCurrent = function useCurrent(_useCurrent) { var useCurrentOptions = ['year', 'month', 'day', 'hour', 'minute']; if (arguments.length === 0) { return this._options.useCurrent; } if (typeof _useCurrent !== 'boolean' && typeof _useCurrent !== 'string') { throw new TypeError('useCurrent() expects a boolean or string parameter'); } if (typeof 
_useCurrent === 'string' && useCurrentOptions.indexOf(_useCurrent.toLowerCase()) === -1) { throw new TypeError('useCurrent() expects a string parameter of ' + useCurrentOptions.join(', ')); } this._options.useCurrent = _useCurrent; }; DateTimePicker.prototype.collapse = function collapse(_collapse) { if (arguments.length === 0) { return this._options.collapse; } if (typeof _collapse !== 'boolean') { throw new TypeError('collapse() expects a boolean parameter'); } if (this._options.collapse === _collapse) { return true; } this._options.collapse = _collapse; if (this.widget) { this.hide(); this.show(); } }; DateTimePicker.prototype.icons = function icons(_icons) { if (arguments.length === 0) { return $.extend({}, this._options.icons); } if (!(_icons instanceof Object)) { throw new TypeError('icons() expects parameter to be an Object'); } $.extend(this._options.icons, _icons); if (this.widget) { this.hide(); this.show(); } }; DateTimePicker.prototype.tooltips = function tooltips(_tooltips) { if (arguments.length === 0) { return $.extend({}, this._options.tooltips); } if (!(_tooltips instanceof Object)) { throw new TypeError('tooltips() expects parameter to be an Object'); } $.extend(this._options.tooltips, _tooltips); if (this.widget) { this.hide(); this.show(); } }; DateTimePicker.prototype.useStrict = function useStrict(_useStrict) { if (arguments.length === 0) { return this._options.useStrict; } if (typeof _useStrict !== 'boolean') { throw new TypeError('useStrict() expects a boolean parameter'); } this._options.useStrict = _useStrict; }; DateTimePicker.prototype.sideBySide = function sideBySide(_sideBySide) { if (arguments.length === 0) { return this._options.sideBySide; } if (typeof _sideBySide !== 'boolean') { throw new TypeError('sideBySide() expects a boolean parameter'); } this._options.sideBySide = _sideBySide; if (this.widget) { this.hide(); this.show(); } }; DateTimePicker.prototype.viewMode = function viewMode(_viewMode) { if (arguments.length === 0) { 
return this._options.viewMode; } if (typeof _viewMode !== 'string') { throw new TypeError('viewMode() expects a string parameter'); } if (DateTimePicker.ViewModes.indexOf(_viewMode) === -1) { throw new TypeError('viewMode() parameter must be one of (' + DateTimePicker.ViewModes.join(', ') + ') value'); } this._options.viewMode = _viewMode; this.currentViewMode = Math.max(DateTimePicker.ViewModes.indexOf(_viewMode) - 1, DateTimePicker.MinViewModeNumber); this._showMode(); }; DateTimePicker.prototype.calendarWeeks = function calendarWeeks(_calendarWeeks) { if (arguments.length === 0) { return this._options.calendarWeeks; } if (typeof _calendarWeeks !== 'boolean') { throw new TypeError('calendarWeeks() expects parameter to be a boolean value'); } this._options.calendarWeeks = _calendarWeeks; this._update(); }; DateTimePicker.prototype.buttons = function buttons(_buttons) { if (arguments.length === 0) { return $.extend({}, this._options.buttons); } if (!(_buttons instanceof Object)) { throw new TypeError('buttons() expects parameter to be an Object'); } $.extend(this._options.buttons, _buttons); if (typeof this._options.buttons.showToday !== 'boolean') { throw new TypeError('buttons.showToday expects a boolean parameter'); } if (typeof this._options.buttons.showClear !== 'boolean') { throw new TypeError('buttons.showClear expects a boolean parameter'); } if (typeof this._options.buttons.showClose !== 'boolean') { throw new TypeError('buttons.showClose expects a boolean parameter'); } if (this.widget) { this.hide(); this.show(); } }; DateTimePicker.prototype.keepOpen = function keepOpen(_keepOpen) { if (arguments.length === 0) { return this._options.keepOpen; } if (typeof _keepOpen !== 'boolean') { throw new TypeError('keepOpen() expects a boolean parameter'); } this._options.keepOpen = _keepOpen; }; DateTimePicker.prototype.focusOnShow = function focusOnShow(_focusOnShow) { if (arguments.length === 0) { return this._options.focusOnShow; } if (typeof _focusOnShow !== 
'boolean') { throw new TypeError('focusOnShow() expects a boolean parameter'); } this._options.focusOnShow = _focusOnShow; }; DateTimePicker.prototype.inline = function inline(_inline) { if (arguments.length === 0) { return this._options.inline; } if (typeof _inline !== 'boolean') { throw new TypeError('inline() expects a boolean parameter'); } this._options.inline = _inline; }; DateTimePicker.prototype.clear = function clear() { this._setValue(null); //todo }; DateTimePicker.prototype.keyBinds = function keyBinds(_keyBinds) { if (arguments.length === 0) { return this._options.keyBinds; } this._options.keyBinds = _keyBinds; }; DateTimePicker.prototype.debug = function debug(_debug) { if (typeof _debug !== 'boolean') { throw new TypeError('debug() expects a boolean parameter'); } this._options.debug = _debug; }; DateTimePicker.prototype.allowInputToggle = function allowInputToggle(_allowInputToggle) { if (arguments.length === 0) { return this._options.allowInputToggle; } if (typeof _allowInputToggle !== 'boolean') { throw new TypeError('allowInputToggle() expects a boolean parameter'); } this._options.allowInputToggle = _allowInputToggle; }; DateTimePicker.prototype.keepInvalid = function keepInvalid(_keepInvalid) { if (arguments.length === 0) { return this._options.keepInvalid; } if (typeof _keepInvalid !== 'boolean') { throw new TypeError('keepInvalid() expects a boolean parameter'); } this._options.keepInvalid = _keepInvalid; }; DateTimePicker.prototype.datepickerInput = function datepickerInput(_datepickerInput) { if (arguments.length === 0) { return this._options.datepickerInput; } if (typeof _datepickerInput !== 'string') { throw new TypeError('datepickerInput() expects a string parameter'); } this._options.datepickerInput = _datepickerInput; }; DateTimePicker.prototype.parseInputDate = function parseInputDate(_parseInputDate2) { if (arguments.length === 0) { return this._options.parseInputDate; } if (typeof _parseInputDate2 !== 'function') { throw new 
TypeError('parseInputDate() should be as function'); } this._options.parseInputDate = _parseInputDate2; }; DateTimePicker.prototype.disabledTimeIntervals = function disabledTimeIntervals(_disabledTimeIntervals) { if (arguments.length === 0) { return this._options.disabledTimeIntervals ? $.extend({}, this._options.disabledTimeIntervals) : this._options.disabledTimeIntervals; } if (!_disabledTimeIntervals) { this._options.disabledTimeIntervals = false; this._update(); return true; } if (!(_disabledTimeIntervals instanceof Array)) { throw new TypeError('disabledTimeIntervals() expects an array parameter'); } this._options.disabledTimeIntervals = _disabledTimeIntervals; this._update(); }; DateTimePicker.prototype.disabledHours = function disabledHours(hours) { if (arguments.length === 0) { return this._options.disabledHours ? $.extend({}, this._options.disabledHours) : this._options.disabledHours; } if (!hours) { this._options.disabledHours = false; this._update(); return true; } if (!(hours instanceof Array)) { throw new TypeError('disabledHours() expects an array parameter'); } this._options.disabledHours = this._indexGivenHours(hours); this._options.enabledHours = false; if (this._options.useCurrent && !this._options.keepInvalid) { for (var i = 0; i < this._dates.length; i++) { var tries = 0; while (!this._isValid(this._dates[i], 'h')) { this._dates[i].add(1, 'h'); if (tries === 24) { throw 'Tried 24 times to find a valid date'; } tries++; } this._setValue(this._dates[i], i); } } this._update(); }; DateTimePicker.prototype.enabledHours = function enabledHours(hours) { if (arguments.length === 0) { return this._options.enabledHours ? 
$.extend({}, this._options.enabledHours) : this._options.enabledHours; } if (!hours) { this._options.enabledHours = false; this._update(); return true; } if (!(hours instanceof Array)) { throw new TypeError('enabledHours() expects an array parameter'); } this._options.enabledHours = this._indexGivenHours(hours); this._options.disabledHours = false; if (this._options.useCurrent && !this._options.keepInvalid) { for (var i = 0; i < this._dates.length; i++) { var tries = 0; while (!this._isValid(this._dates[i], 'h')) { this._dates[i].add(1, 'h'); if (tries === 24) { throw 'Tried 24 times to find a valid date'; } tries++; } this._setValue(this._dates[i], i); } } this._update(); }; DateTimePicker.prototype.viewDate = function viewDate(newDate) { if (arguments.length === 0) { return this._viewDate.clone(); } if (!newDate) { this._viewDate = (this._dates[0] || this.getMoment()).clone(); return true; } if (typeof newDate !== 'string' && !moment.isMoment(newDate) && !(newDate instanceof Date)) { throw new TypeError('viewDate() parameter must be one of [string, moment or Date]'); } this._viewDate = this._parseInputDate(newDate); this._viewUpdate(); }; DateTimePicker.prototype.allowMultidate = function allowMultidate(_allowMultidate) { if (typeof _allowMultidate !== 'boolean') { throw new TypeError('allowMultidate() expects a boolean parameter'); } this._options.allowMultidate = _allowMultidate; }; DateTimePicker.prototype.multidateSeparator = function multidateSeparator(_multidateSeparator) { if (arguments.length === 0) { return this._options.multidateSeparator; } if (typeof _multidateSeparator !== 'string' || _multidateSeparator.length > 1) { throw new TypeError('multidateSeparator expects a single character string parameter'); } this._options.multidateSeparator = _multidateSeparator; }; _createClass(DateTimePicker, null, [{ key: 'NAME', get: function get() { return NAME; } /** * @return {string} */ }, { key: 'VERSION', get: function get() { return VERSION; } /** * @return 
{string} */ }, { key: 'DATA_KEY', get: function get() { return DATA_KEY; } /** * @return {string} */ }, { key: 'EVENT_KEY', get: function get() { return EVENT_KEY; } /** * @return {string} */ }, { key: 'DATA_API_KEY', get: function get() { return DATA_API_KEY; } }, { key: 'DatePickerModes', get: function get() { return DatePickerModes; } }, { key: 'ViewModes', get: function get() { return ViewModes; } /** * @return {number} */ }, { key: 'MinViewModeNumber', get: function get() { return MinViewModeNumber; } }, { key: 'Event', get: function get() { return Event; } }, { key: 'Selector', get: function get() { return Selector; } }, { key: 'Default', get: function get() { return Default; } }, { key: 'ClassName', get: function get() { return ClassName; } }]); return DateTimePicker; }(); return DateTimePicker; }(jQuery, moment); //noinspection JSUnusedGlobalSymbols /* global DateTimePicker */ var TempusDominusBootstrap4 = function ($) { // eslint-disable-line no-unused-vars // ReSharper disable once InconsistentNaming var JQUERY_NO_CONFLICT = $.fn[DateTimePicker.NAME], verticalModes = ['top', 'bottom', 'auto'], horizontalModes = ['left', 'right', 'auto'], toolbarPlacements = ['default', 'top', 'bottom'], getSelectorFromElement = function getSelectorFromElement($element) { var selector = $element.data('target'), $selector = void 0; if (!selector) { selector = $element.attr('href') || ''; selector = /^#[a-z]/i.test(selector) ? 
selector : null; } $selector = $(selector); if ($selector.length === 0) { return $selector; } if (!$selector.data(DateTimePicker.DATA_KEY)) { $.extend({}, $selector.data(), $(this).data()); } return $selector; }; // ReSharper disable once InconsistentNaming var TempusDominusBootstrap4 = function (_DateTimePicker) { _inherits(TempusDominusBootstrap4, _DateTimePicker); function TempusDominusBootstrap4(element, options) { _classCallCheck(this, TempusDominusBootstrap4); var _this = _possibleConstructorReturn(this, _DateTimePicker.call(this, element, options)); _this._init(); return _this; } TempusDominusBootstrap4.prototype._init = function _init() { if (this._element.hasClass('input-group')) { // in case there is more then one 'input-group-addon' Issue #48 var datepickerButton = this._element.find('.datepickerbutton'); if (datepickerButton.length === 0) { this.component = this._element.find('.input-group-addon'); } else { this.component = datepickerButton; } } }; TempusDominusBootstrap4.prototype._getDatePickerTemplate = function _getDatePickerTemplate() { var headTemplate = $('<thead>').append($('<tr>').append($('<th>').addClass('prev').attr('data-action', 'previous').append($('<span>').addClass(this._options.icons.previous))).append($('<th>').addClass('picker-switch').attr('data-action', 'pickerSwitch').attr('colspan', '' + (this._options.calendarWeeks ? '6' : '5'))).append($('<th>').addClass('next').attr('data-action', 'next').append($('<span>').addClass(this._options.icons.next)))), contTemplate = $('<tbody>').append($('<tr>').append($('<td>').attr('colspan', '' + (this._options.calendarWeeks ? 
'8' : '7')))); return [$('<div>').addClass('datepicker-days').append($('<table>').addClass('table table-sm').append(headTemplate).append($('<tbody>'))), $('<div>').addClass('datepicker-months').append($('<table>').addClass('table-condensed').append(headTemplate.clone()).append(contTemplate.clone())), $('<div>').addClass('datepicker-years').append($('<table>').addClass('table-condensed').append(headTemplate.clone()).append(contTemplate.clone())), $('<div>').addClass('datepicker-decades').append($('<table>').addClass('table-condensed').append(headTemplate.clone()).append(contTemplate.clone()))]; }; TempusDominusBootstrap4.prototype._getTimePickerMainTemplate = function _getTimePickerMainTemplate() { var topRow = $('<tr>'), middleRow = $('<tr>'), bottomRow = $('<tr>'); if (this._isEnabled('h')) { topRow.append($('<td>').append($('<a>').attr({ href: '#', tabindex: '-1', 'title': this._options.tooltips.incrementHour }).addClass('btn').attr('data-action', 'incrementHours').append($('<span>').addClass(this._options.icons.up)))); middleRow.append($('<td>').append($('<span>').addClass('timepicker-hour').attr({ 'data-time-component': 'hours', 'title': this._options.tooltips.pickHour }).attr('data-action', 'showHours'))); bottomRow.append($('<td>').append($('<a>').attr({ href: '#', tabindex: '-1', 'title': this._options.tooltips.decrementHour }).addClass('btn').attr('data-action', 'decrementHours').append($('<span>').addClass(this._options.icons.down)))); } if (this._isEnabled('m')) { if (this._isEnabled('h')) { topRow.append($('<td>').addClass('separator')); middleRow.append($('<td>').addClass('separator').html(':')); bottomRow.append($('<td>').addClass('separator')); } topRow.append($('<td>').append($('<a>').attr({ href: '#', tabindex: '-1', 'title': this._options.tooltips.incrementMinute }).addClass('btn').attr('data-action', 'incrementMinutes').append($('<span>').addClass(this._options.icons.up)))); 
middleRow.append($('<td>').append($('<span>').addClass('timepicker-minute').attr({ 'data-time-component': 'minutes', 'title': this._options.tooltips.pickMinute }).attr('data-action', 'showMinutes'))); bottomRow.append($('<td>').append($('<a>').attr({ href: '#', tabindex: '-1', 'title': this._options.tooltips.decrementMinute }).addClass('btn').attr('data-action', 'decrementMinutes').append($('<span>').addClass(this._options.icons.down)))); } if (this._isEnabled('s')) { if (this._isEnabled('m')) { topRow.append($('<td>').addClass('separator')); middleRow.append($('<td>').addClass('separator').html(':')); bottomRow.append($('<td>').addClass('separator')); } topRow.append($('<td>').append($('<a>').attr({ href: '#', tabindex: '-1', 'title': this._options.tooltips.incrementSecond }).addClass('btn').attr('data-action', 'incrementSeconds').append($('<span>').addClass(this._options.icons.up)))); middleRow.append($('<td>').append($('<span>').addClass('timepicker-second').attr({ 'data-time-component': 'seconds', 'title': this._options.tooltips.pickSecond }).attr('data-action', 'showSeconds'))); bottomRow.append($('<td>').append($('<a>').attr({ href: '#', tabindex: '-1', 'title': this._options.tooltips.decrementSecond }).addClass('btn').attr('data-action', 'decrementSeconds').append($('<span>').addClass(this._options.icons.down)))); } if (!this.use24Hours) { topRow.append($('<td>').addClass('separator')); middleRow.append($('<td>').append($('<button>').addClass('btn btn-primary').attr({ 'data-action': 'togglePeriod', tabindex: '-1', 'title': this._options.tooltips.togglePeriod }))); bottomRow.append($('<td>').addClass('separator')); } return $('<div>').addClass('timepicker-picker').append($('<table>').addClass('table-condensed').append([topRow, middleRow, bottomRow])); }; TempusDominusBootstrap4.prototype._getTimePickerTemplate = function _getTimePickerTemplate() { var hoursView = $('<div>').addClass('timepicker-hours').append($('<table>').addClass('table-condensed')), 
minutesView = $('<div>').addClass('timepicker-minutes').append($('<table>').addClass('table-condensed')), secondsView = $('<div>').addClass('timepicker-seconds').append($('<table>').addClass('table-condensed')), ret = [this._getTimePickerMainTemplate()]; if (this._isEnabled('h')) { ret.push(hoursView); } if (this._isEnabled('m')) { ret.push(minutesView); } if (this._isEnabled('s')) { ret.push(secondsView); } return ret; }; TempusDominusBootstrap4.prototype._getToolbar = function _getToolbar() { var row = []; if (this._options.buttons.showToday) { row.push($('<td>').append($('<a>').attr({ 'data-action': 'today', 'title': this._options.tooltips.today }).append($('<span>').addClass(this._options.icons.today)))); } if (!this._options.sideBySide && this._hasDate() && this._hasTime()) { row.push($('<td>').append($('<a>').attr({ 'data-action': 'togglePicker', 'title': this._options.tooltips.selectTime }).append($('<span>').addClass(this._options.icons.time)))); } if (this._options.buttons.showClear) { row.push($('<td>').append($('<a>').attr({ 'data-action': 'clear', 'title': this._options.tooltips.clear }).append($('<span>').addClass(this._options.icons.clear)))); } if (this._options.buttons.showClose) { row.push($('<td>').append($('<a>').attr({ 'data-action': 'close', 'title': this._options.tooltips.close }).append($('<span>').addClass(this._options.icons.close)))); } return row.length === 0 ? '' : $('<table>').addClass('table-condensed').append($('<tbody>').append($('<tr>').append(row))); }; TempusDominusBootstrap4.prototype._getTemplate = function _getTemplate() { var template = $('<div>').addClass('bootstrap-datetimepicker-widget dropdown-menu'), dateView = $('<div>').addClass('datepicker').append(this._getDatePickerTemplate()), timeView = $('<div>').addClass('timepicker').append(this._getTimePickerTemplate()), content = $('<ul>').addClass('list-unstyled'), toolbar = $('<li>').addClass('picker-switch' + (this._options.collapse ? 
' accordion-toggle' : '')).append(this._getToolbar()); if (this._options.inline) { template.removeClass('dropdown-menu'); } if (this.use24Hours) { template.addClass('usetwentyfour'); } if (this._isEnabled('s') && !this.use24Hours) { template.addClass('wider'); } if (this._options.sideBySide && this._hasDate() && this._hasTime()) { template.addClass('timepicker-sbs'); if (this._options.toolbarPlacement === 'top') { template.append(toolbar); } template.append($('<div>').addClass('row').append(dateView.addClass('col-md-6')).append(timeView.addClass('col-md-6'))); if (this._options.toolbarPlacement === 'bottom' || this._options.toolbarPlacement === 'default') { template.append(toolbar); } return template; } if (this._options.toolbarPlacement === 'top') { content.append(toolbar); } if (this._hasDate()) { content.append($('<li>').addClass(this._options.collapse && this._hasTime() ? 'collapse' : '').addClass(this._options.collapse && this._hasTime() && this._options.viewMode === 'time' ? '' : 'show').append(dateView)); } if (this._options.toolbarPlacement === 'default') { content.append(toolbar); } if (this._hasTime()) { content.append($('<li>').addClass(this._options.collapse && this._hasDate() ? 'collapse' : '').addClass(this._options.collapse && this._hasDate() && this._options.viewMode === 'time' ? 
'show' : '').append(timeView)); } if (this._options.toolbarPlacement === 'bottom') { content.append(toolbar); } return template.append(content); }; TempusDominusBootstrap4.prototype._place = function _place(e) { var self = e && e.data && e.data.picker || this, vertical = self._options.widgetPositioning.vertical, horizontal = self._options.widgetPositioning.horizontal, parent = void 0; var position = (self.component || self._element).position(), offset = (self.component || self._element).offset(); if (self._options.widgetParent) { parent = self._options.widgetParent.append(self.widget); } else if (self._element.is('input')) { parent = self._element.after(self.widget).parent(); } else if (self._options.inline) { parent = self._element.append(self.widget); return; } else { parent = self._element; self._element.children().first().after(self.widget); } // Top and bottom logic if (vertical === 'auto') { //noinspection JSValidateTypes if (offset.top + self.widget.height() * 1.5 >= $(window).height() + $(window).scrollTop() && self.widget.height() + self._element.outerHeight() < offset.top) { vertical = 'top'; } else { vertical = 'bottom'; } } // Left and right logic if (horizontal === 'auto') { if (parent.width() < offset.left + self.widget.outerWidth() / 2 && offset.left + self.widget.outerWidth() > $(window).width()) { horizontal = 'right'; } else { horizontal = 'left'; } } if (vertical === 'top') { self.widget.addClass('top').removeClass('bottom'); } else { self.widget.addClass('bottom').removeClass('top'); } if (horizontal === 'right') { self.widget.addClass('float-right'); } else { self.widget.removeClass('float-right'); } // find the first parent element that has a relative css positioning if (parent.css('position') !== 'relative') { parent = parent.parents().filter(function () { return $(this).css('position') === 'relative'; }).first(); } if (parent.length === 0) { throw new Error('datetimepicker component should be placed within a relative positioned container'); 
} self.widget.css({ top: vertical === 'top' ? 'auto' : position.top + self._element.outerHeight() + 'px', bottom: vertical === 'top' ? parent.outerHeight() - (parent === self._element ? 0 : position.top) + 'px' : 'auto', left: horizontal === 'left' ? (parent === self._element ? 0 : position.left) + 'px' : 'auto', right: horizontal === 'left' ? 'auto' : parent.outerWidth() - self._element.outerWidth() - (parent === self._element ? 0 : position.left) + 'px' }); }; TempusDominusBootstrap4.prototype._fillDow = function _fillDow() { var row = $('<tr>'), currentDate = this._viewDate.clone().startOf('w').startOf('d'); if (this._options.calendarWeeks === true) { row.append($('<th>').addClass('cw').text('#')); } while (currentDate.isBefore(this._viewDate.clone().endOf('w'))) { row.append($('<th>').addClass('dow').text(currentDate.format('dd'))); currentDate.add(1, 'd'); } this.widget.find('.datepicker-days thead').append(row); }; TempusDominusBootstrap4.prototype._fillMonths = function _fillMonths() { var spans = [], monthsShort = this._viewDate.clone().startOf('y').startOf('d'); while (monthsShort.isSame(this._viewDate, 'y')) { spans.push($('<span>').attr('data-action', 'selectMonth').addClass('month').text(monthsShort.format('MMM'))); monthsShort.add(1, 'M'); } this.widget.find('.datepicker-months td').empty().append(spans); }; TempusDominusBootstrap4.prototype._updateMonths = function _updateMonths() { var monthsView = this.widget.find('.datepicker-months'), monthsViewHeader = monthsView.find('th'), months = monthsView.find('tbody').find('span'), self = this; monthsViewHeader.eq(0).find('span').attr('title', this._options.tooltips.prevYear); monthsViewHeader.eq(1).attr('title', this._options.tooltips.selectYear); monthsViewHeader.eq(2).find('span').attr('title', this._options.tooltips.nextYear); monthsView.find('.disabled').removeClass('disabled'); if (!this._isValid(this._viewDate.clone().subtract(1, 'y'), 'y')) { monthsViewHeader.eq(0).addClass('disabled'); } 
monthsViewHeader.eq(1).text(this._viewDate.year()); if (!this._isValid(this._viewDate.clone().add(1, 'y'), 'y')) { monthsViewHeader.eq(2).addClass('disabled'); } months.removeClass('active'); if (this._getLastPickedDate().isSame(this._viewDate, 'y') && !this.unset) { months.eq(this._getLastPickedDate().month()).addClass('active'); } months.each(function (index) { if (!self._isValid(self._viewDate.clone().month(index), 'M')) { $(this).addClass('disabled'); } }); }; TempusDominusBootstrap4.prototype._getStartEndYear = function _getStartEndYear(factor, year) { var step = factor / 10, startYear = Math.floor(year / factor) * factor, endYear = startYear + step * 9, focusValue = Math.floor(year / step) * step; return [startYear, endYear, focusValue]; }; TempusDominusBootstrap4.prototype._updateYears = function _updateYears() { var yearsView = this.widget.find('.datepicker-years'), yearsViewHeader = yearsView.find('th'), yearCaps = this._getStartEndYear(10, this._viewDate.year()), startYear = this._viewDate.clone().year(yearCaps[0]), endYear = this._viewDate.clone().year(yearCaps[1]); var html = ''; yearsViewHeader.eq(0).find('span').attr('title', this._options.tooltips.prevDecade); yearsViewHeader.eq(1).attr('title', this._options.tooltips.selectDecade); yearsViewHeader.eq(2).find('span').attr('title', this._options.tooltips.nextDecade); yearsView.find('.disabled').removeClass('disabled'); if (this._options.minDate && this._options.minDate.isAfter(startYear, 'y')) { yearsViewHeader.eq(0).addClass('disabled'); } yearsViewHeader.eq(1).text(startYear.year() + '-' + endYear.year()); if (this._options.maxDate && this._options.maxDate.isBefore(endYear, 'y')) { yearsViewHeader.eq(2).addClass('disabled'); } html += '<span data-action="selectYear" class="year old">' + (startYear.year() - 1) + '</span>'; while (!startYear.isAfter(endYear, 'y')) { html += '<span data-action="selectYear" class="year' + (startYear.isSame(this._getLastPickedDate(), 'y') && !this.unset ? 
' active' : '') + (!this._isValid(startYear, 'y') ? ' disabled' : '') + '">' + startYear.year() + '</span>'; startYear.add(1, 'y'); } html += '<span data-action="selectYear" class="year old">' + startYear.year() + '</span>'; yearsView.find('td').html(html); }; TempusDominusBootstrap4.prototype._updateDecades = function _updateDecades() { var decadesView = this.widget.find('.datepicker-decades'), decadesViewHeader = decadesView.find('th'), yearCaps = this._getStartEndYear(100, this._viewDate.year()), startDecade = this._viewDate.clone().year(yearCaps[0]), endDecade = this._viewDate.clone().year(yearCaps[1]); var minDateDecade = false, maxDateDecade = false, endDecadeYear = void 0, html = ''; decadesViewHeader.eq(0).find('span').attr('title', this._options.tooltips.prevCentury); decadesViewHeader.eq(2).find('span').attr('title', this._options.tooltips.nextCentury); decadesView.find('.disabled').removeClass('disabled'); if (startDecade.year() === 0 || this._options.minDate && this._options.minDate.isAfter(startDecade, 'y')) { decadesViewHeader.eq(0).addClass('disabled'); } decadesViewHeader.eq(1).text(startDecade.year() + '-' + endDecade.year()); if (this._options.maxDate && this._options.maxDate.isBefore(endDecade, 'y')) { decadesViewHeader.eq(2).addClass('disabled'); } if (startDecade.year() - 10 < 0) { html += '<span>&nbsp;</span>'; } else { html += '<span data-action="selectDecade" class="decade old" data-selection="' + (startDecade.year() + 6) + '">' + (startDecade.year() - 10) + '</span>'; } while (!startDecade.isAfter(endDecade, 'y')) { endDecadeYear = startDecade.year() + 11; minDateDecade = this._options.minDate && this._options.minDate.isAfter(startDecade, 'y') && this._options.minDate.year() <= endDecadeYear; maxDateDecade = this._options.maxDate && this._options.maxDate.isAfter(startDecade, 'y') && this._options.maxDate.year() <= endDecadeYear; html += '<span data-action="selectDecade" class="decade' + (this._getLastPickedDate().isAfter(startDecade) && 
this._getLastPickedDate().year() <= endDecadeYear ? ' active' : '') + (!this._isValid(startDecade, 'y') && !minDateDecade && !maxDateDecade ? ' disabled' : '') + '" data-selection="' + (startDecade.year() + 6) + '">' + startDecade.year() + '</span>'; startDecade.add(10, 'y'); } html += '<span data-action="selectDecade" class="decade old" data-selection="' + (startDecade.year() + 6) + '">' + startDecade.year() + '</span>'; decadesView.find('td').html(html); }; TempusDominusBootstrap4.prototype._fillDate = function _fillDate() { var daysView = this.widget.find('.datepicker-days'), daysViewHeader = daysView.find('th'), html = []; var currentDate = void 0, row = void 0, clsName = void 0, i = void 0; if (!this._hasDate()) { return; } daysViewHeader.eq(0).find('span').attr('title', this._options.tooltips.prevMonth); daysViewHeader.eq(1).attr('title', this._options.tooltips.selectMonth); daysViewHeader.eq(2).find('span').attr('title', this._options.tooltips.nextMonth); daysView.find('.disabled').removeClass('disabled'); daysViewHeader.eq(1).text(this._viewDate.format(this._options.dayViewHeaderFormat)); if (!this._isValid(this._viewDate.clone().subtract(1, 'M'), 'M')) { daysViewHeader.eq(0).addClass('disabled'); } if (!this._isValid(this._viewDate.clone().add(1, 'M'), 'M')) { daysViewHeader.eq(2).addClass('disabled'); } currentDate = this._viewDate.clone().startOf('M').startOf('w').startOf('d'); for (i = 0; i < 42; i++) { //always display 42 days (should show 6 weeks) if (currentDate.weekday() === 0) { row = $('<tr>'); if (this._options.calendarWeeks) { row.append('<td class="cw">' + currentDate.week() + '</td>'); } html.push(row); } clsName = ''; if (currentDate.isBefore(this._viewDate, 'M')) { clsName += ' old'; } if (currentDate.isAfter(this._viewDate, 'M')) { clsName += ' new'; } if (this._options.allowMultidate) { var index = this._datesFormatted.indexOf(currentDate.format('YYYY-MM-DD')); if (index !== -1) { if (currentDate.isSame(this._datesFormatted[index], 'd') && 
!this.unset) { clsName += ' active'; } } } else { if (currentDate.isSame(this._getLastPickedDate(), 'd') && !this.unset) { clsName += ' active'; } } if (!this._isValid(currentDate, 'd')) { clsName += ' disabled'; } if (currentDate.isSame(this.getMoment(), 'd')) { clsName += ' today'; } if (currentDate.day() === 0 || currentDate.day() === 6) { clsName += ' weekend'; } row.append('<td data-action="selectDay" data-day="' + currentDate.format('L') + '" class="day' + clsName + '">' + currentDate.date() + '</td>'); currentDate.add(1, 'd'); } daysView.find('tbody').empty().append(html); this._updateMonths(); this._updateYears(); this._updateDecades(); }; TempusDominusBootstrap4.prototype._fillHours = function _fillHours() { var table = this.widget.find('.timepicker-hours table'), currentHour = this._viewDate.clone().startOf('d'), html = []; var row = $('<tr>'); if (this._viewDate.hour() > 11 && !this.use24Hours) { currentHour.hour(12); } while (currentHour.isSame(this._viewDate, 'd') && (this.use24Hours || this._viewDate.hour() < 12 && currentHour.hour() < 12 || this._viewDate.hour() > 11)) { if (currentHour.hour() % 4 === 0) { row = $('<tr>'); html.push(row); } row.append('<td data-action="selectHour" class="hour' + (!this._isValid(currentHour, 'h') ? ' disabled' : '') + '">' + currentHour.format(this.use24Hours ? 'HH' : 'hh') + '</td>'); currentHour.add(1, 'h'); } table.empty().append(html); }; TempusDominusBootstrap4.prototype._fillMinutes = function _fillMinutes() { var table = this.widget.find('.timepicker-minutes table'), currentMinute = this._viewDate.clone().startOf('h'), html = [], step = this._options.stepping === 1 ? 5 : this._options.stepping; var row = $('<tr>'); while (this._viewDate.isSame(currentMinute, 'h')) { if (currentMinute.minute() % (step * 4) === 0) { row = $('<tr>'); html.push(row); } row.append('<td data-action="selectMinute" class="minute' + (!this._isValid(currentMinute, 'm') ? 
' disabled' : '') + '">' + currentMinute.format('mm') + '</td>'); currentMinute.add(step, 'm'); } table.empty().append(html); }; TempusDominusBootstrap4.prototype._fillSeconds = function _fillSeconds() { var table = this.widget.find('.timepicker-seconds table'), currentSecond = this._viewDate.clone().startOf('m'), html = []; var row = $('<tr>'); while (this._viewDate.isSame(currentSecond, 'm')) { if (currentSecond.second() % 20 === 0) { row = $('<tr>'); html.push(row); } row.append('<td data-action="selectSecond" class="second' + (!this._isValid(currentSecond, 's') ? ' disabled' : '') + '">' + currentSecond.format('ss') + '</td>'); currentSecond.add(5, 's'); } table.empty().append(html); }; TempusDominusBootstrap4.prototype._fillTime = function _fillTime() { var toggle = void 0, newDate = void 0; var timeComponents = this.widget.find('.timepicker span[data-time-component]'); if (!this.use24Hours) { toggle = this.widget.find('.timepicker [data-action=togglePeriod]'); newDate = this._getLastPickedDate().clone().add(this._getLastPickedDate().hours() >= 12 ? -12 : 12, 'h'); toggle.text(this._getLastPickedDate().format('A')); if (this._isValid(newDate, 'h')) { toggle.removeClass('disabled'); } else { toggle.addClass('disabled'); } } timeComponents.filter('[data-time-component=hours]').text(this._getLastPickedDate().format('' + (this.use24Hours ? 
'HH' : 'hh'))); timeComponents.filter('[data-time-component=minutes]').text(this._getLastPickedDate().format('mm')); timeComponents.filter('[data-time-component=seconds]').text(this._getLastPickedDate().format('ss')); this._fillHours(); this._fillMinutes(); this._fillSeconds(); }; TempusDominusBootstrap4.prototype._doAction = function _doAction(e, action) { var lastPicked = this._getLastPickedDate(); if ($(e.currentTarget).is('.disabled')) { return false; } action = action || $(e.currentTarget).data('action'); switch (action) { case 'next': { var navFnc = DateTimePicker.DatePickerModes[this.currentViewMode].NAV_FUNCTION; this._viewDate.add(DateTimePicker.DatePickerModes[this.currentViewMode].NAV_STEP, navFnc); this._fillDate(); this._viewUpdate(navFnc); break; } case 'previous': { var _navFnc = DateTimePicker.DatePickerModes[this.currentViewMode].NAV_FUNCTION; this._viewDate.subtract(DateTimePicker.DatePickerModes[this.currentViewMode].NAV_STEP, _navFnc); this._fillDate(); this._viewUpdate(_navFnc); break; } case 'pickerSwitch': this._showMode(1); break; case 'selectMonth': { var month = $(e.target).closest('tbody').find('span').index($(e.target)); this._viewDate.month(month); if (this.currentViewMode === DateTimePicker.MinViewModeNumber) { this._setValue(lastPicked.clone().year(this._viewDate.year()).month(this._viewDate.month()), this._getLastPickedDateIndex()); if (!this._options.inline) { this.hide(); } } else { this._showMode(-1); this._fillDate(); } this._viewUpdate('M'); break; } case 'selectYear': { var year = parseInt($(e.target).text(), 10) || 0; this._viewDate.year(year); if (this.currentViewMode === DateTimePicker.MinViewModeNumber) { this._setValue(lastPicked.clone().year(this._viewDate.year()), this._getLastPickedDateIndex()); if (!this._options.inline) { this.hide(); } } else { this._showMode(-1); this._fillDate(); } this._viewUpdate('YYYY'); break; } case 'selectDecade': { var _year = parseInt($(e.target).data('selection'), 10) || 0; 
this._viewDate.year(_year); if (this.currentViewMode === DateTimePicker.MinViewModeNumber) { this._setValue(lastPicked.clone().year(this._viewDate.year()), this._getLastPickedDateIndex()); if (!this._options.inline) { this.hide(); } } else { this._showMode(-1); this._fillDate(); } this._viewUpdate('YYYY'); break; } case 'selectDay': { var day = this._viewDate.clone(); if ($(e.target).is('.old')) { day.subtract(1, 'M'); } if ($(e.target).is('.new')) { day.add(1, 'M'); } this._setValue(day.date(parseInt($(e.target).text(), 10)), this._getLastPickedDateIndex()); if (!this._hasTime() && !this._options.keepOpen && !this._options.inline) { this.hide(); } break; } case 'incrementHours': { var newDate = lastPicked.clone().add(1, 'h'); if (this._isValid(newDate, 'h')) { this._setValue(newDate, this._getLastPickedDateIndex()); } break; } case 'incrementMinutes': { var _newDate = lastPicked.clone().add(this._options.stepping, 'm'); if (this._isValid(_newDate, 'm')) { this._setValue(_newDate, this._getLastPickedDateIndex()); } break; } case 'incrementSeconds': { var _newDate2 = lastPicked.clone().add(1, 's'); if (this._isValid(_newDate2, 's')) { this._setValue(_newDate2, this._getLastPickedDateIndex()); } break; } case 'decrementHours': { var _newDate3 = lastPicked.clone().subtract(1, 'h'); if (this._isValid(_newDate3, 'h')) { this._setValue(_newDate3, this._getLastPickedDateIndex()); } break; } case 'decrementMinutes': { var _newDate4 = lastPicked.clone().subtract(this._options.stepping, 'm'); if (this._isValid(_newDate4, 'm')) { this._setValue(_newDate4, this._getLastPickedDateIndex()); } break; } case 'decrementSeconds': { var _newDate5 = lastPicked.clone().subtract(1, 's'); if (this._isValid(_newDate5, 's')) { this._setValue(_newDate5, this._getLastPickedDateIndex()); } break; } case 'togglePeriod': { this._setValue(lastPicked.clone().add(lastPicked.hours() >= 12 ? 
-12 : 12, 'h'), this._getLastPickedDateIndex()); break; } case 'togglePicker': { var $this = $(e.target), $link = $this.closest('a'), $parent = $this.closest('ul'), expanded = $parent.find('.show'), closed = $parent.find('.collapse:not(.show)'), $span = $this.is('span') ? $this : $this.find('span'); var collapseData = void 0; if (expanded && expanded.length) { collapseData = expanded.data('collapse'); if (collapseData && collapseData.transitioning) { return true; } if (expanded.collapse) { // if collapse plugin is available through bootstrap.js then use it expanded.collapse('hide'); closed.collapse('show'); } else { // otherwise just toggle in class on the two views expanded.removeClass('show'); closed.addClass('show'); } $span.toggleClass(this._options.icons.time + ' ' + this._options.icons.date); if ($span.hasClass(this._options.icons.date)) { $link.attr('title', this._options.tooltips.selectDate); } else { $link.attr('title', this._options.tooltips.selectTime); } } } break; case 'showPicker': this.widget.find('.timepicker > div:not(.timepicker-picker)').hide(); this.widget.find('.timepicker .timepicker-picker').show(); break; case 'showHours': this.widget.find('.timepicker .timepicker-picker').hide(); this.widget.find('.timepicker .timepicker-hours').show(); break; case 'showMinutes': this.widget.find('.timepicker .timepicker-picker').hide(); this.widget.find('.timepicker .timepicker-minutes').show(); break; case 'showSeconds': this.widget.find('.timepicker .timepicker-picker').hide(); this.widget.find('.timepicker .timepicker-seconds').show(); break; case 'selectHour': { var hour = parseInt($(e.target).text(), 10); if (!this.use24Hours) { if (lastPicked.hours() >= 12) { if (hour !== 12) { hour += 12; } } else { if (hour === 12) { hour = 0; } } } this._setValue(lastPicked.clone().hours(hour), this._getLastPickedDateIndex()); this._doAction(e, 'showPicker'); break; } case 'selectMinute': this._setValue(lastPicked.clone().minutes(parseInt($(e.target).text(), 10)), 
this._getLastPickedDateIndex()); this._doAction(e, 'showPicker'); break; case 'selectSecond': this._setValue(lastPicked.clone().seconds(parseInt($(e.target).text(), 10)), this._getLastPickedDateIndex()); this._doAction(e, 'showPicker'); break; case 'clear': this.clear(); break; case 'today': { var todaysDate = this.getMoment(); if (this._isValid(todaysDate, 'd')) { this._setValue(todaysDate, this._getLastPickedDateIndex()); } break; } } return false; }; //public TempusDominusBootstrap4.prototype.hide = function hide() { var transitioning = false; if (!this.widget) { return; } // Ignore event if in the middle of a picker transition this.widget.find('.collapse').each(function () { var collapseData = $(this).data('collapse'); if (collapseData && collapseData.transitioning) { transitioning = true; return false; } return true; }); if (transitioning) { return; } if (this.component && this.component.hasClass('btn')) { this.component.toggleClass('active'); } this.widget.hide(); $(window).off('resize', this._place()); this.widget.off('click', '[data-action]'); this.widget.off('mousedown', false); this.widget.remove(); this.widget = false; this._notifyEvent({ type: DateTimePicker.Event.HIDE, date: this._getLastPickedDate().clone() }); if (this.input !== undefined) { this.input.blur(); } this._viewDate = this._getLastPickedDate().clone(); }; TempusDominusBootstrap4.prototype.show = function show() { var currentMoment = void 0; var useCurrentGranularity = { 'year': function year(m) { return m.month(0).date(1).hours(0).seconds(0).minutes(0); }, 'month': function month(m) { return m.date(1).hours(0).seconds(0).minutes(0); }, 'day': function day(m) { return m.hours(0).seconds(0).minutes(0); }, 'hour': function hour(m) { return m.seconds(0).minutes(0); }, 'minute': function minute(m) { return m.seconds(0); } }; if (this.input !== undefined) { if (this.input.prop('disabled') || !this._options.ignoreReadonly && this.input.prop('readonly') || this.widget) { return; } if 
(this.input.val() !== undefined && this.input.val().trim().length !== 0) { this._setValue(this._parseInputDate(this.input.val().trim()), 0); } else if (this.unset && this._options.useCurrent) { currentMoment = this.getMoment(); if (typeof this._options.useCurrent === 'string') { currentMoment = useCurrentGranularity[this._options.useCurrent](currentMoment); } this._setValue(currentMoment, 0); } } else if (this.unset && this._options.useCurrent) { currentMoment = this.getMoment(); if (typeof this._options.useCurrent === 'string') { currentMoment = useCurrentGranularity[this._options.useCurrent](currentMoment); } this._setValue(currentMoment, 0); } this.widget = this._getTemplate(); this._fillDow(); this._fillMonths(); this.widget.find('.timepicker-hours').hide(); this.widget.find('.timepicker-minutes').hide(); this.widget.find('.timepicker-seconds').hide(); this._update(); this._showMode(); $(window).on('resize', { picker: this }, this._place); this.widget.on('click', '[data-action]', $.proxy(this._doAction, this)); // this handles clicks on the widget this.widget.on('mousedown', false); if (this.component && this.component.hasClass('btn')) { this.component.toggleClass('active'); } this._place(); this.widget.show(); if (this.input !== undefined && this._options.focusOnShow && !this.input.is(':focus')) { this.input.focus(); } this._notifyEvent({ type: DateTimePicker.Event.SHOW }); }; TempusDominusBootstrap4.prototype.destroy = function destroy() { this.hide(); //todo doc off? 
this._element.removeData(DateTimePicker.DATA_KEY); this._element.removeData('date'); }; TempusDominusBootstrap4.prototype.disable = function disable() { this.hide(); if (this.component && this.component.hasClass('btn')) { this.component.addClass('disabled'); } if (this.input !== undefined) { this.input.prop('disabled', true); //todo disable this/comp if input is null } }; TempusDominusBootstrap4.prototype.enable = function enable() { if (this.component && this.component.hasClass('btn')) { this.component.removeClass('disabled'); } if (this.input !== undefined) { this.input.prop('disabled', false); //todo enable comp/this if input is null } }; TempusDominusBootstrap4.prototype.toolbarPlacement = function toolbarPlacement(_toolbarPlacement) { if (arguments.length === 0) { return this._options.toolbarPlacement; } if (typeof _toolbarPlacement !== 'string') { throw new TypeError('toolbarPlacement() expects a string parameter'); } if (toolbarPlacements.indexOf(_toolbarPlacement) === -1) { throw new TypeError('toolbarPlacement() parameter must be one of (' + toolbarPlacements.join(', ') + ') value'); } this._options.toolbarPlacement = _toolbarPlacement; if (this.widget) { this.hide(); this.show(); } }; TempusDominusBootstrap4.prototype.widgetPositioning = function widgetPositioning(_widgetPositioning) { if (arguments.length === 0) { return $.extend({}, this._options.widgetPositioning); } if ({}.toString.call(_widgetPositioning) !== '[object Object]') { throw new TypeError('widgetPositioning() expects an object variable'); } if (_widgetPositioning.horizontal) { if (typeof _widgetPositioning.horizontal !== 'string') { throw new TypeError('widgetPositioning() horizontal variable must be a string'); } _widgetPositioning.horizontal = _widgetPositioning.horizontal.toLowerCase(); if (horizontalModes.indexOf(_widgetPositioning.horizontal) === -1) { throw new TypeError('widgetPositioning() expects horizontal parameter to be one of (' + horizontalModes.join(', ') + ')'); } 
this._options.widgetPositioning.horizontal = _widgetPositioning.horizontal; } if (_widgetPositioning.vertical) { if (typeof _widgetPositioning.vertical !== 'string') { throw new TypeError('widgetPositioning() vertical variable must be a string'); } _widgetPositioning.vertical = _widgetPositioning.vertical.toLowerCase(); if (verticalModes.indexOf(_widgetPositioning.vertical) === -1) { throw new TypeError('widgetPositioning() expects vertical parameter to be one of (' + verticalModes.join(', ') + ')'); } this._options.widgetPositioning.vertical = _widgetPositioning.vertical; } this._update(); }; TempusDominusBootstrap4.prototype.widgetParent = function widgetParent(_widgetParent) { if (arguments.length === 0) { return this._options.widgetParent; } if (typeof _widgetParent === 'string') { _widgetParent = $(_widgetParent); } if (_widgetParent !== null && typeof _widgetParent !== 'string' && !(_widgetParent instanceof $)) { throw new TypeError('widgetParent() expects a string or a jQuery object parameter'); } this._options.widgetParent = _widgetParent; if (this.widget) { this.hide(); this.show(); } }; //static TempusDominusBootstrap4._jQueryHandleThis = function _jQueryHandleThis(me, option, argument) { var data = $(me).data(DateTimePicker.DATA_KEY); if ((typeof option === 'undefined' ? 
'undefined' : _typeof(option)) === 'object') { $.extend({}, DateTimePicker.Default, option); } if (!data) { data = new TempusDominusBootstrap4($(me), option); $(me).data(DateTimePicker.DATA_KEY, data); } if (typeof option === 'string') { if (data[option] === undefined) { throw new Error('No method named "' + option + '"'); } if (argument === undefined) { return data[option](); } else { return data[option](argument); } } }; TempusDominusBootstrap4._jQueryInterface = function _jQueryInterface(option, argument) { if (this.length === 1) { return TempusDominusBootstrap4._jQueryHandleThis(this[0], option, argument); } return this.each(function () { TempusDominusBootstrap4._jQueryHandleThis(this, option, argument); }); }; return TempusDominusBootstrap4; }(DateTimePicker); /** * ------------------------------------------------------------------------ * jQuery * ------------------------------------------------------------------------ */ $(document).on(DateTimePicker.Event.CLICK_DATA_API, DateTimePicker.Selector.DATA_TOGGLE, function () { var $target = getSelectorFromElement($(this)); if ($target.length === 0) { return; } TempusDominusBootstrap4._jQueryInterface.call($target, 'toggle'); }).on(DateTimePicker.Event.CHANGE, '.' + DateTimePicker.ClassName.INPUT, function (event) { var $target = getSelectorFromElement($(this)); if ($target.length === 0) { return; } TempusDominusBootstrap4._jQueryInterface.call($target, '_change', event); }).on(DateTimePicker.Event.BLUR, '.' + DateTimePicker.ClassName.INPUT, function (event) { var $target = getSelectorFromElement($(this)), config = $target.data(DateTimePicker.DATA_KEY); if ($target.length === 0) { return; } if (config._options.debug || window.debug) { return; } TempusDominusBootstrap4._jQueryInterface.call($target, 'hide', event); }).on(DateTimePicker.Event.KEYDOWN, '.' 
+ DateTimePicker.ClassName.INPUT, function (event) { var $target = getSelectorFromElement($(this)); if ($target.length === 0) { return; } TempusDominusBootstrap4._jQueryInterface.call($target, '_keydown', event); }).on(DateTimePicker.Event.KEYUP, '.' + DateTimePicker.ClassName.INPUT, function (event) { var $target = getSelectorFromElement($(this)); if ($target.length === 0) { return; } TempusDominusBootstrap4._jQueryInterface.call($target, '_keyup', event); }).on(DateTimePicker.Event.FOCUS, '.' + DateTimePicker.ClassName.INPUT, function (event) { var $target = getSelectorFromElement($(this)), config = $target.data(DateTimePicker.DATA_KEY); if ($target.length === 0) { return; } if (!config._options.allowInputToggle) { return; } TempusDominusBootstrap4._jQueryInterface.call($target, config, event); }); $.fn[DateTimePicker.NAME] = TempusDominusBootstrap4._jQueryInterface; $.fn[DateTimePicker.NAME].Constructor = TempusDominusBootstrap4; $.fn[DateTimePicker.NAME].noConflict = function () { $.fn[DateTimePicker.NAME] = JQUERY_NO_CONFLICT; return TempusDominusBootstrap4._jQueryInterface; }; return TempusDominusBootstrap4; }(jQuery); }();
sashberd/cdnjs
ajax/libs/tempusdominus-bootstrap-4/5.0.0-alpha13/js/tempusdominus-bootstrap-4.js
JavaScript
mit
114,603
/**
 * A network library for processing which supports UDP, TCP and Multicast.
 *
 * (c) 2004-2011
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General
 * Public License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place, Suite 330,
 * Boston, MA 02111-1307 USA
 *
 * @author Andreas Schlegel http://www.sojamo.de/libraries/oscP5
 * @modified 12/19/2011
 * @version 0.9.8
 */

package netP5;

import java.net.DatagramPacket;
import java.util.Vector;

/**
 * A UDP server that listens on a single port and forwards every received
 * {@link DatagramPacket} to its registered {@link NetListener}s through an
 * internal {@link NetPlug} dispatcher.
 *
 * <p>By default the buffer size of a UDP packet is 1536 bytes; an individual
 * buffer size can be set with the three-argument constructor.
 *
 * @author andreas schlegel
 */
public class UdpServer extends AbstractUdpServer implements UdpPacketListener {

    /**
     * The application object (e.g. a processing sketch) that owns this server;
     * events are dispatched to it via {@link #_myNetPlug}.
     */
    protected Object _myParent;

    /** Dispatcher that forwards incoming packets to the registered listeners. */
    protected NetPlug _myNetPlug;

    /**
     * Creates a new UDP server with a custom receive-buffer size and starts
     * listening immediately.
     *
     * @param theObject Object the parent object receiving the callbacks
     * @param thePort int the port to listen on
     * @param theBufferSize int receive-buffer size in bytes
     */
    public UdpServer(
            final Object theObject,
            final int thePort,
            final int theBufferSize) {
        super(null, thePort, theBufferSize);
        _myParent = theObject;
        // this server itself is the packet listener; process() re-dispatches.
        _myListener = this;
        _myNetPlug = new NetPlug(_myParent);
        start();
    }

    /**
     * Creates a new UDP server with the default buffer size of 1536 bytes and
     * starts listening immediately.
     *
     * @param theObject Object the parent object receiving the callbacks
     * @param thePort int the port to listen on
     */
    public UdpServer(
            final Object theObject,
            final int thePort) {
        super(null, thePort, 1536);
        _myParent = theObject;
        _myListener = this;
        _myNetPlug = new NetPlug(_myParent);
        start();
    }

    /**
     * NOTE(review): unlike the Object-based constructors above, this one does
     * not call start() — presumably the caller starts the server; confirm.
     *
     * @invisible
     * @param theListener
     * @param thePort
     * @param theBufferSize
     */
    public UdpServer(
            final UdpPacketListener theListener,
            final int thePort,
            final int theBufferSize) {
        super(theListener, thePort, theBufferSize);
    }

    /**
     * NOTE(review): like the constructor above, this one does not call
     * start() — confirm the subclass/caller is responsible for starting.
     *
     * @invisible
     * @param theListener
     * @param theAddress
     * @param thePort
     * @param theBufferSize
     */
    protected UdpServer(
            final UdpPacketListener theListener,
            final String theAddress,
            final int thePort,
            final int theBufferSize) {
        super(theListener, theAddress, thePort, theBufferSize);
    }

    /**
     * Forwards a received datagram to the listener dispatcher.
     *
     * @invisible
     * @param thePacket DatagramPacket the received packet
     * @param thePort int the port the packet was received on
     */
    public void process(DatagramPacket thePacket, int thePort) {
        _myNetPlug.process(thePacket,thePort);
    }

    /**
     * add a listener to the udp server. each incoming packet will be forwarded
     * to the listener.
     * @param theListener
     * @related NetListener
     */
    public void addListener(NetListener theListener) {
        _myNetPlug.addListener(theListener);
    }

    /**
     * Removes a previously registered listener.
     *
     * @param theListener
     * @related NetListener
     */
    public void removeListener(NetListener theListener) {
        _myNetPlug.removeListener(theListener);
    }

    /**
     * Returns the listener registered at the given index.
     *
     * @param theIndex
     * @related NetListener
     * @return
     */
    public NetListener getListener(int theIndex) {
        return _myNetPlug.getListener(theIndex);
    }

    /**
     * Returns all registered listeners.
     * NOTE(review): raw Vector return type is kept for API compatibility;
     * elements are presumably NetListener instances — confirm against NetPlug.
     *
     * @related NetListener
     * @return
     */
    public Vector getListeners() {
        return _myNetPlug.getListeners();
    }
}
Avnerus/pulse-midburn
oscP5/src/netP5/UdpServer.java
Java
mit
3,640
/** * Copyright (c) 2010-2018 by the respective copyright holders. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.openhab.binding.innogysmarthome.internal.listener; import org.openhab.binding.innogysmarthome.internal.InnogyWebSocket; /** * The {@link EventListener} is called by the {@link InnogyWebSocket} on new Events and if the {@link InnogyWebSocket} * closed the connection. * * @author Oliver Kuhl - Initial contribution */ public interface EventListener { /** * This method is called, whenever a new event comes from the innogy service (like a device change for example). * * @param msg */ public void onEvent(String msg); /** * This method is called, when the evenRunner stops abnormally (statuscode <> 1000). */ public void connectionClosed(); }
johannrichard/openhab2-addons
addons/binding/org.openhab.binding.innogysmarthome/src/main/java/org/openhab/binding/innogysmarthome/internal/listener/EventListener.java
Java
epl-1.0
1,030
from sys import *  # NOTE(review): nothing from sys is used below; kept for compatibility
from pdflib_py import *


def apply_gradient(p, shading_type, x0, y0, x1, y1, params, blue):
    """Install a shading pattern as the current fill colour.

    Mirrors the original call sequence exactly: set an RGB base fill colour,
    create an axial or radial shading, wrap it in a pattern, then select that
    pattern as the fill colour.

    p            -- PDFlib document handle
    shading_type -- "axial" or "radial"
    x0, y0       -- shading start point
    x1, y1       -- shading end point (equal to the start point for the
                    radial shadings used here)
    params       -- extra PDFlib shading options, e.g. "r0=0 r1=320"
    blue         -- blue component of the base fill colour (0.0 or 0.4)
    """
    PDF_setcolor(p, "fill", "rgb", 0.0, 0.0, blue, 1.0)
    shading = PDF_shading(p, shading_type, x0, y0, x1, y1, 1.0, 1.0, 1.0, 1.0, params)
    pattern = PDF_shading_pattern(p, shading, "")
    PDF_setcolor(p, "fill", "pattern", pattern, 0, 0, 0)


def petal_path(p, x, y):
    """Trace the closed four-curve 'petal' outline anchored at (x, y)."""
    PDF_moveto(p, x, y)
    PDF_curveto(p, x + 80, y + 80, x + 80, y + 240, x, y + 320)
    PDF_curveto(p, x + 80, y + 240, x + 240, y + 240, x + 320, y + 320)
    PDF_curveto(p, x + 240, y + 240, x + 240, y + 80, x + 320, y)
    PDF_curveto(p, x + 240, y + 80, x + 80, y + 80, x, y)


def box_path(p, x0, y0, x1, y1):
    """Trace the axis-aligned rectangle with corners (x0, y0) and (x1, y1)."""
    PDF_moveto(p, x0, y0)
    PDF_lineto(p, x0, y1)
    PDF_lineto(p, x1, y1)
    PDF_lineto(p, x1, y0)
    PDF_lineto(p, x0, y0)


def fill_and_stroke(p, trace, *args):
    """Trace the outline twice: once filled with the current pattern, once stroked."""
    trace(p, *args)
    PDF_fill(p)
    trace(p, *args)
    PDF_stroke(p)


p = PDF_new()
PDF_open_file(p, "gradients.pdf")
PDF_set_parameter(p, "usercoordinates", "true")
PDF_set_value(p, "compress", 0)

PDF_set_info(p, "Author", "pdflib")
PDF_set_info(p, "Creator", "pdflib_py")
PDF_set_info(p, "Title", "gradients")

width = 1024
height = 800
PDF_begin_page(p, width, height)

# Petal with a black-based radial gradient centred on the shape.
x, y = 0, 0
apply_gradient(p, "radial", 160 + x, 160 + y, 160 + x, 160 + y, "r0=0 r1=320", blue=0.0)
fill_and_stroke(p, petal_path, x, y)

# Petal with a diagonal axial gradient.
x, y = 200, 0
apply_gradient(p, "axial", 0 + x, 0 + y, 320 + x, 320 + y, "", blue=0.4)
fill_and_stroke(p, petal_path, x, y)

# Vertical bar with a radial gradient centred on the bar.
x, y = 500, 0
apply_gradient(p, "radial", 120 + x, 340 + y, 120 + x, 340 + y, "r0=0 r1=220", blue=0.4)
fill_and_stroke(p, box_path, x + 80, y + 80, x + 160, y + 640)

# Vertical bar with a bottom-to-top axial gradient.
x, y = 600, 0
apply_gradient(p, "axial", 80 + x, 80 + y, 80 + x, 640 + y, "", blue=0.4)
fill_and_stroke(p, box_path, x + 80, y + 80, x + 160, y + 640)

# Horizontal bar with a left-to-right axial gradient.
x, y = 50, 300
apply_gradient(p, "axial", 80 + x, 80 + y, 400 + x, 80 + y, "", blue=0.4)
fill_and_stroke(p, box_path, x + 80, y + 80, x + 400, y + 160)

PDF_end_page(p)
PDF_close(p)
PDF_delete(p)
brad/swftools
spec/gradients.py
Python
gpl-2.0
3,650
<?php

/**
 * Types_Page_Hidden_Helper
 *
 * Hidden admin page (registered under the invisible "options.php" parent)
 * that creates a Toolset asset (form, view, layout template, content
 * template, WordPress archive or post field group) for a given post type
 * and then redirects the browser to the edit screen of the new asset.
 *
 * @since 2.0
 */
class Types_Page_Hidden_Helper extends Types_Page_Abstract {

	private static $instance;

	// Target of the final redirect; false until route() resolves it.
	private $redirect_url = false;

	/**
	 * Singleton accessor. Registers the hidden submenu page on first use.
	 *
	 * @return Types_Page_Hidden_Helper
	 */
	public static function get_instance() {
		if( null == self::$instance ) {
			self::$instance = new self();
			self::$instance->add_sneaky_hidden_helper();
		}

		// Fix: the instance was created but never returned to the caller.
		return self::$instance;
	}

	/**
	 * Register this page as a submenu of "options.php" so it gets a URL
	 * without appearing anywhere in the admin menu.
	 */
	public function add_sneaky_hidden_helper() {
		add_submenu_page(
			'options.php', // hidden
			$this->get_title(),
			$this->get_title(),
			$this->get_required_capability(),
			$this->get_page_name(),
			array( $this, $this->get_load_callback() )
		);
	}

	public function get_title() {
		return 'Loading...';
	}

	public function get_render_callback() {
		return null;
	}

	public function get_load_callback() {
		return 'route';
	}

	public function get_page_name() {
		return Types_Admin_Menu::PAGE_NAME_HELPER;
	}

	public function get_required_capability() {
		return 'manage_options';
	}

	/**
	 * Dispatch on $_GET['action']: create the requested asset for the post
	 * type in $_GET['type'], then redirect to the asset's edit screen.
	 * Unknown actions fall through and redirect to the admin main page.
	 */
	public function route() {
		$this->redirect_url = false;

		if( isset( $_GET['action'] ) && isset( $_GET['type'] ) ) {
			switch( $_GET['action'] ) {
				case 'new-form':
					$this->redirect_url = $this->new_form_action( $_GET['type'] );
					break;
				case 'new-view':
					$this->redirect_url = $this->new_view_action( $_GET['type'] );
					break;
				case 'new-layout-template':
					$this->redirect_url = $this->new_layout_template_action( $_GET['type'] );
					break;
				case 'new-content-template':
					$this->redirect_url = $this->new_content_template_action( $_GET['type'] );
					break;
				case 'new-wordpress-archive':
					$this->redirect_url = $this->new_wordpress_archive_action( $_GET['type'] );
					break;
				case 'new-post-field-group':
					$this->redirect_url = $this->new_post_field_group_action( $_GET['type'] );
					break;
			}
		}

		$this->redirect_url = $this->add_params_to_url( $this->redirect_url );
		$this->redirect();
	}

	/**
	 * Create a form for the given post type.
	 *
	 * @param string $type Post type slug.
	 * @return string|false Edit URL of the new form, or false on failure.
	 */
	private function new_form_action( $type ) {
		$new_form = new Types_Helper_Create_Form();

		if( $id = $new_form->for_post( $type ) ) {
			// The non-empty second argument keeps the "&" in the URL unencoded.
			return get_edit_post_link( $id, 'Please WordPress, be so nice and do not encode &.' );
		}

		return false;
	}

	/**
	 * Create a View for the given post type.
	 *
	 * @param string $type Post type slug.
	 * @return string|false Editor URL of the new View, or false on failure.
	 */
	private function new_view_action( $type ) {
		$new_view = new Types_Helper_Create_View();

		if( $id = $new_view->for_post( $type ) ) {
			return admin_url() . 'admin.php?page=views-editor&view_id='.$id;
		}

		return false;
	}

	/**
	 * Create a layout template for the given post type.
	 *
	 * @param string $type Post type slug.
	 * @return string|false Editor URL of the new layout, or false on failure.
	 */
	private function new_layout_template_action( $type ) {
		$new_layout = new Types_Helper_Create_Layout();

		if( $id = $new_layout->for_post( $type ) ) {
			return admin_url() . 'admin.php?page=dd_layouts_edit&action=edit&layout_id='.$id;
		}

		return false;
	}

	/**
	 * Create a content template for the given post type.
	 *
	 * @param string $type Post type slug.
	 * @return string|false Editor URL of the new template, or false on failure.
	 */
	private function new_content_template_action( $type ) {
		$new_layout = new Types_Helper_Create_Content_Template();

		if( $id = $new_layout->for_post( $type ) ) {
			return admin_url() . 'admin.php?page=ct-editor&ct_id='.$id;
		}

		return false;
	}

	/**
	 * Create a WordPress archive for the given post type.
	 *
	 * @param string $type Post type slug.
	 * @return string|false Editor URL of the new archive, or false on failure.
	 */
	private function new_wordpress_archive_action( $type ) {
		$new_wordpress_archive = new Types_Helper_Create_Wordpress_Archive();

		if( $id = $new_wordpress_archive->for_post( $type ) ) {
			return admin_url() . 'admin.php?page=view-archives-editor&view_id='.$id;
		}

		return false;
	}

	/**
	 * Create a post field group assigned to the given post type.
	 *
	 * @param string $type Post type slug.
	 * @return string|false Editor URL of the new group, or false on failure.
	 */
	private function new_post_field_group_action( $type ) {
		$type_object = get_post_type_object( $type );
		$title = sprintf( __( 'Field Group for %s', 'types' ), $type_object->labels->name );
		$name = sanitize_title( $title );

		$new_post_field_group = Types_Field_Group_Post_Factory::get_instance()->create( $name, $title, 'publish' );

		if( ! $new_post_field_group )
			return false;

		$new_post_field_group->assign_post_type( $type );

		// Forward the "ref" parameter to the field group editor when present.
		$url = isset( $_GET['ref'] )
			? 'admin.php?page=wpcf-edit&group_id='.$new_post_field_group->get_id().'&ref='.$_GET['ref']
			: 'admin.php?page=wpcf-edit&group_id='.$new_post_field_group->get_id();

		return admin_url( $url );
	}

	/**
	 * Forward selected request parameters to the redirect target.
	 *
	 * @param string|false $url
	 * @return string|false
	 */
	private function add_params_to_url( $url ) {
		// forward parameter toolset_help_video
		if( isset( $_GET['toolset_help_video'] ) )
			$url = add_query_arg( 'toolset_help_video', $_GET['toolset_help_video'], $url );

		// forward parameter ref
		if( isset( $_GET['ref'] ) )
			$url = add_query_arg( 'ref', $_GET['ref'], $url );

		return $url;
	}

	/**
	 * hidden page, but only when redirect after doing what we have to do
	 */
	private function redirect() {
		// shouldn't happen but if we have no redirect_url here: goto admin main page.
		if( ! $this->redirect_url )
			$this->redirect_url = admin_url();

		// Fix: escape the URL before echoing it into an inline script; parts
		// of it are built from raw $_GET values (see add_params_to_url()),
		// so the unescaped output was an XSS vector.
		die( '<script type="text/javascript">'.'window.location = "' . esc_url( $this->redirect_url ) . '";'.'</script>' );
	}
}
Analytical-Engine-Interactive/loopylogic
wp-content/plugins/types/application/controllers/page/hidden/helper.php
PHP
gpl-2.0
4,598
// Copyright (C) 2015 Conrad Sanderson
// Copyright (C) 2015 NICTA (www.nicta.com.au)
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.


//! \addtogroup spdiagview
//! @{


//! Class for storing data required to extract and set the diagonals of a sparse matrix.
//! Behaves as a column vector view over the diagonal starting at
//! (row_offset, col_offset) of the referenced sparse matrix.
template<typename eT>
class spdiagview : public Base<eT, spdiagview<eT> >
  {
  public:

  typedef eT elem_type;
  typedef typename get_pod_type<eT>::result pod_type;

  //! reference to the sparse matrix whose diagonal is viewed
  arma_aligned const SpMat<eT>& m;

  //! a diagonal view is always treated as a column vector
  static const bool is_row = false;
  static const bool is_col = true;

  //! starting position of the diagonal within the matrix
  const uword row_offset;
  const uword col_offset;

  const uword n_rows;     // equal to n_elem
  const uword n_elem;

  static const uword n_cols = 1;


  protected:

  //! constructed only by SpMat (see friend declaration below)
  arma_inline spdiagview(const SpMat<eT>& in_m, const uword in_row_offset, const uword in_col_offset, const uword len);


  public:

  inline ~spdiagview();

  //! copy a diagonal view into this diagonal
  inline void operator=(const spdiagview& x);

  //! element-wise in-place arithmetic with a scalar
  inline void operator+=(const eT val);
  inline void operator-=(const eT val);
  inline void operator*=(const eT val);
  inline void operator/=(const eT val);

  //! assignment / in-place arithmetic with dense expressions
  template<typename T1> inline void operator= (const Base<eT,T1>& x);
  template<typename T1> inline void operator+=(const Base<eT,T1>& x);
  template<typename T1> inline void operator-=(const Base<eT,T1>& x);
  template<typename T1> inline void operator%=(const Base<eT,T1>& x);
  template<typename T1> inline void operator/=(const Base<eT,T1>& x);

  //! assignment / in-place arithmetic with sparse expressions
  template<typename T1> inline void operator= (const SpBase<eT,T1>& x);
  template<typename T1> inline void operator+=(const SpBase<eT,T1>& x);
  template<typename T1> inline void operator-=(const SpBase<eT,T1>& x);
  template<typename T1> inline void operator%=(const SpBase<eT,T1>& x);
  template<typename T1> inline void operator/=(const SpBase<eT,T1>& x);

  inline eT at_alt (const uword ii) const;

  //! element access; non-const versions return a proxy so that writes
  //! can update the underlying sparse storage
  inline SpValProxy< SpMat<eT> > operator[](const uword ii);
  inline eT                      operator[](const uword ii) const;

  inline SpValProxy< SpMat<eT> > at(const uword ii);
  inline eT                      at(const uword ii) const;

  inline SpValProxy< SpMat<eT> > operator()(const uword ii);
  inline eT                      operator()(const uword ii) const;

  inline SpValProxy< SpMat<eT> > at(const uword in_n_row, const uword);
  inline eT                      at(const uword in_n_row, const uword) const;

  inline SpValProxy< SpMat<eT> > operator()(const uword in_n_row, const uword in_n_col);
  inline eT                      operator()(const uword in_n_row, const uword in_n_col) const;

  //! bulk element setters
  inline void fill(const eT val);
  inline void zeros();
  inline void ones();
  inline void randu();
  inline void randn();

  //! extract the viewed diagonal into a dense matrix
  inline static void extract(Mat<eT>& out, const spdiagview& in);


  private:

  friend class SpMat<eT>;
  spdiagview();
  };


//! @}
srinix07/pairing_3p2
lib/armadillo_5_1/include/armadillo_bits/spdiagview_bones.hpp
C++
gpl-2.0
3,101
<?php namespace TYPO3\CMS\Core\Log\Writer; /** * This file is part of the TYPO3 CMS project. * * It is free software; you can redistribute it and/or modify it under * the terms of the GNU General Public License, either version 2 * of the License, or any later version. * * For the full copyright and license information, please read the * LICENSE.txt file that was distributed with this source code. * * The TYPO3 project - inspiring people to share! */ /** * Abstract implementation of a log writer * * @author Ingo Renner <ingo@typo3.org> */ abstract class AbstractWriter implements \TYPO3\CMS\Core\Log\Writer\WriterInterface { /** * Constructs this log writer * * @param array $options Configuration options - depends on the actual log writer * @throws \InvalidArgumentException */ public function __construct(array $options = array()) { foreach ($options as $optionKey => $optionValue) { $methodName = 'set' . ucfirst($optionKey); if (method_exists($this, $methodName)) { $this->{$methodName}($optionValue); } else { throw new \InvalidArgumentException('Invalid log writer option "' . $optionKey . '" for log writer of type "' . get_class($this) . '"', 1321696152); } } } }
TimboDynamite/TYPO3-Testprojekt
typo3/sysext/core/Classes/Log/Writer/AbstractWriter.php
PHP
gpl-2.0
1,233
/*
 * Copyright (c) 2011, 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package org.graalvm.compiler.hotspot.replacements;

import java.lang.reflect.Method;
import java.util.EnumMap;

import org.graalvm.compiler.api.directives.GraalDirectives;
import org.graalvm.compiler.api.replacements.Snippet;
import org.graalvm.compiler.debug.GraalError;
import org.graalvm.compiler.hotspot.replacements.arraycopy.ArrayCopyCallNode;
import org.graalvm.compiler.nodes.java.DynamicNewArrayNode;
import org.graalvm.compiler.nodes.java.NewArrayNode;
import org.graalvm.compiler.replacements.Snippets;

import jdk.vm.ci.meta.JavaKind;

/**
 * Snippets for cloning arrays: one snippet per element kind, each allocating a
 * new array of the same length and copying the source elements into it via a
 * disjoint arraycopy. The {@link #arrayCloneMethods} map lets the compiler look
 * up the appropriate snippet by the array's {@link JavaKind}.
 */
public class ObjectCloneSnippets implements Snippets {

    /** Maps each array element kind to the snippet method that clones arrays of that kind. */
    public static final EnumMap<JavaKind, Method> arrayCloneMethods = new EnumMap<>(JavaKind.class);

    static {
        arrayCloneMethods.put(JavaKind.Boolean, getCloneMethod("booleanArrayClone", boolean[].class));
        arrayCloneMethods.put(JavaKind.Byte, getCloneMethod("byteArrayClone", byte[].class));
        arrayCloneMethods.put(JavaKind.Char, getCloneMethod("charArrayClone", char[].class));
        arrayCloneMethods.put(JavaKind.Short, getCloneMethod("shortArrayClone", short[].class));
        arrayCloneMethods.put(JavaKind.Int, getCloneMethod("intArrayClone", int[].class));
        arrayCloneMethods.put(JavaKind.Float, getCloneMethod("floatArrayClone", float[].class));
        arrayCloneMethods.put(JavaKind.Long, getCloneMethod("longArrayClone", long[].class));
        arrayCloneMethods.put(JavaKind.Double, getCloneMethod("doubleArrayClone", double[].class));
        arrayCloneMethods.put(JavaKind.Object, getCloneMethod("objectArrayClone", Object[].class));
    }

    /**
     * Looks up a clone snippet declared in this class by name and parameter type.
     * Reflection failures indicate a broken build, hence the fatal GraalError.
     *
     * @param name name of the snippet method declared in this class
     * @param param the single parameter type of the snippet method
     * @return the reflected snippet method
     */
    private static Method getCloneMethod(String name, Class<?> param) {
        try {
            return ObjectCloneSnippets.class.getDeclaredMethod(name, param);
        } catch (SecurityException | NoSuchMethodException e) {
            throw new GraalError(e);
        }
    }

    @Snippet
    public static boolean[] booleanArrayClone(boolean[] src) {
        // Uninitialized allocation is safe: the disjoint arraycopy below overwrites every element.
        boolean[] result = (boolean[]) NewArrayNode.newUninitializedArray(Boolean.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Boolean);
        return result;
    }

    @Snippet
    public static byte[] byteArrayClone(byte[] src) {
        byte[] result = (byte[]) NewArrayNode.newUninitializedArray(Byte.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Byte);
        return result;
    }

    @Snippet
    public static short[] shortArrayClone(short[] src) {
        short[] result = (short[]) NewArrayNode.newUninitializedArray(Short.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Short);
        return result;
    }

    @Snippet
    public static char[] charArrayClone(char[] src) {
        char[] result = (char[]) NewArrayNode.newUninitializedArray(Character.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Char);
        return result;
    }

    @Snippet
    public static int[] intArrayClone(int[] src) {
        int[] result = (int[]) NewArrayNode.newUninitializedArray(Integer.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Int);
        return result;
    }

    @Snippet
    public static float[] floatArrayClone(float[] src) {
        float[] result = (float[]) NewArrayNode.newUninitializedArray(Float.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Float);
        return result;
    }

    @Snippet
    public static long[] longArrayClone(long[] src) {
        long[] result = (long[]) NewArrayNode.newUninitializedArray(Long.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Long);
        return result;
    }

    @Snippet
    public static double[] doubleArrayClone(double[] src) {
        double[] result = (double[]) NewArrayNode.newUninitializedArray(Double.TYPE, src.length);
        ArrayCopyCallNode.disjointArraycopy(src, 0, result, 0, src.length, JavaKind.Double);
        return result;
    }

    @Snippet
    public static Object[] objectArrayClone(Object[] src) {
        /* Since this snippet is lowered early the array must be initialized */
        // The guarding non-null directive protects the getComponentType() call during lowering.
        Object[] result = (Object[]) DynamicNewArrayNode.newArray(GraalDirectives.guardingNonNull(src.getClass().getComponentType()), src.length, JavaKind.Object);
        ArrayCopyCallNode.disjointUninitializedArraycopy(src, 0, result, 0, src.length, JavaKind.Object);
        return result;
    }
}
YouDiSN/OpenJDK-Research
jdk9/hotspot/src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.hotspot/src/org/graalvm/compiler/hotspot/replacements/ObjectCloneSnippets.java
Java
gpl-2.0
5,679
<?php /** * This file is part of the ramsey/uuid library * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. * * @copyright Copyright (c) Ben Ramsey <ben@benramsey.com> * @license http://opensource.org/licenses/MIT MIT * @link https://benramsey.com/projects/ramsey-uuid/ Documentation * @link https://packagist.org/packages/ramsey/uuid Packagist * @link https://github.com/ramsey/uuid GitHub */ namespace Ramsey\Uuid\Provider\Node; use Ramsey\Uuid\Provider\NodeProviderInterface; /** * FallbackNodeProvider attempts to gain the system host ID from an array of * providers, falling back to the next in line in the event a host ID can not be * obtained */ class FallbackNodeProvider implements NodeProviderInterface { /** * @var NodeProviderInterface[] */ private $nodeProviders; /** * Constructs a `FallbackNodeProvider` using an array of node providers * * @param NodeProviderInterface[] $providers Array of node providers */ public function __construct(array $providers) { $this->nodeProviders = $providers; } /** * Returns the system node ID by iterating over an array of node providers * and returning the first non-empty value found * * @return string System node ID as a hexadecimal string * @throws \Exception */ public function getNode() { foreach ($this->nodeProviders as $provider) { if ($node = $provider->getNode()) { return $node; } } return null; } }
matrix-msu/kora
vendor/ramsey/uuid/src/Provider/Node/FallbackNodeProvider.php
PHP
gpl-2.0
1,632
## # This file is part of WhatWeb and may be subject to # redistribution and commercial restrictions. Please see the WhatWeb # web site for more information on licensing and terms of use. # http://www.morningstarsecurity.com/research/whatweb ## Plugin.define "SQLiteManager" do author "Brendan Coles <bcoles@gmail.com>" # 2012-01-14 version "0.1" description "SQLiteManager - Web-based SQLite administration - Homepage: http://www.sqlitemanager.org" # Google results as at 2012-01-14 # # 33 for intitle:"SQLite version" "Welcome to SQLiteManager version" # 26 for inurl:"main.php?dbsel=" # Dorks # dorks [ 'intitle:"SQLite version" "Welcome to SQLiteManager version"' ] # Matches # matches [ # HTML Comments { :text=>'<!-- SQLiteFunctionProperties.class.php : propView() -->' }, { :text=>'<!-- common.lib.php : displayMenuTitle() -->' }, # Form { :text=>'<td style="white-space: nowrap"> <form name="database" action="main.php" enctype="multipart/form-data" method="POST" onSubmit="checkPath();" target="main">' }, # h2 class="sqlmVersion" { :text=>'<h2 class="sqlmVersion">Database : <a href="main.php?dbsel=' }, # Title # SQLite Version Detection { :string=>/<title>(SQLite version [\d\.\s-]+)(undefined)?<\/title>/ }, # h2 class="sqlmVersion" # Version Detection { :version=>/<h2 class="sqlmVersion">Welcome to <a href="http:\/\/www\.sqlitemanager\.org" target="_blank">SQLiteManager<\/a> version ([^\s^>]+)<\/h2>/ }, # h4 class="serverInfo" # SQLite Version Detection { :string=>/<h4 class="serverInfo">(SQLite version [\d\.\s-]+)(undefined)? \/ PHP version 5.2.17<\/h4>/ }, # h4 class="serverInfo" # SQLite Version Detection { :string=>/<h4 class="serverInfo">SQLite version [\d\.\s-]+(undefined)? \/ (PHP version [^\s^<]+)<\/h4>/, :offset=>1 }, ] end
tempbottle/WhatWeb
plugins/SQLiteManager.rb
Ruby
gpl-2.0
1,772
<?php /** * Magento * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) * that is bundled with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://opensource.org/licenses/osl-3.0.php * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to license@magentocommerce.com so we can send you a copy immediately. * * DISCLAIMER * * Do not edit or add to this file if you wish to upgrade Magento to newer * versions in the future. If you wish to customize Magento for your * needs please refer to http://www.magentocommerce.com for more information. * * @category Mage * @package Mage_Paypal * @copyright Copyright (c) 2012 Magento Inc. (http://www.magentocommerce.com) * @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0) */ /** * PayPal Standard payment "form" */ class Mage_Paypal_Block_Express_Form extends Mage_Paypal_Block_Standard_Form { /** * Payment method code * @var string */ protected $_methodCode = Mage_Paypal_Model_Config::METHOD_WPP_EXPRESS; /** * Set template and redirect message */ protected function _construct() { $result = parent::_construct(); $this->setRedirectMessage(Mage::helper('paypal')->__('You will be redirected to the PayPal website.')); return $result; } /** * Set data to block * * @return Mage_Core_Block_Abstract */ protected function _beforeToHtml() { $customerId = Mage::getSingleton('customer/session')->getCustomerId(); if (Mage::helper('paypal')->shouldAskToCreateBillingAgreement($this->_config, $customerId) && $this->canCreateBillingAgreement()) { $this->setCreateBACode(Mage_Paypal_Model_Express_Checkout::PAYMENT_INFO_TRANSPORT_BILLING_AGREEMENT); } return parent::_beforeToHtml(); } }
keegan2149/magento
sites/default/app/code/core/Mage/Paypal/Block/Express/Form.php
PHP
gpl-2.0
2,041
/* Copyright (c) 2003-2014, CKSource - Frederico Knabben. All rights reserved. For licensing, see LICENSE.md or http://ckeditor.com/license */ CKEDITOR.plugins.setLang( 'elementspath', 'si', { eleLabel: 'මුලද්‍රව්‍ය මාර්ගය', eleTitle: '%1 මුල' } );
gmuro/dolibarr
htdocs/includes/ckeditor/ckeditor/_source/plugins/elementspath/lang/si.js
JavaScript
gpl-3.0
291
/* *
 *
 *  (c) 2010-2019 Torstein Honsi
 *
 *  License: www.highcharts.com/license
 *
 * */

'use strict';

import H from './Globals.js';
import './Utilities.js';
import './Options.js';
import './Series.js';

var pick = H.pick,
    seriesType = H.seriesType;

/**
 * Spline series type.
 *
 * @private
 * @class
 * @name Highcharts.seriesTypes.spline
 *
 * @augments Highcarts.Series
 */
seriesType(
    'spline',
    'line',

    /**
     * A spline series is a special type of line series, where the segments
     * between the data points are smoothed.
     *
     * @sample {highcharts} highcharts/demo/spline-irregular-time/
     *         Spline chart
     * @sample {highstock} stock/demo/spline/
     *         Spline chart
     *
     * @extends      plotOptions.series
     * @excluding    step
     * @product      highcharts highstock
     * @optionparent plotOptions.spline
     */
    {
    },

    /** @lends seriesTypes.spline.prototype */
    {

        /**
         * Get the cubic Bezier segment ('C' path command) from a given
         * point's previous neighbour to the given point.
         *
         * The control points are placed on the line between the neighbours,
         * corrected so the curve passes through the main point, then clamped
         * so the curve never overshoots the neighbouring y values (no false
         * extremes).
         *
         * @private
         * @function Highcharts.seriesTypes.spline#getPointSpline
         *
         * @param {Array<Highcharts.Point>} points
         *
         * @param {Highcharts.Point} point
         *
         * @param {number} i
         *
         * @return {Highcharts.SVGPathArray}
         */
        getPointSpline: function (points, point, i) {
            var
                // 1 means control points midway between points, 2 means 1/3
                // from the point, 3 is 1/4 etc
                smoothing = 1.5,
                denom = smoothing + 1,
                plotX = point.plotX,
                plotY = point.plotY,
                lastPoint = points[i - 1],
                nextPoint = points[i + 1],
                leftContX,
                leftContY,
                rightContX,
                rightContY,
                ret;

            // A neighbour only contributes a control point when it is a real,
            // curvable point; cliff points (broken axes/area splines next to
            // null) fall back to a straight segment.
            function doCurve(otherPoint) {
                return otherPoint &&
                    !otherPoint.isNull &&
                    otherPoint.doCurve !== false &&
                    !point.isCliff; // #6387, area splines next to null
            }

            // Find control points
            if (doCurve(lastPoint) && doCurve(nextPoint)) {
                var lastX = lastPoint.plotX,
                    lastY = lastPoint.plotY,
                    nextX = nextPoint.plotX,
                    nextY = nextPoint.plotY,
                    correction = 0;

                // Weighted averages toward the neighbours; larger `smoothing`
                // pulls the control points closer to the main point.
                leftContX = (smoothing * plotX + lastX) / denom;
                leftContY = (smoothing * plotY + lastY) / denom;
                rightContX = (smoothing * plotX + nextX) / denom;
                rightContY = (smoothing * plotY + nextY) / denom;

                // Have the two control points make a straight line through
                // main point
                if (rightContX !== leftContX) { // #5016, division by zero
                    correction = (
                        ((rightContY - leftContY) * (rightContX - plotX)) /
                        (rightContX - leftContX) + plotY - rightContY
                    );
                }
                leftContY += correction;
                rightContY += correction;

                // to prevent false extremes, check that control points are
                // between neighbouring points' y values
                if (leftContY > lastY && leftContY > plotY) {
                    leftContY = Math.max(lastY, plotY);
                    // mirror of left control point
                    rightContY = 2 * plotY - leftContY;
                } else if (leftContY < lastY && leftContY < plotY) {
                    leftContY = Math.min(lastY, plotY);
                    rightContY = 2 * plotY - leftContY;
                }
                if (rightContY > nextY && rightContY > plotY) {
                    rightContY = Math.max(nextY, plotY);
                    leftContY = 2 * plotY - rightContY;
                } else if (rightContY < nextY && rightContY < plotY) {
                    rightContY = Math.min(nextY, plotY);
                    leftContY = 2 * plotY - rightContY;
                }

                // record for drawing in next point
                point.rightContX = rightContX;
                point.rightContY = rightContY;
            }

            // NOTE: the original source carries a commented-out debugging
            // helper here that renders the left/right control points as
            // red/green circles and connector paths via
            // this.chart.renderer.circle()/path() — see the Highcharts
            // repository (parts/SplineSeries.js) if you need to restore it.

            // The 'C' command: previous point's right control point (or the
            // point itself when none was computed), this point's left control
            // point (same fallback), then the anchor.
            ret = [
                'C',
                pick(lastPoint.rightContX, lastPoint.plotX),
                pick(lastPoint.rightContY, lastPoint.plotY),
                pick(leftContX, plotX),
                pick(leftContY, plotY),
                plotX,
                plotY
            ];

            // reset for updating series later
            lastPoint.rightContX = lastPoint.rightContY = null;

            return ret;
        }
    }
);

/**
 * A `spline` series. If the [type](#series.spline.type) option is
 * not specified, it is inherited from [chart.type](#chart.type).
 *
 * @extends   series,plotOptions.spline
 * @excluding dataParser, dataURL, step
 * @product   highcharts highstock
 * @apioption series.spline
 */

/**
 * An array of data points for the series. For the `spline` series type,
 * points can be given in the following ways:
 *
 * 1. An array of numerical values. In this case, the numerical values will be
 *    interpreted as `y` options. The `x` values will be automatically
 *    calculated, either starting at 0 and incremented by 1, or from
 *    `pointStart` and `pointInterval` given in the series options. If the axis
 *    has categories, these will be used. Example:
 *    ```js
 *    data: [0, 5, 3, 5]
 *    ```
 *
 * 2. An array of arrays with 2 values. In this case, the values correspond to
 *    `x,y`. If the first value is a string, it is applied as the name of the
 *    point, and the `x` value is inferred.
 *    ```js
 *    data: [
 *        [0, 9],
 *        [1, 2],
 *        [2, 8]
 *    ]
 *    ```
 *
 * 3. An array of objects with named values. The following snippet shows only a
 *    few settings, see the complete options set below. If the total number of
 *    data points exceeds the series'
 *    [turboThreshold](#series.spline.turboThreshold), this option is not
 *    available.
 *    ```js
 *    data: [{
 *        x: 1,
 *        y: 9,
 *        name: "Point2",
 *        color: "#00FF00"
 *    }, {
 *        x: 1,
 *        y: 0,
 *        name: "Point1",
 *        color: "#FF00FF"
 *    }]
 *    ```
 *
 * @sample {highcharts} highcharts/chart/reflow-true/
 *         Numerical values
 * @sample {highcharts} highcharts/series/data-array-of-arrays/
 *         Arrays of numeric x and y
 * @sample {highcharts} highcharts/series/data-array-of-arrays-datetime/
 *         Arrays of datetime x and y
 * @sample {highcharts} highcharts/series/data-array-of-name-value/
 *         Arrays of point.name and y
 * @sample {highcharts} highcharts/series/data-array-of-objects/
 *         Config objects
 *
 * @type      {Array<number|Array<(number|string),number>|*>}
 * @extends   series.line.data
 * @product   highcharts highstock
 * @apioption series.spline.data
 */
blue-eyed-devil/testCMS
externals/highcharts/es-modules/parts/SplineSeries.js
JavaScript
gpl-3.0
8,806
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
#
# Copyright (C) 2017 Lenovo, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#
# Module to Reset to factory settings of Lenovo Switches
# Lenovo Networking
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: cnos_factory
author: "Anil Kumar Muraleedharan (@amuraleedhar)"
short_description: Reset the switch's startup configuration to default (factory) on devices running Lenovo CNOS
description:
    - This module allows you to reset a switch's startup configuration. The method provides a way to reset the startup configuration to its factory settings. This is helpful when you want to move the switch to another topology as a new network device. This module uses SSH to manage network device configuration. The results of the operation can be viewed in results directory. For more information about this module from Lenovo and customizing it usage for your use cases, please visit U(http://systemx.lenovofiles.com/help/index.jsp?topic=%2Fcom.lenovo.switchmgt.ansible.doc%2Fcnos_factory.html)
version_added: "2.3"
extends_documentation_fragment: cnos
options: {}

'''
EXAMPLES = '''
Tasks : The following are examples of using the module cnos_reload. These are written in the main.yml file of the tasks directory.
---
- name: Test Reset to factory
  cnos_factory:
      host: "{{ inventory_hostname }}"
      username: "{{ hostvars[inventory_hostname]['ansible_ssh_user'] }}"
      password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass'] }}"
      deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}"
      outputfile: "./results/test_factory_{{ inventory_hostname }}_output.txt"

'''
RETURN = '''
msg:
  description: Success or failure message
  returned: always
  type: string
  sample: "Switch Startup Config is Reset to factory settings"
'''

import sys
try:
    import paramiko
    HAS_PARAMIKO = True
except ImportError:
    HAS_PARAMIKO = False
# NOTE: `import time` previously appeared twice in this list; deduplicated.
import time
import socket
import array
import json
import re
try:
    # Narrowed from a bare `except:` — only a missing library should flip
    # HAS_LIB; any other failure inside the cnos package should surface.
    from ansible.module_utils.network.cnos import cnos
    HAS_LIB = True
except ImportError:
    HAS_LIB = False
from ansible.module_utils.basic import AnsibleModule
from collections import defaultdict


def main():
    """Reset the switch startup configuration to factory defaults.

    Opens an interactive SSH shell to the switch (via paramiko), enters
    enable mode, issues ``save erase`` and confirms the prompt with ``y``.
    The full CLI transcript is appended to ``outputfile``; the module exits
    with changed=True on success or fails with the device's error message.
    """
    module = AnsibleModule(
        argument_spec=dict(
            outputfile=dict(required=True),
            host=dict(required=True),
            username=dict(required=True),
            password=dict(required=True, no_log=True),
            enablePassword=dict(required=False, no_log=True),
            deviceType=dict(required=True),),
        supports_check_mode=False)

    username = module.params['username']
    password = module.params['password']
    enablePassword = module.params['enablePassword']
    cliCommand = "save erase \n"
    outputfile = module.params['outputfile']
    hostIP = module.params['host']
    deviceType = module.params['deviceType']
    output = ""
    if not HAS_PARAMIKO:
        module.fail_json(msg='paramiko is required for this module')

    # Create instance of SSHClient object
    remote_conn_pre = paramiko.SSHClient()

    # Automatically add untrusted hosts (make sure okay for security policy
    # in your environment)
    remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())

    # initiate SSH connection with the switch
    remote_conn_pre.connect(hostIP, username=username, password=password)
    time.sleep(2)

    # Use invoke_shell to establish an 'interactive session'
    remote_conn = remote_conn_pre.invoke_shell()
    time.sleep(2)

    # Enable and enter configure terminal then send command
    output = output + cnos.waitForDeviceResponse("\n", ">", 2, remote_conn)

    output = output + cnos.enterEnableModeForDevice(enablePassword, 3,
                                                    remote_conn)

    # Make terminal length = 0
    output = output + cnos.waitForDeviceResponse("terminal length 0\n",
                                                 "#", 2, remote_conn)

    # Send the CLi command; the device answers with a [y/n] confirmation
    # prompt, which we acknowledge with "y".
    output = output + cnos.waitForDeviceResponse(cliCommand, "[n]", 2,
                                                 remote_conn)

    output = output + cnos.waitForDeviceResponse("y" + "\n", "#", 2,
                                                 remote_conn)

    # Save the transcript into the file (context manager guarantees close;
    # also avoids shadowing the `file` builtin as the old code did)
    with open(outputfile, "a") as transcript:
        transcript.write(output)

    errorMsg = cnos.checkOutputForError(output)
    if errorMsg is None:
        module.exit_json(changed=True,
                         msg="Switch Startup Config is Reset to factory settings ")
    else:
        module.fail_json(msg=errorMsg)


if __name__ == '__main__':
    main()
hryamzik/ansible
lib/ansible/modules/network/cnos/cnos_factory.py
Python
gpl-3.0
5,299
package org.zarroboogs.weibo.dao; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.zarroboogs.util.net.HttpUtility; import org.zarroboogs.util.net.WeiboException; import org.zarroboogs.util.net.HttpUtility.HttpMethod; import org.zarroboogs.utils.ImageUtility; import org.zarroboogs.utils.WeiBoURLs; import org.zarroboogs.utils.file.FileLocationMethod; import org.zarroboogs.utils.file.FileManager; import org.zarroboogs.weibo.support.asyncdrawable.TaskCache; import android.graphics.Bitmap; import android.text.TextUtils; import java.util.HashMap; import java.util.Map; public class MapDao { public Bitmap getMap() throws WeiboException { String url = WeiBoURLs.STATIC_MAP; Map<String, String> map = new HashMap<String, String>(); map.put("access_token", access_token); String coordinates = String.valueOf(lat) + "," + String.valueOf(lan); map.put("center_coordinate", coordinates); map.put("zoom", "14"); map.put("size", "600x380"); String jsonData = HttpUtility.getInstance().executeNormalTask(HttpMethod.Get, url, map); String mapUrl = ""; try { JSONObject jsonObject = new JSONObject(jsonData); JSONArray array = jsonObject.optJSONArray("map"); jsonObject = array.getJSONObject(0); mapUrl = jsonObject.getString("image_url"); } catch (JSONException e) { } if (TextUtils.isEmpty(mapUrl)) { return null; } String filePath = FileManager.getFilePathFromUrl(mapUrl, FileLocationMethod.map); boolean downloaded = TaskCache.waitForPictureDownload(mapUrl, null, filePath, FileLocationMethod.map); if (!downloaded) { return null; } Bitmap bitmap = ImageUtility.readNormalPic(FileManager.getFilePathFromUrl(mapUrl, FileLocationMethod.map), -1, -1); return bitmap; } public MapDao(String token, double lan, double lat) { this.access_token = token; this.lan = lan; this.lat = lat; } private String access_token; private double lan; private double lat; }
tsdl2013/iBeebo
app/src/main/java/org/zarroboogs/weibo/dao/MapDao.java
Java
gpl-3.0
2,199
/**
 * Copyright (c) 2006-2015, JGraph Ltd
 * Copyright (c) 2006-2015, Gaudenz Alder
 */
/**
 * Class: mxHandle
 * 
 * Implements a single custom handle for vertices.
 * 
 * Constructor: mxHandle
 * 
 * Constructs a new handle for the given state.
 * 
 * Parameters:
 * 
 * state - <mxCellState> of the cell to be handled.
 * cursor - Optional CSS cursor for the handle. Default is <cursor>.
 * image - Optional <mxImage> used to render the handle. Default is <image>.
 */
function mxHandle(state, cursor, image)
{
	this.graph = state.view.graph;
	this.state = state;
	this.cursor = (cursor != null) ? cursor : this.cursor;
	this.image = (image != null) ? image : this.image;
	
	this.init();
};

/**
 * Variable: cursor
 * 
 * Specifies the cursor to be used for this handle. Default is 'default'.
 */
mxHandle.prototype.cursor = 'default';

/**
 * Variable: image
 * 
 * Specifies the <mxImage> to be used to render the handle. Default is null.
 */
mxHandle.prototype.image = null;

/**
 * Variable: ignoreGrid
 * 
 * Specifies whether grid snapping should be ignored when this handle is
 * moved. Default is false. (Doc fixed: previously a copy-paste of the
 * <image> description.)
 */
mxHandle.prototype.ignoreGrid = false;

/**
 * Function: getPosition
 * 
 * Hook for subclassers to return the current position of the handle.
 */
mxHandle.prototype.getPosition = function(bounds) { };

/**
 * Function: setPosition
 * 
 * Hooks for subclassers to update the style in the <state>.
 */
mxHandle.prototype.setPosition = function(bounds, pt, me) { };

/**
 * Function: execute
 * 
 * Hook for subclassers to execute the handle.
 */
mxHandle.prototype.execute = function() { };

/**
 * Function: copyStyle
 * 
 * Sets the cell style with the given name to the corresponding value in <state>.
 */
mxHandle.prototype.copyStyle = function(key)
{
	this.graph.setCellStyles(key, this.state.style[key], [this.state.cell]);
};

/**
 * Function: processEvent
 * 
 * Processes the given <mxMouseEvent> and invokes <setPosition>. Converts the
 * mouse position to unscaled, untranslated graph coordinates, optionally
 * snaps it to the grid in the rotated coordinate system, then delegates to
 * <setPosition> and repaints.
 */
mxHandle.prototype.processEvent = function(me)
{
	var scale = this.graph.view.scale;
	var tr = this.graph.view.translate;
	var pt = new mxPoint(me.getGraphX() / scale - tr.x, me.getGraphY() / scale - tr.y);
	
	// Center shape on mouse cursor
	if (this.shape != null && this.shape.bounds != null)
	{
		pt.x -= this.shape.bounds.width / scale / 4;
		pt.y -= this.shape.bounds.height / scale / 4;
	}
	
	// Snaps to grid for the rotated position then applies the rotation for the direction after that
	var alpha1 = -mxUtils.toRadians(this.getRotation());
	var alpha2 = -mxUtils.toRadians(this.getTotalRotation()) - alpha1;
	pt = this.flipPoint(this.rotatePoint(this.snapPoint(this.rotatePoint(pt, alpha1),
		this.ignoreGrid || !this.graph.isGridEnabledEvent(me.getEvent())), alpha2));
	this.setPosition(this.state.getPaintBounds(), pt, me);
	this.positionChanged();
	this.redraw();
};

/**
 * Function: positionChanged
 * 
 * Called after <setPosition> has been called in <processEvent>. This repaints
 * the state using <mxCellRenderer>.
 */
mxHandle.prototype.positionChanged = function()
{
	if (this.state.text != null)
	{
		this.state.text.apply(this.state);
	}
	
	if (this.state.shape != null)
	{
		this.state.shape.apply(this.state);
	}
	
	this.graph.cellRenderer.redraw(this.state, true);
};

/**
 * Function: getRotation
 * 
 * Returns the rotation defined in the style of the cell.
 */
mxHandle.prototype.getRotation = function()
{
	if (this.state.shape != null)
	{
		return this.state.shape.getRotation();
	}
	
	return 0;
};

/**
 * Function: getTotalRotation
 * 
 * Returns the rotation from the style and the rotation from the direction of
 * the cell.
 */
mxHandle.prototype.getTotalRotation = function()
{
	if (this.state.shape != null)
	{
		return this.state.shape.getShapeRotation();
	}
	
	return 0;
};

/**
 * Function: init
 * 
 * Creates and initializes the shapes required for this handle. Uses an
 * <mxImageShape> when <image> is set, otherwise delegates to <createShape>.
 */
mxHandle.prototype.init = function()
{
	var html = this.isHtmlRequired();
	
	if (this.image != null)
	{
		this.shape = new mxImageShape(new mxRectangle(0, 0, this.image.width, this.image.height), this.image.src);
		this.shape.preserveImageAspect = false;
	}
	else
	{
		this.shape = this.createShape(html);
	}
	
	this.initShape(html);
};

/**
 * Function: createShape
 * 
 * Creates and returns the shape for this handle.
 */
mxHandle.prototype.createShape = function(html)
{
	var bounds = new mxRectangle(0, 0, mxConstants.HANDLE_SIZE, mxConstants.HANDLE_SIZE);
	
	return new mxRectangleShape(bounds, mxConstants.HANDLE_FILLCOLOR, mxConstants.HANDLE_STROKECOLOR);
};

/**
 * Function: initShape
 * 
 * Initializes <shape> and sets its cursor. Chooses the rendering dialect
 * based on whether HTML rendering is required and allowed.
 */
mxHandle.prototype.initShape = function(html)
{
	if (html && this.shape.isHtmlAllowed())
	{
		this.shape.dialect = mxConstants.DIALECT_STRICTHTML;
		this.shape.init(this.graph.container);
	}
	else
	{
		this.shape.dialect = (this.graph.dialect != mxConstants.DIALECT_SVG) ? mxConstants.DIALECT_MIXEDHTML : mxConstants.DIALECT_SVG;
		
		if (this.cursor != null)
		{
			this.shape.init(this.graph.getView().getOverlayPane());
		}
	}
	
	mxEvent.redirectMouseEvents(this.shape.node, this.graph, this.state);
	this.shape.node.style.cursor = this.cursor;
};

/**
 * Function: redraw
 * 
 * Renders the shape for this handle. Converts the position returned by
 * <getPosition> (unscaled cell coordinates) back to screen coordinates,
 * applying flip, total rotation, view scale and translate.
 */
mxHandle.prototype.redraw = function()
{
	if (this.shape != null && this.state.shape != null)
	{
		var pt = this.getPosition(this.state.getPaintBounds());
		
		if (pt != null)
		{
			var alpha = mxUtils.toRadians(this.getTotalRotation());
			pt = this.rotatePoint(this.flipPoint(pt), alpha);
	
			var scale = this.graph.view.scale;
			var tr = this.graph.view.translate;
			this.shape.bounds.x = Math.floor((pt.x + tr.x) * scale - this.shape.bounds.width / 2);
			this.shape.bounds.y = Math.floor((pt.y + tr.y) * scale - this.shape.bounds.height / 2);
			
			// Needed to force update of text bounds
			this.state.unscaledWidth = null;
			
			this.shape.redraw();
		}
	}
};

/**
 * Function: isHtmlRequired
 * 
 * Returns true if this handle should be rendered in HTML. This returns true if
 * the text node is in the graph container.
 */
mxHandle.prototype.isHtmlRequired = function()
{
	return this.state.text != null && this.state.text.node.parentNode == this.graph.container;
};

/**
 * Function: rotatePoint
 * 
 * Rotates the point by the given angle (in radians) around the center of the
 * cell bounds.
 */
mxHandle.prototype.rotatePoint = function(pt, alpha)
{
	var bounds = this.state.getCellBounds();
	var cx = new mxPoint(bounds.getCenterX(), bounds.getCenterY());
	var cos = Math.cos(alpha);
	var sin = Math.sin(alpha);
	
	return mxUtils.getRotatedPoint(pt, cos, sin, cx);
};

/**
 * Function: flipPoint
 * 
 * Flips the given point vertically and/or horizontally within the cell
 * bounds, according to the shape's flipH/flipV flags. Modifies pt in-place.
 */
mxHandle.prototype.flipPoint = function(pt)
{
	if (this.state.shape != null)
	{
		var bounds = this.state.getCellBounds();
		
		if (this.state.shape.flipH)
		{
			pt.x = 2 * bounds.x + bounds.width - pt.x;
		}
		
		if (this.state.shape.flipV)
		{
			pt.y = 2 * bounds.y + bounds.height - pt.y;
		}
	}
	
	return pt;
};

/**
 * Function: snapPoint
 * 
 * Snaps the given point to the grid if ignore is false. This modifies
 * the given point in-place and also returns it.
 */
mxHandle.prototype.snapPoint = function(pt, ignore)
{
	if (!ignore)
	{
		pt.x = this.graph.snap(pt.x);
		pt.y = this.graph.snap(pt.y);
	}
	
	return pt;
};

/**
 * Function: setVisible
 * 
 * Shows or hides this handle.
 */
mxHandle.prototype.setVisible = function(visible)
{
	if (this.shape != null && this.shape.node != null)
	{
		this.shape.node.style.display = (visible) ? '' : 'none';
	}
};

/**
 * Function: reset
 * 
 * Resets the state of this handle by setting its visibility to true and
 * reloading the cell style (discarding any preview changes).
 */
mxHandle.prototype.reset = function()
{
	this.setVisible(true);
	this.state.style = this.graph.getCellStyle(this.state.cell);
	this.positionChanged();
};

/**
 * Function: destroy
 * 
 * Destroys this handle.
 */
mxHandle.prototype.destroy = function()
{
	if (this.shape != null)
	{
		this.shape.destroy();
		this.shape = null;
	}
};
kyro46/assMxGraphQuestion
templates/mxgraph/js/handler/mxHandle.js
JavaScript
gpl-3.0
7,788
#region License

// Copyright (c) 2013, ClearCanvas Inc.
// All rights reserved.
// http://www.clearcanvas.ca
//
// This file is part of the ClearCanvas RIS/PACS open source project.
//
// The ClearCanvas RIS/PACS open source project is free software: you can
// redistribute it and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// The ClearCanvas RIS/PACS open source project is distributed in the hope that it
// will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
// Public License for more details.
//
// You should have received a copy of the GNU General Public License along with
// the ClearCanvas RIS/PACS open source project.  If not, see
// <http://www.gnu.org/licenses/>.

#endregion

using System;
using System.Collections;
using System.Collections.Generic;
using ClearCanvas.Common;
using ClearCanvas.Desktop;
using ClearCanvas.Enterprise.Common;
using ClearCanvas.Ris.Application.Common;
using ClearCanvas.Ris.Application.Common.Admin.LocationAdmin;
using ClearCanvas.Ris.Application.Common.Admin.FacilityAdmin;
using ClearCanvas.Desktop.Validation;

namespace ClearCanvas.Ris.Client.Admin
{
    /// <summary>
    /// Extension point for views onto <see cref="LocationEditorComponent"/>
    /// </summary>
    [ExtensionPoint]
    public class LocationEditorComponentViewExtensionPoint : ExtensionPoint<IApplicationComponentView>
    {
    }

    /// <summary>
    /// Application component for creating a new location or editing an existing one.
    /// Loads the location (when editing) and the facility choices on <see cref="Start"/>,
    /// exposes the editable fields as a presentation model, and persists via the
    /// location admin service on <see cref="Accept"/>.
    /// </summary>
    [AssociateView(typeof(LocationEditorComponentViewExtensionPoint))]
    public class LocationEditorComponent : ApplicationComponent
    {
        // Facility choices for the drop-down, loaded in Start().
        private List<FacilitySummary> _facilityChoices;
        // The detail object being edited; bound by the presentation-model properties below.
        private LocationDetail _locationDetail;
        // Reference to the persisted location; null semantics only when _isNew.
        private EntityRef _locationRef;
        // True when this editor creates a new location rather than editing an existing one.
        private readonly bool _isNew;
        // Summary of the saved location, available after a successful SaveChanges().
        private LocationSummary _locationSummary;

        /// <summary>
        /// Constructor for creating a new location.
        /// </summary>
        public LocationEditorComponent()
        {
            _isNew = true;
        }

        /// <summary>
        /// Constructor for editing an existing location.
        /// </summary>
        public LocationEditorComponent(EntityRef locationRef)
        {
            _isNew = false;
            _locationRef = locationRef;
        }

        /// <summary>
        /// Summary of the location that was saved; valid after <see cref="Accept"/> succeeds.
        /// </summary>
        public LocationSummary LocationSummary
        {
            get { return _locationSummary; }
        }

        /// <summary>
        /// Loads the location detail (when editing) and the list of facilities;
        /// for a new location, defaults the facility to the first available one.
        /// </summary>
        public override void Start()
        {
            if (_isNew)
            {
                _locationDetail = new LocationDetail();
            }
            else
            {
                Platform.GetService(
                    delegate(ILocationAdminService service)
                    {
                        var response = service.LoadLocationForEdit(new LoadLocationForEditRequest(_locationRef));
                        _locationRef = response.LocationDetail.LocationRef;
                        _locationDetail = response.LocationDetail;
                    });
            }

            Platform.GetService(
                delegate(IFacilityAdminService service)
                {
                    var response = service.ListAllFacilities(new ListAllFacilitiesRequest());
                    _facilityChoices = response.Facilities;

                    if (_isNew && _locationDetail.Facility == null && response.Facilities.Count > 0)
                    {
                        _locationDetail.Facility = response.Facilities[0];
                    }
                });

            base.Start();
        }

        /// <summary>
        /// The location detail being edited.
        /// </summary>
        public LocationDetail LocationDetail
        {
            get { return _locationDetail; }
            set { _locationDetail = value; }
        }

        #region Presentation Model

        [ValidateNotNull]
        public string Id
        {
            get { return _locationDetail.Id; }
            set
            {
                _locationDetail.Id = value;
                this.Modified = true;
            }
        }

        [ValidateNotNull]
        public string Name
        {
            get { return _locationDetail.Name; }
            set
            {
                _locationDetail.Name = value;
                this.Modified = true;
            }
        }

        public string Description
        {
            get { return _locationDetail.Description; }
            set
            {
                _locationDetail.Description = value;
                this.Modified = true;
            }
        }

        /// <summary>
        /// Facility choices for the view's drop-down list.
        /// </summary>
        public IList FacilityChoices
        {
            get { return _facilityChoices; }
        }

        [ValidateNotNull]
        public FacilitySummary Facility
        {
            get { return _locationDetail.Facility; }
            set
            {
                _locationDetail.Facility = value;
                this.Modified = true;
            }
        }

        /// <summary>
        /// Formats a facility choice for display.
        /// </summary>
        public string FormatFacility(object item)
        {
            var f = (FacilitySummary) item;
            return f.Name;
        }

        public string Building
        {
            get { return _locationDetail.Building; }
            set
            {
                _locationDetail.Building = value;
                this.Modified = true;
            }
        }

        public string Floor
        {
            get { return _locationDetail.Floor; }
            set
            {
                _locationDetail.Floor = value;
                this.Modified = true;
            }
        }

        public string PointOfCare
        {
            get { return _locationDetail.PointOfCare; }
            set
            {
                _locationDetail.PointOfCare = value;
                this.Modified = true;
            }
        }

        /// <summary>
        /// Validates and saves; exits with Accepted on success, reports and exits
        /// with Error on failure, or shows validation messages and stays open.
        /// </summary>
        public void Accept()
        {
            if (this.HasValidationErrors)
            {
                this.ShowValidation(true);
            }
            else
            {
                try
                {
                    SaveChanges();
                    this.Exit(ApplicationComponentExitCode.Accepted);
                }
                catch (Exception e)
                {
                    ExceptionHandler.Report(e, SR.ExceptionSaveLocation, this.Host.DesktopWindow,
                        delegate
                        {
                            this.ExitCode = ApplicationComponentExitCode.Error;
                            this.Host.Exit();
                        });
                }
            }
        }

        /// <summary>
        /// Discards changes and closes the editor.
        /// </summary>
        public void Cancel()
        {
            this.ExitCode = ApplicationComponentExitCode.None;
            Host.Exit();
        }

        /// <summary>
        /// Accept is enabled only once something has been modified.
        /// </summary>
        public bool AcceptEnabled
        {
            get { return this.Modified; }
        }

        #endregion

        // Persists via Add or Update depending on _isNew, and captures the
        // resulting entity ref and summary.
        private void SaveChanges()
        {
            if (_isNew)
            {
                Platform.GetService(
                    delegate(ILocationAdminService service)
                    {
                        var response = service.AddLocation(new AddLocationRequest(_locationDetail));
                        _locationRef = response.Location.LocationRef;
                        _locationSummary = response.Location;
                    });
            }
            else
            {
                Platform.GetService(
                    delegate(ILocationAdminService service)
                    {
                        var response = service.UpdateLocation(new UpdateLocationRequest(_locationDetail));
                        _locationRef = response.Location.LocationRef;
                        _locationSummary = response.Location;
                    });
            }
        }

        /// <summary>
        /// Raised when <see cref="AcceptEnabled"/> changes (forwards ModifiedChanged).
        /// </summary>
        public event EventHandler AcceptEnabledChanged
        {
            add { this.ModifiedChanged += value; }
            remove { this.ModifiedChanged -= value; }
        }
    }
}
chinapacs/ImageViewer
Ris/Client/Admin/LocationEditorComponent.cs
C#
gpl-3.0
6,291
#region License

// Copyright (c) 2013, ClearCanvas Inc.
// All rights reserved.
// http://www.clearcanvas.ca
//
// This file is part of the ClearCanvas RIS/PACS open source project.
//
// The ClearCanvas RIS/PACS open source project is free software: you can
// redistribute it and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// The ClearCanvas RIS/PACS open source project is distributed in the hope that it
// will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
// Public License for more details.
//
// You should have received a copy of the GNU General Public License along with
// the ClearCanvas RIS/PACS open source project.  If not, see
// <http://www.gnu.org/licenses/>.

#endregion

using System;
using System.Diagnostics;
using System.IO;
using ClearCanvas.Common;
using ClearCanvas.Common.Utilities;
using ClearCanvas.Dicom.Utilities.Command;
using ClearCanvas.ImageServer.Common;
using ClearCanvas.ImageServer.Common.Utilities;
using ClearCanvas.ImageServer.Core.Command;
using ClearCanvas.ImageServer.Core.Data;
using ClearCanvas.ImageServer.Core.Validation;
using ClearCanvas.ImageServer.Enterprise.Command;
using ClearCanvas.ImageServer.Model;

namespace ClearCanvas.ImageServer.Services.WorkQueue.CleanupReconcile
{
    /// <summary>
    /// For processing 'CleanupReconcile' WorkQueue items.
    /// Deletes the SOP instance files referenced by the queue item's UID list
    /// from the reconcile storage folder, removes the corresponding WorkQueueUid
    /// rows, and removes the storage folder itself once it is empty.
    /// </summary>
    [StudyIntegrityValidation(ValidationTypes = StudyIntegrityValidationModes.None)]
    class CleanupReconcileItemProcessor : BaseItemProcessor
    {
        // Deserialized from the WorkQueue item's Data column; supplies StoragePath.
        private ReconcileStudyWorkQueueData _reconcileQueueData;

        protected override bool CanStart()
        {
            return true;
        }

        /// <summary>
        /// Processes one batch of UIDs for the item. When no UIDs remain, the
        /// (now empty) storage folder is removed and the item is completed;
        /// otherwise the batch is deleted and the item is re-queued as Pending.
        /// </summary>
        protected override void ProcessItem(Model.WorkQueue item)
        {
            Platform.CheckForNullReference(item, "item");
            Platform.CheckForNullReference(item.Data, "item.Data");

            _reconcileQueueData = XmlUtils.Deserialize<ReconcileStudyWorkQueueData>(WorkQueueItem.Data);

            LoadUids(item);

            if (WorkQueueUidList.Count == 0)
            {
                DirectoryUtility.DeleteIfEmpty(_reconcileQueueData.StoragePath);
                Platform.Log(LogLevel.Info, "Reconcile Cleanup is completed. GUID={0}.", WorkQueueItem.GetKey());
                PostProcessing(WorkQueueItem, WorkQueueProcessorStatus.Complete, WorkQueueProcessorDatabaseUpdate.ResetQueueState);
            }
            else
            {
                Platform.Log(LogLevel.Info,
                             "Starting Cleanup of Reconcile Queue item for study {0} for Patient {1} (PatientId:{2} A#:{3}) on Partition {4}, {5} objects",
                             Study.StudyInstanceUid, Study.PatientsName, Study.PatientId,
                             Study.AccessionNumber, ServerPartition.Description,
                             WorkQueueUidList.Count);

                ProcessUidList();

                // BUGFIX: second placeholder was {0}, so the UID count argument was
                // never rendered (the GUID appeared twice). Use {1} for the count.
                Platform.Log(LogLevel.Info, "Successfully complete Reconcile Cleanup. GUID={0}. {1} uids processed.", WorkQueueItem.GetKey(), WorkQueueUidList.Count);
                PostProcessing(WorkQueueItem, WorkQueueProcessorStatus.Pending, WorkQueueProcessorDatabaseUpdate.None);
            }
        }

        // Deletes every file/uid pair in the current batch.
        private void ProcessUidList()
        {
            Platform.CheckForNullReference(WorkQueueUidList, "WorkQueueUidList");
            foreach(WorkQueueUid uid in WorkQueueUidList)
            {
                ProcessUid(uid);
            }
        }

        // Deletes the SOP file (if present) and its WorkQueueUid row in a single
        // rollback-capable command processor; a missing file is logged, not fatal.
        private void ProcessUid(WorkQueueUid uid)
        {
            Platform.CheckForNullReference(uid, "uid");

            string imagePath = GetUidPath(uid);

            using (ServerCommandProcessor processor = new ServerCommandProcessor(String.Format("Deleting {0}", uid.SopInstanceUid)))
            {
                // If the file for some reason doesn't exist, we just ignore it
                if (File.Exists(imagePath))
                {
                    Platform.Log(ServerPlatform.InstanceLogLevel, "Deleting {0}", imagePath);
                    FileDeleteCommand deleteFile = new FileDeleteCommand(imagePath, true);
                    processor.AddCommand(deleteFile);
                }
                else
                {
                    Platform.Log(LogLevel.Warn, "WARNING {0} is missing.", imagePath);
                }

                DeleteWorkQueueUidCommand deleteUid = new DeleteWorkQueueUidCommand(uid);
                processor.AddCommand(deleteUid);
                if (!processor.Execute())
                {
                    throw new Exception(String.Format("Unable to delete image {0}", uid.SopInstanceUid));
                }
            }
        }

        // Resolves a uid to its file path inside the reconcile storage folder.
        private string GetUidPath(WorkQueueUid sop)
        {
            string imagePath = Path.Combine(_reconcileQueueData.StoragePath,
                                            sop.SopInstanceUid + ServerPlatform.DicomFileExtension);
            Debug.Assert(String.IsNullOrEmpty(imagePath) == false);

            return imagePath;
        }
    }
}
chinapacs/ImageViewer
ImageServer/Services/WorkQueue/CleanupReconcile/CleanupReconcileItemProcessor.cs
C#
gpl-3.0
5,141
/* -*- c++ -*- */
/*
 * Copyright 2015,2016 Free Software Foundation, Inc.
 *
 * This is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3, or (at your option)
 * any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this software; see the file COPYING.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street,
 * Boston, MA 02110-1301, USA.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <gnuradio/io_signature.h>
#include "dvbt_reference_signals_impl.h"
#include <complex>
#include <gnuradio/expj.h>
#include <gnuradio/math.h>

namespace gr {
  namespace dtv {

    // The carrier index tables below come from the DVB-T standard
    // (ETSI EN 300 744, continual pilot and TPS carrier locations);
    // indices are active-carrier positions, 0-based from Kmin.

    //Number of symbols in a frame
    const int dvbt_pilot_gen::d_symbols_per_frame = SYMBOLS_PER_FRAME;
    //Number of frames in a superframe
    const int dvbt_pilot_gen::d_frames_per_superframe = FRAMES_PER_SUPERFRAME;

    // 2k mode
    // Scattered pilots # of carriers
    const int dvbt_pilot_gen::d_spilot_carriers_size_2k = SCATTERED_PILOT_SIZE_2k;

    // Continual pilots # of carriers and positions
    const int dvbt_pilot_gen::d_cpilot_carriers_size_2k = CONTINUAL_PILOT_SIZE_2k;
    const int dvbt_pilot_gen::d_cpilot_carriers_2k[dvbt_pilot_gen::d_cpilot_carriers_size_2k] = {
      0, 48, 54, 87, 141, 156, 192, \
      201, 255, 279, 282, 333, 432, 450, \
      483, 525, 531, 618, 636, 714, 759, \
      765, 780, 804, 873, 888, 918, 939, \
      942, 969, 984, 1050, 1101, 1107, 1110, \
      1137, 1140, 1146, 1206, 1269, 1323, 1377, \
      1491, 1683, 1704
    };

    // TPS pilots # of carriers and positions
    const int dvbt_pilot_gen::d_tps_carriers_size_2k = TPS_PILOT_SIZE_2k;
    const int dvbt_pilot_gen::d_tps_carriers_2k[dvbt_pilot_gen::d_tps_carriers_size_2k] = \
    { 34, 50, 209, 346, 413, \
      569, 595, 688, 790, 901, \
      1073, 1219, 1262, 1286, 1469, \
      1594, 1687
    };

    // 8k mode
    // Scattered pilots # of carriers
    const int dvbt_pilot_gen::d_spilot_carriers_size_8k = SCATTERED_PILOT_SIZE_8k;

    // Continual pilots # of carriers and positions
    const int dvbt_pilot_gen::d_cpilot_carriers_size_8k = CONTINUAL_PILOT_SIZE_8k;
    const int dvbt_pilot_gen::d_cpilot_carriers_8k[dvbt_pilot_gen::d_cpilot_carriers_size_8k] = {
      0, 48, 54, 87, 141, 156, 192,
      201, 255, 279, 282, 333, 432, 450,
      483, 525, 531, 618, 636, 714, 759,
      765, 780, 804, 873, 888, 918, 939,
      942, 969, 984, 1050, 1101, 1107, 1110,
      1137, 1140, 1146, 1206, 1269, 1323, 1377,
      1491, 1683, 1704, 1752, 1758, 1791, 1845,
      1860, 1896, 1905, 1959, 1983, 1986, 2037,
      2136, 2154, 2187, 2229, 2235, 2322, 2340,
      2418, 2463, 2469, 2484, 2508, 2577, 2592,
      2622, 2643, 2646, 2673, 2688, 2754, 2805,
      2811, 2814, 2841, 2844, 2850, 2910, 2973,
      3027, 3081, 3195, 3387, 3408, 3456, 3462,
      3495, 3549, 3564, 3600, 3609, 3663, 3687,
      3690, 3741, 3840, 3858, 3891, 3933, 3939,
      4026, 4044, 4122, 4167, 4173, 4188, 4212,
      4281, 4296, 4326, 4347, 4350, 4377, 4392,
      4458, 4509, 4515, 4518, 4545, 4548, 4554,
      4614, 4677, 4731, 4785, 4899, 5091, 5112,
      5160, 5166, 5199, 5253, 5268, 5304, 5313,
      5367, 5391, 5394, 5445, 5544, 5562, 5595,
      5637, 5643, 5730, 5748, 5826, 5871, 5877,
      5892, 5916, 5985, 6000, 6030, 6051, 6054,
      6081, 6096, 6162, 6213, 6219, 6222, 6249,
      6252, 6258, 6318, 6381, 6435, 6489, 6603,
      6795, 6816
    };

    // TPS pilots # of carriers and positions
    const int dvbt_pilot_gen::d_tps_carriers_size_8k = TPS_PILOT_SIZE_8k;
    const int dvbt_pilot_gen::d_tps_carriers_8k[dvbt_pilot_gen::d_tps_carriers_size_8k] = \
    { 34, 50, 209, 346, 413, 569, 595, 688, \
      790, 901, 1073, 1219, 1262, 1286, 1469, 1594, \
      1687, 1738, 1754, 1913, 2050, 2117, 2273, 2299, \
      2392, 2494, 2605, 2777, 2923, 2966, 2990, 3173, \
      3298, 3391, 3442, 3458, 3617, 3754, 3821, 3977, \
      4003, 4096, 4198, 4309, 4481, 4627, 4670, 4694, \
      4877, 5002, 5095, 5146, 5162, 5321,
      5458, 5525, \
      5681, 5707, 5800, 5902, 6013, 6185, 6331, 6374, \
      6398, 6581, 6706, 6799
    };

    // TPS sync sequence for odd and even frames
    const int dvbt_pilot_gen::d_tps_sync_size = 16; // TODO
    const int dvbt_pilot_gen::d_tps_sync_even[d_tps_sync_size] = {
      0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0
    };
    const int dvbt_pilot_gen::d_tps_sync_odd[d_tps_sync_size] = {
      1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1
    };

    /*
     * Constructor of class.
     * Copies the relevant parameters out of the configuration, selects the
     * 2k or 8k pilot tables, allocates all working buffers (cleaning up
     * everything already allocated on failure), pre-computes the known
     * continual-pilot phase differences, then resets the generator state
     * and formats the initial TPS data.
     */
    dvbt_pilot_gen::dvbt_pilot_gen(const dvbt_configure &c) : config(c),
      d_spilot_index(0), d_cpilot_index(0), d_tpilot_index(0),
      d_symbol_index(0), d_symbol_index_known(0),
      d_frame_index(0), d_superframe_index(0),
      d_freq_offset_max(8),
      d_trigger_index(0),
      d_payload_index(0),
      d_chanestim_index(0),
      d_prev_mod_symbol_index(0), d_mod_symbol_index(0)
    {
      //Determine parameters from config file
      d_Kmin = config.d_Kmin;
      d_Kmax = config.d_Kmax;
      d_fft_length = config.d_fft_length;
      d_payload_length = config.d_payload_length;
      d_zeros_on_left = config.d_zeros_on_left;
      d_zeros_on_right = config.d_zeros_on_right;
      d_cp_length = config.d_cp_length;

      //Set-up pilot data depending on transmission mode
      if (config.d_transmission_mode == T2k) {
        d_spilot_carriers_size = d_spilot_carriers_size_2k;
        d_cpilot_carriers_size = d_cpilot_carriers_size_2k;
        d_cpilot_carriers = d_cpilot_carriers_2k;
        d_tps_carriers_size = d_tps_carriers_size_2k;
        d_tps_carriers = d_tps_carriers_2k;
      }
      else if (config.d_transmission_mode == T8k) {
        d_spilot_carriers_size = d_spilot_carriers_size_8k;
        d_cpilot_carriers_size = d_cpilot_carriers_size_8k;
        d_cpilot_carriers = d_cpilot_carriers_8k;
        d_tps_carriers_size = d_tps_carriers_size_8k;
        d_tps_carriers = d_tps_carriers_8k;
      }
      else {
        // Unknown mode: fall back to 2k tables
        d_spilot_carriers_size = d_spilot_carriers_size_2k;
        d_cpilot_carriers_size = d_cpilot_carriers_size_2k;
        d_cpilot_carriers = d_cpilot_carriers_2k;
        d_tps_carriers_size = d_tps_carriers_size_2k;
        d_tps_carriers = d_tps_carriers_2k;
      }

      d_freq_offset = 0;
      d_carrier_freq_correction = 0.0;
      d_sampling_freq_correction = 0.0;

      //allocate PRBS buffer
      // NOTE(review): plain (throwing) new never returns NULL, so this check is
      // dead code; the later buffers consistently use new (std::nothrow).
      d_wk = new char[d_Kmax - d_Kmin + 1];
      if (d_wk == NULL) {
        std::cerr << "Reference Signals, cannot allocate memory for d_wk." << std::endl;
        throw std::bad_alloc();
      }
      // Generate wk sequence
      generate_prbs();

      // allocate buffer for scattered pilots
      d_spilot_carriers_val = new (std::nothrow) gr_complex[d_Kmax - d_Kmin + 1];
      if (d_spilot_carriers_val == NULL) {
        std::cerr << "Reference Signals, cannot allocate memory for d_spilot_carriers_val." << std::endl;
        delete [] d_wk;
        throw std::bad_alloc();
      }

      // allocate buffer for channel gains (for each useful carrier)
      d_channel_gain = new (std::nothrow) gr_complex[d_Kmax - d_Kmin + 1];
      if (d_channel_gain == NULL) {
        std::cerr << "Reference Signals, cannot allocate memory for d_channel_gain." << std::endl;
        delete [] d_spilot_carriers_val;
        delete [] d_wk;
        throw std::bad_alloc();
      }

      // Allocate buffer for continual pilots phase diffs
      d_known_phase_diff = new (std::nothrow) float[d_cpilot_carriers_size - 1];
      if (d_known_phase_diff == NULL) {
        std::cerr << "Reference Signals, cannot allocate memory for d_known_phase_diff." << std::endl;
        delete [] d_channel_gain;
        delete [] d_spilot_carriers_val;
        delete [] d_wk;
        throw std::bad_alloc();
      }

      // Obtain phase diff for all continual pilots
      for (int i = 0; i < (d_cpilot_carriers_size - 1); i++) {
        d_known_phase_diff[i] = \
          norm(get_cpilot_value(d_cpilot_carriers[i + 1]) - get_cpilot_value(d_cpilot_carriers[i]));
      }

      d_cpilot_phase_diff = new (std::nothrow) float[d_cpilot_carriers_size - 1];
      if (d_cpilot_phase_diff == NULL) {
        std::cerr << "Reference Signals, cannot allocate memory for d_cpilot_phase_diff." << std::endl;
        delete [] d_known_phase_diff;
        delete [] d_channel_gain;
        delete [] d_spilot_carriers_val;
        delete [] d_wk;
        throw std::bad_alloc();
      }

      // Allocate buffer for derotated input symbol
      d_derot_in = new (std::nothrow) gr_complex[d_fft_length];
      if (d_derot_in == NULL) {
        std::cerr << "Reference Signals, cannot allocate memory for d_derot_in." << std::endl;
        delete [] d_cpilot_phase_diff;
        delete [] d_known_phase_diff;
        delete [] d_channel_gain;
        delete [] d_spilot_carriers_val;
        delete [] d_wk;
        throw std::bad_alloc();
      }

      // allocate buffer for first tps symbol constellation
      d_tps_carriers_val = new (std::nothrow) gr_complex[d_tps_carriers_size];
      if (d_tps_carriers_val == NULL) {
        std::cerr << "Reference Signals, cannot allocate memory for d_tps_carriers_val." << std::endl;
        delete [] d_derot_in;
        delete [] d_cpilot_phase_diff;
        delete [] d_known_phase_diff;
        delete [] d_channel_gain;
        delete [] d_spilot_carriers_val;
        delete [] d_wk;
        throw std::bad_alloc();
      }

      // allocate tps data buffer
      d_tps_data = new (std::nothrow) unsigned char[d_symbols_per_frame];
      if (d_tps_data == NULL) {
        std::cerr << "Reference Signals, cannot allocate memory for d_tps_data." << std::endl;
        delete [] d_tps_carriers_val;
        delete [] d_derot_in;
        delete [] d_cpilot_phase_diff;
        delete [] d_known_phase_diff;
        delete [] d_channel_gain;
        delete [] d_spilot_carriers_val;
        delete [] d_wk;
        throw std::bad_alloc();
      }

      d_prev_tps_symbol = new (std::nothrow) gr_complex[d_tps_carriers_size];
      if (d_prev_tps_symbol == NULL) {
        std::cerr << "Reference Signals, cannot allocate memory for d_prev_tps_symbol." << std::endl;
        delete [] d_tps_data;
        delete [] d_tps_carriers_val;
        delete [] d_derot_in;
        delete [] d_cpilot_phase_diff;
        delete [] d_known_phase_diff;
        delete [] d_channel_gain;
        delete [] d_spilot_carriers_val;
        delete [] d_wk;
        throw std::bad_alloc();
      }
      memset(d_prev_tps_symbol, 0, d_tps_carriers_size * sizeof(gr_complex));

      d_tps_symbol = new (std::nothrow) gr_complex[d_tps_carriers_size];
      if (d_tps_symbol == NULL) {
        std::cerr << "Reference Signals, cannot allocate memory for d_tps_symbol." << std::endl;
        delete [] d_prev_tps_symbol;
        delete [] d_tps_data;
        delete [] d_tps_carriers_val;
        delete [] d_derot_in;
        delete [] d_cpilot_phase_diff;
        delete [] d_known_phase_diff;
        delete [] d_channel_gain;
        delete [] d_spilot_carriers_val;
        delete [] d_wk;
        throw std::bad_alloc();
      }
      memset(d_tps_symbol, 0, d_tps_carriers_size * sizeof(gr_complex));

      // Init receive TPS data vector
      for (int i = 0; i < d_symbols_per_frame; i++) {
        d_rcv_tps_data.push_back(0);
      }

      // Init TPS sync sequence
      for (int i = 0; i < d_tps_sync_size; i++) {
        d_tps_sync_evenv.push_back(d_tps_sync_even[i]);
        d_tps_sync_oddv.push_back(d_tps_sync_odd[i]);
      }

      // Allocate buffer for channel estimation carriers
      d_chanestim_carriers = new (std::nothrow) int[d_Kmax - d_Kmin + 1];
      if (d_chanestim_carriers == NULL) {
        std::cerr << "Reference Signals, cannot allocate memory for d_chanestim_carriers." << std::endl;
        delete [] d_tps_symbol;
        delete [] d_prev_tps_symbol;
        delete [] d_tps_data;
        delete [] d_tps_carriers_val;
        delete [] d_derot_in;
        delete [] d_cpilot_phase_diff;
        delete [] d_known_phase_diff;
        delete [] d_channel_gain;
        delete [] d_spilot_carriers_val;
        delete [] d_wk;
        throw std::bad_alloc();
      }

      // Allocate buffer for payload carriers
      d_payload_carriers = new (std::nothrow) int[d_Kmax - d_Kmin + 1];
      if (d_payload_carriers == NULL) {
        std::cerr << "Reference Signals, cannot allocate memory for d_payload_carriers." << std::endl;
        delete [] d_chanestim_carriers;
        delete [] d_tps_symbol;
        delete [] d_prev_tps_symbol;
        delete [] d_tps_data;
        delete [] d_tps_carriers_val;
        delete [] d_derot_in;
        delete [] d_cpilot_phase_diff;
        delete [] d_known_phase_diff;
        delete [] d_channel_gain;
        delete [] d_spilot_carriers_val;
        delete [] d_wk;
        throw std::bad_alloc();
      }

      // Reset the pilot generator
      reset_pilot_generator();
      // Format TPS data with current values
      format_tps_data();
    }

    /*
     * Destructor of class.
     * Releases every buffer allocated in the constructor (reverse order).
     */
    dvbt_pilot_gen::~dvbt_pilot_gen()
    {
      delete [] d_payload_carriers;
      delete [] d_chanestim_carriers;
      delete [] d_tps_symbol;
      delete [] d_prev_tps_symbol;
      delete [] d_tps_data;
      delete [] d_tps_carriers_val;
      delete [] d_derot_in;
      delete [] d_cpilot_phase_diff;
      delete [] d_known_phase_diff;
      delete [] d_channel_gain;
      delete [] d_spilot_carriers_val;
      delete [] d_wk;
    }

    /*
     * Generate PRBS sequence
     * X^11 + X^2 + 1
     * en 300 744 - section 4.5.2
     * Fills d_wk with one bit (0/1) per useful carrier; the register is
     * initialized to all ones as the standard requires.
     */
    void
    dvbt_pilot_gen::generate_prbs()
    {
      // init PRBS register with 1s
      unsigned int reg_prbs = (1 << 11) - 1;

      for (int k = 0; k < (d_Kmax - d_Kmin + 1); k++) {
        d_wk[k] = (char)(reg_prbs & 0x01);
        // Feedback taps correspond to X^11 + X^2 + 1
        int new_bit = ((reg_prbs >> 2) ^ (reg_prbs >> 0)) & 0x01;
        reg_prbs = (reg_prbs >> 1) | (new_bit << 10);
      }
    }

    /*
     * Generate shortened BCH(67, 53) codes from TPS data
     * Extend the code with 60 bits and use BCH(127, 113)
     * The 14 parity bits are written back into d_tps_data[54..67]
     * (completed just below).
     */
    void
    dvbt_pilot_gen::generate_bch_code()
    {
      //TODO
      //DO other way: if (feedback == 1) reg = reg ^ polynomial
      //else nothing

      //(n, k) = (127, 113) = (60+67, 60+53)
      unsigned int reg_bch = 0;
      unsigned char data_in[113];

      //fill in 60 zeros
      memset(&data_in[0], 0, 60);
      //fill in TPS data - start bit not included
      memcpy(&data_in[60], &d_tps_data[1], 53);

      //X^14+X^9+X^8+X^6+X^5+X^4+X^2+X+1
      for (int i = 0; i < 113; i++) {
        int feedback = 0x1 & (data_in[i] ^ reg_bch);

        reg_bch  = reg_bch >> 1;
        reg_bch |= feedback << 13;
        reg_bch  = reg_bch \
          ^ (feedback << 12) ^ (feedback << 11) \
          ^ (feedback << 9) ^ (feedback << 8) \
          ^ (feedback << 7) ^ (feedback << 5) \
          ^ (feedback << 4);
      }
      // Copy the 14 parity bits into the TPS block (bits s54..s67)
      for (int i = 0; i < 14; i++) {
        d_tps_data[i + 54] = 0x1 & (reg_bch >> i);
      }
    }

    /*
     * Verify the BCH parity of a received TPS block.
     * Recomputes the BCH(127, 113) remainder over the 53 information bits
     * (prefixed by 60 zeros, start bit excluded) and compares it with the
     * 14 received parity bits. Returns 0 on match, -1 on mismatch.
     */
    int
    dvbt_pilot_gen::verify_bch_code(std::deque<char> data)
    {
      int ret = 0;

      //TODO
      //DO other way: if (feedback == 1) reg = reg ^ polynomial
      //else nothing

      //(n, k) = (127, 113) = (60+67, 60+53)
      unsigned int reg_bch = 0;
      unsigned char data_in[113];

      //fill in 60 zeros
      memset(&data_in[0], 0, 60);
      //fill in TPS data - start bit not included
      //memcpy(&data_in[60], &data[1], 53);
      for (int i = 0; i < 53; i++) {
        data_in[60 + i] = data[1 + i];
      }

      //X^14+X^9+X^8+X^6+X^5+X^4+X^2+X+1
      for (int i = 0; i < 113; i++) {
        int feedback = 0x1 & (data_in[i] ^ reg_bch);

        reg_bch  = reg_bch >> 1;
        reg_bch |= feedback << 13;
        reg_bch  = reg_bch \
          ^ (feedback << 12) ^ (feedback << 11) \
          ^ (feedback << 9) ^ (feedback << 8) \
          ^ (feedback << 7) ^ (feedback << 5) \
          ^ (feedback << 4);
      }

      // Compare recomputed parity with the received parity bits
      for (int i = 0; i < 14; i++) {
        if ((unsigned int)data[i + 54] != (0x1 & (reg_bch >> i))) {
          ret = -1;
          break;
        }
      }

      return ret;
    }

    // Setter/getter for the current OFDM symbol index within a frame
    void
    dvbt_pilot_gen::set_symbol_index(int sindex)
    {
      d_symbol_index = sindex;
    }

    int
    dvbt_pilot_gen::get_symbol_index()
    {
      return d_symbol_index;
    }

    // Placeholder accessors - intentionally empty in this implementation
    void
    dvbt_pilot_gen::set_tps_data()
    {
    }

    void
    dvbt_pilot_gen::get_tps_data()
    {
    }

    /*
     * Reset pilot generator - clears all per-symbol/frame/superframe counters.
     */
    void
    dvbt_pilot_gen::reset_pilot_generator()
    {
      d_spilot_index = 0; d_cpilot_index = 0; d_tpilot_index = 0;
      d_payload_index = 0; d_chanestim_index = 0;
      d_symbol_index = 0; d_frame_index = 0; d_superframe_index = 0;
      d_symbol_index_known = 0;
      d_equalizer_ready = 0;
    }

    /*
     * Init scattered pilot generator.
     * Scattered pilot position depends on (symbol index mod 4) per
     * en 300 744: Kmin + 3*(sindex mod 4) + 12*p.
     */
    int
    dvbt_pilot_gen::get_current_spilot(int sindex) const
    {
      //TODO - can be optimized for same symbol_index
      return (d_Kmin + 3 * (sindex % 4) + 12 * d_spilot_index);
    }

    // Boosted pilot amplitude: +/- 4/3 depending on the PRBS bit wk
    gr_complex
    dvbt_pilot_gen::get_spilot_value(int spilot)
    {
      // TODO - can be calculated at the beginning
      return gr_complex(4 * 2 * (0.5 - d_wk[spilot]) / 3, 0);
    }

    void
    dvbt_pilot_gen::set_spilot_value(int spilot, gr_complex val)
    {
      d_spilot_carriers_val[spilot] = val;
    }

    // Store channel gain at a scattered pilot: gain = txval / rxval
    // (note: comment in original said rx/tx; the code divides known tx by rx)
    void
    dvbt_pilot_gen::set_channel_gain(int spilot, gr_complex val)
    {
      // Gain gval=rxval/txval
      d_channel_gain[spilot] = gr_complex((4 * 2 * (0.5 - d_wk[spilot]) / 3), 0) / val;
    }

    void
    dvbt_pilot_gen::advance_spilot(int sindex)
    {
      //TODO - do in a simpler way?
      int size = d_spilot_carriers_size;

      if (sindex == 0) {
        size = d_spilot_carriers_size + 1;
      }

      // TODO - fix this - what value should we use?
      ++d_spilot_index;
      d_spilot_index = d_spilot_index % size;
    }

    int
    dvbt_pilot_gen::get_first_spilot()
    {
      d_spilot_index = 0;
      return (d_Kmin + 3 * (d_symbol_index % 4));
    }

    int
    dvbt_pilot_gen::get_last_spilot() const
    {
      int size = d_spilot_carriers_size - 1;

      if (d_symbol_index == 0) {
        size = d_spilot_carriers_size;
      }

      return (d_Kmin + 3 * (d_symbol_index % 4) + 12 * size);
    }

    int
    dvbt_pilot_gen::get_next_spilot()
    {
      int pilot = (d_Kmin + 3 * (d_symbol_index % 4) + 12 * (++d_spilot_index));

      if (pilot > d_Kmax) {
        pilot = d_Kmax;
      }

      return pilot;
    }

    /*
     * Channel estimator. First determines (symbol index mod 4) by
     * correlating known scattered-pilot values against the received
     * symbol, then linearly interpolates channel gains between pilots.
     * Returns the symbol-index difference (0..3) since the previous call.
     */
    int
    dvbt_pilot_gen::process_spilot_data(const gr_complex * in)
    {
      // This is channel estimator
      // Interpolate the gain between carriers to obtain
      // gain for non pilot carriers - we use linear interpolation

      /*************************************************************/
      // Find out the OFDM symbol index (value 0 to 3) sent
      // in current block by correlating scattered symbols with
      // current block - result is (symbol index % 4)
      /*************************************************************/
      float max = 0;
      float sum = 0;

      for (int scount = 0; scount < 4; scount++) {
        d_spilot_index = 0; d_cpilot_index = 0; d_chanestim_index = 0;

        for (int k = 0; k < (d_Kmax - d_Kmin + 1); k++) {
          // Keep data for channel estimation
          if (k == get_current_spilot(scount)) {
            set_chanestim_carrier(k);
            advance_spilot(scount);
            advance_chanestim();
          }
        }

        gr_complex c = gr_complex(0.0, 0.0);

        // This should be of range 0 to d_chanestim_index but for now we use just a
        // small number of spilots to obtain the symbol index
        for (int j = 0; j < 10; j++) {
          c += get_spilot_value(d_chanestim_carriers[j]) * conj(in[d_zeros_on_left + d_chanestim_carriers[j]]);
        }

        sum = norm(c);

        // Keep the hypothesis with the strongest correlation
        if (sum > max) {
          max = sum;
          d_mod_symbol_index = scount;
        }
      }

      /*************************************************************/
      // Keep data for channel estimator
      // This method interpolates scattered measurements across one OFDM symbol
      // It does not use measurements from the previous OFDM symbols (does not use history)
      // as it may have encountered a phase change for the current phase only
      /*************************************************************/
      d_spilot_index = 0; d_cpilot_index = 0; d_chanestim_index = 0;

      for (int k = 0; k < (d_Kmax - d_Kmin + 1); k++) {
        // Keep data for channel estimation
        if (k == get_current_spilot(d_mod_symbol_index)) {
          set_chanestim_carrier(k);
          advance_spilot(d_mod_symbol_index);
          advance_chanestim();
        }

        // Keep data for channel estimation
        if (k == get_current_cpilot()) {
          set_chanestim_carrier(k);
          advance_cpilot();
          advance_chanestim();
        }
      }

      // We use both scattered pilots and continual pilots
      for (int i = 0, startk = d_chanestim_carriers[0]; i < d_chanestim_index; i++) {
        // Get a carrier from the list of carriers
        // used for channel estimation
        int k = d_chanestim_carriers[i];

        set_channel_gain(k, in[k + d_zeros_on_left]);

        // Calculate tg(alpha) due to linear interpolation
        gr_complex tg_alpha = (d_channel_gain[k] - d_channel_gain[startk]) / gr_complex(11.0, 0.0);

        // Calculate interpolation for all intermediate values
        for (int j = 1; j < (k - startk); j++) {
          gr_complex current = d_channel_gain[startk] + tg_alpha * gr_complex(j, 0.0);
          d_channel_gain[startk + j] = current;
        }

        startk = k;
      }

      // Signal that equalizer is ready
      d_equalizer_ready = 1;

      int diff_sindex = (d_mod_symbol_index - d_prev_mod_symbol_index + 4) % 4;
      d_prev_mod_symbol_index = d_mod_symbol_index;

      return diff_sindex;
    }

    /*
     * Init continual pilot generator
     */
    int
    dvbt_pilot_gen::get_current_cpilot() const
    {
      return d_cpilot_carriers[d_cpilot_index];
    }

    gr_complex
    dvbt_pilot_gen::get_cpilot_value(int cpilot)
    {
      //TODO - can be calculated at the beginning
      return gr_complex((float)(4 * 2 * (0.5 - d_wk[cpilot])) / 3, 0);
    }

    void
    dvbt_pilot_gen::advance_cpilot()
    {
      ++d_cpilot_index;
      d_cpilot_index = d_cpilot_index % d_cpilot_carriers_size;
    }

    /*
     * Post-FFT integer frequency offset estimation: slide a window of
     * +/- d_freq_offset_max bins and pick the offset that maximizes the
     * correlation of observed vs known continual-pilot phase differences.
     */
    void
    dvbt_pilot_gen::process_cpilot_data(const gr_complex * in)
    {
      // Look for maximum correlation for cpilots
      // in order to obtain post FFT integer frequency correction
      float max = 0;
      float sum = 0;
      int start = 0;
      float phase;

      for (int i = d_zeros_on_left - d_freq_offset_max; i < d_zeros_on_left + d_freq_offset_max; i++) {
        sum = 0;

        for (int j = 0; j < (d_cpilot_carriers_size - 1); j++) {
          phase = norm(in[i + d_cpilot_carriers[j + 1]] - in[i + d_cpilot_carriers[j]]);
          sum += d_known_phase_diff[j] * phase;
        }

        if (sum > max) {
          max = sum;
          start = i;
        }
      }

      d_freq_offset = start - d_zeros_on_left;
    }

    /*
     * One-shot estimate of residual carrier and sampling frequency error
     * from inter-symbol cpilot correlations of the left and right halves
     * of the spectrum. The sum of the half angles yields the carrier
     * correction; the difference yields the sampling correction.
     */
    void
    dvbt_pilot_gen::compute_oneshot_csft(const gr_complex * in)
    {
      gr_complex left_corr_sum = 0.0;
      gr_complex right_corr_sum = 0.0;
      int half_size = (d_cpilot_carriers_size - 1) / 2;

      // TODO init this in constructor
      float carrier_coeff = 1.0 / (2 * M_PI * (1 + float (d_cp_length) / float (d_fft_length)) * 2);
      float sampling_coeff = 1.0 / (2 * M_PI * ((1 + float (d_cp_length) / float (d_fft_length)) * ((float)d_cpilot_carriers_size / 2.0)));
      float left_angle, right_angle;

      // Compute cpilots correlation between previous symbol and current symbol
      // in both halves of the cpilots. The cpilots are distributed evenly
      // on left and right sides of the center frequency.
      for (int j = 0; j < half_size; j++) {
        left_corr_sum += in[d_freq_offset + d_zeros_on_left + d_cpilot_carriers[j]] * \
          std::conj(in[d_freq_offset + d_fft_length + d_zeros_on_left + d_cpilot_carriers[j]]);
      }

      for (int j = half_size + 1; j < d_cpilot_carriers_size; j++) {
        right_corr_sum += in[d_freq_offset + d_zeros_on_left + d_cpilot_carriers[j]] * \
          std::conj(in[d_freq_offset + d_fft_length + d_zeros_on_left + d_cpilot_carriers[j]]);
      }

      left_angle = std::arg(left_corr_sum);
      right_angle = std::arg(right_corr_sum);

      d_carrier_freq_correction = (right_angle + left_angle) * carrier_coeff;
      d_sampling_freq_correction = (right_angle - left_angle) * sampling_coeff;
    }

    /*
     * Derotate the input symbol by the estimated (integer + fractional)
     * frequency offset; writes the corrected symbol into out.
     */
    gr_complex *
    dvbt_pilot_gen::frequency_correction(const gr_complex * in, gr_complex * out)
    {
      // TODO - use PI control loop to calculate tracking corrections
      int symbol_count = 1;

      for (int k = 0; k < d_fft_length; k++) {
        // TODO - for 2k mode the continuous pilots are not split evenly
        // between left/right center frequency. Probably the scattered
        // pilots needs to be added.
        float correction = (float)d_freq_offset + d_carrier_freq_correction;
        gr_complex c = gr_expj(-2 * M_PI * correction * \
          (d_fft_length + d_cp_length) / d_fft_length * symbol_count);
        // TODO - vectorize this operation
        out[k] = c * in[k + d_freq_offset];
      }

      return (out);
    }

    /*
     * Init tps sequence, return values for first position
     * If first symbol then init tps DBPSK data
     */
    int
    dvbt_pilot_gen::get_current_tpilot() const
    {
      return d_tps_carriers[d_tpilot_index];
    }

    // DBPSK modulation of the TPS bit for the current TPS carrier:
    // symbol 0 carries the reference phase; later symbols flip the sign
    // whenever the corresponding TPS bit is 1.
    gr_complex
    dvbt_pilot_gen::get_tpilot_value(int tpilot)
    {
      //TODO - it can be calculated at the beginning
      if (d_symbol_index == 0) {
        d_tps_carriers_val[d_tpilot_index] = gr_complex(2 * (0.5 - d_wk[tpilot]), 0);
      }
      else {
        if (d_tps_data[d_symbol_index] == 1) {
          d_tps_carriers_val[d_tpilot_index] = gr_complex(-d_tps_carriers_val[d_tpilot_index].real(), 0);
        }
      }

      return d_tps_carriers_val[d_tpilot_index];
    }

    void
    dvbt_pilot_gen::advance_tpilot()
    {
      ++d_tpilot_index;
      d_tpilot_index = d_tpilot_index % d_tps_carriers_size;
    }

    /*
     * Set a number of bits to a specified value.
     * Writes `data` MSB-first into d_tps_data[stop..start] (start >= stop).
     */
    void
    dvbt_pilot_gen::set_tps_bits(int start, int stop, unsigned int data)
    {
      for (int i = start; i >= stop; i--) {
        d_tps_data[i] = data & 0x1;
        data = data >> 1;
      }
    }

    /*
     * Clause 4.6
     * Format data that will be sent with TPS signals
     * en 300 744 - section 4.6.2
     * s0 Initialization
     * s1-s16 Synchronization word
     * s17-s22 Length Indicator
     * s23-s24 Frame Number
     * S25-s26 Constellation
     * s27, s28, s29 Hierarchy information
     * s30, s31, s32 Code rate, HP stream
     * s33, s34, s35 Code rate, LP stream
     * s36, s37 Guard interval
     * s38, s39 Transmission mode
     * s40, s47 Cell identifier
     * s48-s53 All set to "0"
     * s54-s67 Error protection (BCH code)
     */
    void
    dvbt_pilot_gen::format_tps_data()
    {
      //Clause 4.6.3
      set_tps_bits(0, 0, d_wk[0]);
      //Clause 4.6.2.2 - sync word alternates between odd/even frames
      if (d_frame_index % 2) {
        set_tps_bits(16, 1, 0xca11);
      }
      else {
        set_tps_bits(16, 1, 0x35ee);
      }
      //Clause 4.6.2.3 - length indicator depends on cell-id presence
      if (config.d_include_cell_id) {
        set_tps_bits(22, 17, 0x1f);
      }
      else {
        set_tps_bits(22, 17, 0x17);
      }
      //Clause 4.6.2.4
      set_tps_bits(24, 23, d_frame_index);
      //Clause 4.6.2.5
      set_tps_bits(26, 25, config.d_constellation);
      //Clause 4.6.2.6
      set_tps_bits(29, 27, config.d_hierarchy);
      //Clause 4.6.2.7
      switch (config.d_code_rate_HP) {
        case C1_2:
          set_tps_bits(32, 30, 0);
          break;
        case C2_3:
          set_tps_bits(32, 30, 1);
          break;
        case C3_4:
          set_tps_bits(32, 30, 2);
          break;
        case C5_6:
          set_tps_bits(32, 30, 3);
          break;
        case C7_8:
          set_tps_bits(32, 30, 4);
          break;
        default:
          set_tps_bits(32, 30, 0);
          break;
      }
      switch (config.d_code_rate_LP) {
        case C1_2:
          set_tps_bits(35, 33, 0);
          break;
        case C2_3:
          set_tps_bits(35, 33, 1);
          break;
        case C3_4:
          set_tps_bits(35, 33, 2);
          break;
        case C5_6:
          set_tps_bits(35, 33, 3);
          break;
        case C7_8:
          set_tps_bits(35, 33, 4);
          break;
        default:
          set_tps_bits(35, 33, 0);
          break;
      }
      //Clause 4.6.2.8
      set_tps_bits(37, 36, config.d_guard_interval);
      //Clause 4.6.2.9
      set_tps_bits(39, 38, config.d_transmission_mode);
      //Clause 4.6.2.10
      set_tps_bits(47, 40, config.d_cell_id);
      //These bits are set to zero
      set_tps_bits(53, 48, 0);
      //Clause 4.6.2.11
      generate_bch_code();
    }

    /*
     * Demodulate the DBPSK TPS bit of the current symbol (majority vote
     * over all TPS carriers), push it into the receive FIFO, and try to
     * match the even/odd sync word. On a verified match, recovers the
     * frame number and returns 1 (end of frame); otherwise returns 0.
     */
    int
    dvbt_pilot_gen::process_tps_data(const gr_complex * in, const int diff_symbol_index)
    {
      int end_frame = 0;

      // Look for TPS data only - demodulate DBPSK
      // Calculate phase difference between previous symbol
      // and current one to determine the current bit.
      // Use majority voting for decision
      int tps_majority_zero = 0;

      for (int k = 0; k < d_tps_carriers_size; k++) {
        // Use equalizer to correct data and frequency correction
        gr_complex val = in[d_zeros_on_left + d_tps_carriers[k]] * d_channel_gain[d_tps_carriers[k]];

        if (!d_symbol_index_known || (d_symbol_index != 0)) {
          gr_complex phdiff = val * conj(d_prev_tps_symbol[k]);

          if (phdiff.real() >= 0.0) {
            tps_majority_zero++;
          }
          else {
            tps_majority_zero--;
          }
        }

        d_prev_tps_symbol[k] = val;
      }

      // Insert obtained TPS bit into FIFO
      // Insert the same bit into FIFO in the case
      // diff_symbol_index is more than one. This will happen
      // in the case of losing 1 to 3 symbols.
      // This could be corrected by BCH decoder afterwards.
      for (int i = 0; i < diff_symbol_index; i++) {
        // Take out the front entry first
        d_rcv_tps_data.pop_front();

        // Add data at tail
        if (!d_symbol_index_known || (d_symbol_index != 0)) {
          if (tps_majority_zero >= 0) {
            d_rcv_tps_data.push_back(0);
          }
          else {
            d_rcv_tps_data.push_back(1);
          }
        }
        else {
          d_rcv_tps_data.push_back(0);
        }
      }

      // Match synchronization signatures
      if (std::equal(d_rcv_tps_data.begin() + 1, d_rcv_tps_data.begin() + d_tps_sync_evenv.size(), d_tps_sync_evenv.begin())) {
        // Verify parity for TPS data
        if (!verify_bch_code(d_rcv_tps_data)) {
          // Frame number from bits s23-s24
          d_frame_index = (d_rcv_tps_data[23] << 1) | (d_rcv_tps_data[24]);
          d_symbol_index_known = 1;
          end_frame = 1;
        }
        else {
          d_symbol_index_known = 0;
          end_frame = 0;
        }

        // Clear up FIFO
        for (int i = 0; i < d_symbols_per_frame; i++) {
          d_rcv_tps_data[i] = 0;
        }
      }
      else if (std::equal(d_rcv_tps_data.begin() + 1, d_rcv_tps_data.begin() + d_tps_sync_oddv.size(), d_tps_sync_oddv.begin())) {
        // Verify parity for TPS data
        if (!verify_bch_code(d_rcv_tps_data)) {
          d_frame_index = (d_rcv_tps_data[23] << 1) | (d_rcv_tps_data[24]);
          d_symbol_index_known = 1;
          end_frame = 1;
        }
        else {
          d_symbol_index_known = 0;
          end_frame = 0;
        }

        // Clear up FIFO
        for (int i = 0; i < d_symbols_per_frame; i++) {
          d_rcv_tps_data[i] = 0;
        }
      }

      return end_frame;
    }

    // Record carrier k as a channel-estimation carrier
    void
    dvbt_pilot_gen::set_chanestim_carrier(int k)
    {
      d_chanestim_carriers[d_chanestim_index] = k;
    }

    void
    dvbt_pilot_gen::advance_chanestim()
    {
      d_chanestim_index++;
    }

    // Payload-carrier bookkeeping
    int
    dvbt_pilot_gen::get_current_payload()
    {
      return d_payload_carriers[d_payload_index];
    }

    void
    dvbt_pilot_gen::set_payload_carrier(int k)
    {
      d_payload_carriers[d_payload_index] = k;
    }

    void
    dvbt_pilot_gen::advance_payload()
    {
      d_payload_index++;
    }

    // (function continues beyond this excerpt)
    void
    dvbt_pilot_gen::process_payload_data(const gr_complex *in, gr_complex *out)
    {
      //reset indexes
      d_spilot_index = 0; d_cpilot_index = 0; d_tpilot_index = 0;
      d_payload_index = 0; d_chanestim_index = 0;
      int is_payload = 1;

      //process one block - one symbol
      for (int k =
0; k < (d_Kmax - d_Kmin + 1); k++) { is_payload = 1; // Keep data for channel estimation // This depends on the symbol index if (k == get_current_spilot(d_mod_symbol_index)) { advance_spilot(d_mod_symbol_index); is_payload = 0; } // Keep data for frequency correction // and channel estimation if (k == get_current_cpilot()) { advance_cpilot(); is_payload = 0; } if (k == get_current_tpilot()) { advance_tpilot(); is_payload = 0; } // Keep payload carrier number // This depends on the symbol index if (is_payload) { set_payload_carrier(k); advance_payload(); } } if (d_equalizer_ready) { // Equalize payload data according to channel estimator for (int i = 0; i < d_payload_index; i++) { out[i] = in[d_zeros_on_left + d_payload_carriers[i]] * d_channel_gain[d_payload_carriers[i]]; } } else { // If equ not ready, return 0 for (int i = 0; i < d_payload_length; i++) { out[0] = gr_complex(0.0, 0.0); } } } void dvbt_pilot_gen::update_output(const gr_complex *in, gr_complex *out) { int is_payload = 1; int payload_count = 0; //move to the next symbol //re-genereate TPS data format_tps_data(); //reset indexes payload_count = 0; d_spilot_index = 0; d_cpilot_index = 0; d_tpilot_index = 0; for (int i = 0; i < d_zeros_on_left; i++) { out[i] = gr_complex(0.0, 0.0); } //process one block - one symbol for (int k = d_Kmin; k < (d_Kmax - d_Kmin + 1); k++) { is_payload = 1; if (k == get_current_spilot(d_symbol_index)) { out[d_zeros_on_left + k] = get_spilot_value(k); advance_spilot(d_symbol_index); is_payload = 0; } if (k == get_current_cpilot()) { out[d_zeros_on_left + k] = get_cpilot_value(k); advance_cpilot(); is_payload = 0; } if (k == get_current_tpilot()) { out[d_zeros_on_left + k] = get_tpilot_value(k); advance_tpilot(); is_payload = 0; } if (is_payload == 1) { out[d_zeros_on_left + k] = in[payload_count++]; } } // update indexes if (++d_symbol_index == d_symbols_per_frame) { d_symbol_index = 0; if (++d_frame_index == d_frames_per_superframe) { d_frame_index = 0; d_superframe_index++; 
} } for (int i = (d_fft_length - d_zeros_on_right); i < d_fft_length; i++) { out[i] = gr_complex(0.0, 0.0); } } int dvbt_pilot_gen::parse_input(const gr_complex *in, gr_complex *out, int * symbol_index, int * frame_index) { d_trigger_index++; // Obtain frequency correction based on cpilots. // Obtain channel estimation based on both // cpilots and spilots. // We use spilot correlation for finding the symbol index modulo 4 // The diff between previous sym index and current index is used // to advance the symbol index inside a frame (0 to 67) // Then based on the TPS data we find out the start of a frame // Process cpilot data // This is post FFT integer frequency offset estimation // This is called before all other processing process_cpilot_data(in); // Compute one shot Post-FFT Carrier and Sampling Frequency Tracking // Obtain fractional Carrer and Sampling frequency corrections // Before this moment it is assumed to have corrected this: // - symbol timing (pre-FFT) // - symbol frequency correction (pre-FFT) // - integer frequency correction (post-FFT) // TODO - call this just in the aquisition mode compute_oneshot_csft(in); // Gather all corrections and obtain a corrected OFDM symbol: // - input symbol shift (post-FFT) // - integer frequency correction (post-FFT) // - fractional frequency (carrier and sampling) corrections (post-FFT) // TODO - use PI to update the corrections frequency_correction(in, d_derot_in); // Process spilot data // This is channel estimation function int diff_symbol_index = process_spilot_data(d_derot_in); // Correct symbol index so that all subsequent processing // use correct symbol index d_symbol_index = (d_symbol_index + diff_symbol_index) % d_symbols_per_frame; // Symbol index is used in other modules too *symbol_index = d_symbol_index; // Frame index is used in other modules too *frame_index = d_frame_index; // Process TPS data // If a frame is recognized then signal end of frame int frame_end = process_tps_data(d_derot_in, 
diff_symbol_index); // We are just at the end of a frame if (frame_end) { d_symbol_index = d_symbols_per_frame - 1; } // Process payload data with correct symbol index process_payload_data(d_derot_in, out); // noutput_items should be 1 in this case return 1; } dvbt_reference_signals::sptr dvbt_reference_signals::make(int itemsize, int ninput, int noutput, \ dvb_constellation_t constellation, dvbt_hierarchy_t hierarchy, \ dvb_code_rate_t code_rate_HP, dvb_code_rate_t code_rate_LP, \ dvb_guardinterval_t guard_interval, dvbt_transmission_mode_t transmission_mode, \ int include_cell_id, int cell_id) { return gnuradio::get_initial_sptr (new dvbt_reference_signals_impl(itemsize, ninput, \ noutput, constellation, hierarchy, code_rate_HP, code_rate_LP, \ guard_interval, transmission_mode, include_cell_id, cell_id)); } /* * The private constructor */ dvbt_reference_signals_impl::dvbt_reference_signals_impl(int itemsize, int ninput, int noutput, \ dvb_constellation_t constellation, dvbt_hierarchy_t hierarchy, dvb_code_rate_t code_rate_HP,\ dvb_code_rate_t code_rate_LP, dvb_guardinterval_t guard_interval,\ dvbt_transmission_mode_t transmission_mode, int include_cell_id, int cell_id) : block("dvbt_reference_signals", io_signature::make(1, 1, itemsize * ninput), io_signature::make(1, 1, itemsize * noutput)), config(constellation, hierarchy, code_rate_HP, code_rate_LP, \ guard_interval, transmission_mode, include_cell_id, cell_id), d_pg(config) { d_ninput = ninput; d_noutput = noutput; } /* * Our virtual destructor. 
*/ dvbt_reference_signals_impl::~dvbt_reference_signals_impl() { } void dvbt_reference_signals_impl::forecast (int noutput_items, gr_vector_int &ninput_items_required) { ninput_items_required[0] = noutput_items; } int dvbt_reference_signals_impl::general_work (int noutput_items, gr_vector_int &ninput_items, gr_vector_const_void_star &input_items, gr_vector_void_star &output_items) { const gr_complex *in = (const gr_complex *) input_items[0]; gr_complex *out = (gr_complex *) output_items[0]; for (int i = 0; i < noutput_items; i++) { d_pg.update_output(&in[i * d_ninput], &out[i * d_noutput]); } // Tell runtime system how many input items we consumed on // each input stream. consume_each (noutput_items); // Tell runtime system how many output items we produced. return noutput_items; } } /* namespace dtv */ } /* namespace gr */
pinkavaj/gnuradio
gr-dtv/lib/dvbt/dvbt_reference_signals_impl.cc
C++
gpl-3.0
41,320
/* YUI 3.8.0 (build 5744) Copyright 2012 Yahoo! Inc. All rights reserved. Licensed under the BSD License. http://yuilibrary.com/license/ */ YUI.add('anim-scroll', function (Y, NAME) { /** * Adds support for the <code>scroll</code> property in <code>to</code> * and <code>from</code> attributes. * @module anim * @submodule anim-scroll */ var NUM = Number; //TODO: deprecate for scrollTop/Left properties? Y.Anim.behaviors.scroll = { set: function(anim, att, from, to, elapsed, duration, fn) { var node = anim._node, val = ([ fn(elapsed, NUM(from[0]), NUM(to[0]) - NUM(from[0]), duration), fn(elapsed, NUM(from[1]), NUM(to[1]) - NUM(from[1]), duration) ]); if (val[0]) { node.set('scrollLeft', val[0]); } if (val[1]) { node.set('scrollTop', val[1]); } }, get: function(anim) { var node = anim._node; return [node.get('scrollLeft'), node.get('scrollTop')]; } }; }, '3.8.0', {"requires": ["anim-base"]});
relipse/cworklog
public_html/js/yui/3.8.0/build/anim-scroll/anim-scroll.js
JavaScript
gpl-3.0
1,067
/** * * Spacebrew Library for Javascript * -------------------------------- * * This library was designed to work on front-end (browser) envrionments, and back-end (server) * environments. Please refer to the readme file, the documentation and examples to learn how to * use this library. * * Spacebrew is an open, dynamically re-routable software toolkit for choreographing interactive * spaces. Or, in other words, a simple way to connect interactive things to one another. Learn * more about Spacebrew here: http://docs.spacebrew.cc/ * * To import into your web apps, we recommend using the minimized version of this library. * * Latest Updates: * - added blank "options" attribute to config message - for future use * - caps number of messages sent to 60 per second * - reconnect to spacebrew if connection lost * - enable client apps to extend libs with admin functionality. * - added close method to close Spacebrew connection. * * @author Brett Renfer and Julio Terra from LAB @ Rockwell Group * @filename sb-1.3.0.js * @version 1.3.0 * @date May 7, 2013 * */ /** * Check if Bind method exists in current enviroment. If not, it creates an implementation of * this useful method. */ if (!Function.prototype.bind) { Function.prototype.bind = function (oThis) { if (typeof this !== "function") { // closest thing possible to the ECMAScript 5 internal IsCallable function throw new TypeError("Function.prototype.bind - what is trying to be bound is not callable"); } var aArgs = Array.prototype.slice.call(arguments, 1), fToBind = this, fNOP = function () {}, fBound = function () { return fToBind.apply(this instanceof fNOP ? 
this : oThis || window, aArgs.concat(Array.prototype.slice.call(arguments))); }; fNOP.prototype = this.prototype; fBound.prototype = new fNOP(); return fBound; }; } /** * @namespace for Spacebrew library */ var Spacebrew = Spacebrew || {}; /** * create placeholder var for WebSocket object, if it does not already exist */ var WebSocket = WebSocket || {}; /** * Check if Running in Browser or Server (Node) Environment * */ // check if window object already exists to determine if running browswer var window = window || undefined; // check if module object already exists to determine if this is a node application var module = module || undefined; // if app is running in a browser, then define the getQueryString method if (window) { if (!window['getQueryString']){ /** * Get parameters from a query string * @param {String} name Name of query string to parse (w/o '?' or '&') * @return {String} value of parameter (or empty string if not found) */ window.getQueryString = function( name ) { if (!window.location) return; name = name.replace(/[\[]/,"\\\[").replace(/[\]]/,"\\\]"); var regexS = "[\\?&]"+name+"=([^&#]*)"; var regex = new RegExp( regexS ); var results = regex.exec( window.location.href ); if( results == null ) return ""; else return results[1]; } } } // if app is running in a node server environment then package Spacebrew library as a module. // WebSocket module (ws) needs to be saved in a node_modules so that it can be imported. if (!window && module) { WebSocket = require("ws"); module.exports = { Spacebrew: Spacebrew } } /** * Define the Spacebrew Library * */ /** * Spacebrew client! * @constructor * @param {String} server (Optional) Base address of Spacebrew server. This server address is overwritten if server defined in query string; defaults to localhost. * @param {String} name (Optional) Base name of app. Base name is overwritten if "name" is defined in query string; defaults to window.location.href. 
* @param {String} description (Optional) Base description of app. Description name is overwritten if "description" is defined in query string; * @param {Object} options (Optional) An object that holds the optional parameters described below * port (Optional) Port number for the Spacebrew server * admin (Optional) Flag that identifies when app should register for admin privileges with server * debug (Optional) Debug flag that turns on info and debug messaging (limited use) */ Spacebrew.Client = function( server, name, description, options ){ var options = options || {}; // check if the server variable is an object that holds all config values if (server != undefined) { if (toString.call(server) !== '[object String]') { options.port = server.port || undefined; options.debug = server.debug || false; options.reconnect = server.reconnect || false; description = server.description || undefined; name = server.name || undefined; server = server.server || undefined; } } this.debug = (window.getQueryString('debug') === "true" ? true : (options.debug || false)); this.reconnect = options.reconnect || true; this.reconnect_timer = undefined; this.send_interval = 16; this.send_blocked = false; this.msg = {}; /** * Name of app * @type {String} */ this._name = name || "javascript client #"; if (window) { this._name = (window.getQueryString('name') !== "" ? unescape(window.getQueryString('name')) : this._name); } /** * Description of your app * @type {String} */ this._description = description || "spacebrew javascript client"; if (window) { this._description = (window.getQueryString('description') !== "" ? unescape(window.getQueryString('description')) : this._description); } /** * Spacebrew server to which the app will connect * @type {String} */ this.server = server || "sandbox.spacebrew.cc"; if (window) { this.server = (window.getQueryString('server') !== "" ? 
unescape(window.getQueryString('server')) : this.server); } /** * Port number on which Spacebrew server is running * @type {Integer} */ this.port = options.port || 9000; if (window) { port = window.getQueryString('port'); if (port !== "" && !isNaN(port)) { this.port = port; } } /** * Reference to WebSocket * @type {WebSocket} */ this.socket = null; /** * Configuration file for Spacebrew * @type {Object} */ this.client_config = { name: this._name, description: this._description, publish:{ messages:[] }, subscribe:{ messages:[] }, options:{} }; this.admin = {} /** * Are we connected to a Spacebrew server? * @type {Boolean} */ this._isConnected = false; } /** * Connect to Spacebrew * @memberOf Spacebrew.Client */ Spacebrew.Client.prototype.connect = function(){ try { this.socket = new WebSocket("ws://" + this.server + ":" + this.port); this.socket.onopen = this._onOpen.bind(this); this.socket.onmessage = this._onMessage.bind(this); this.socket.onclose = this._onClose.bind(this); } catch(e){ this._isConnected = false; console.log("[connect:Spacebrew] connection attempt failed") } } /** * Close Spacebrew connection * @memberOf Spacebrew.Client */ Spacebrew.Client.prototype.close = function(){ try { if (this._isConnected) { this.socket.close(); this._isConnected = false; console.log("[close:Spacebrew] closing websocket connection") } } catch (e) { this._isConnected = false; } } /** * Override in your app to receive on open event for connection * @memberOf Spacebrew.Client * @public */ Spacebrew.Client.prototype.onOpen = function( name, value ){} /** * Override in your app to receive on close event for connection * @memberOf Spacebrew.Client * @public */ Spacebrew.Client.prototype.onClose = function( name, value ){} /** * Override in your app to receive "range" messages, e.g. 
sb.onRangeMessage = yourRangeFunction * @param {String} name Name of incoming route * @param {String} value [description] * @memberOf Spacebrew.Client * @public */ Spacebrew.Client.prototype.onRangeMessage = function( name, value ){} /** * Override in your app to receive "boolean" messages, e.g. sb.onBooleanMessage = yourBoolFunction * @param {String} name Name of incoming route * @param {String} value [description] * @memberOf Spacebrew.Client * @public */ Spacebrew.Client.prototype.onBooleanMessage = function( name, value ){} /** * Override in your app to receive "string" messages, e.g. sb.onStringMessage = yourStringFunction * @param {String} name Name of incoming route * @param {String} value [description] * @memberOf Spacebrew.Client * @public */ Spacebrew.Client.prototype.onStringMessage = function( name, value ){} /** * Override in your app to receive "custom" messages, e.g. sb.onCustomMessage = yourStringFunction * @param {String} name Name of incoming route * @param {String} value [description] * @memberOf Spacebrew.Client * @public */ Spacebrew.Client.prototype.onCustomMessage = function( name, value, type ){} /** * Add a route you are publishing on * @param {String} name Name of incoming route * @param {String} type "boolean", "range", or "string" * @param {String} def default value * @memberOf Spacebrew.Client * @public */ Spacebrew.Client.prototype.addPublish = function( name, type, def ){ this.client_config.publish.messages.push({"name":name, "type":type, "default":def}); this.updatePubSub(); } /** * [addSubscriber description] * @param {String} name Name of outgoing route * @param {String} type "boolean", "range", or "string" * @memberOf Spacebrew.Client * @public */ Spacebrew.Client.prototype.addSubscribe = function( name, type ){ this.client_config.subscribe.messages.push({"name":name, "type":type }); this.updatePubSub(); } /** * Update publishers and subscribers * @memberOf Spacebrew.Client * @private */ Spacebrew.Client.prototype.updatePubSub = 
function(){ if (this._isConnected) { this.socket.send(JSON.stringify({"config": this.client_config})); } } /** * Send a route to Spacebrew * @param {String} name Name of outgoing route (must match something in addPublish) * @param {String} type "boolean", "range", or "string" * @param {String} value Value to send * @memberOf Spacebrew.Client * @public */ Spacebrew.Client.prototype.send = function( name, type, value ){ var self = this; this.msg = { "message": { "clientName":this._name, "name": name, "type": type, "value": value } } // if send block is not active then send message if (!this.send_blocked) { this.socket.send(JSON.stringify(this.msg)); this.send_blocked = true; this.msg = undefined; // set the timer to unblock message sending setTimeout(function() { self.send_blocked = false; // remove send block if (self.msg != undefined) { // if message exists then sent it self.send(self.msg.message.name, self.msg.message.type, self.msg.message.value); } }, self.send_interval); } } /** * Called on WebSocket open * @private * @memberOf Spacebrew.Client */ Spacebrew.Client.prototype._onOpen = function() { console.log("[_onOpen:Spacebrew] Spacebrew connection opened, client name is: " + this._name); this._isConnected = true; if (this.admin.active) this.connectAdmin(); // if reconnect functionality is activated then clear interval timer when connection succeeds if (this.reconnect_timer) { console.log("[_onOpen:Spacebrew] tearing down reconnect timer") this.reconnect_timer = clearInterval(this.reconnect_timer); this.reconnect_timer = undefined; } // send my config this.updatePubSub(); this.onOpen(); } /** * Called on WebSocket message * @private * @param {Object} e * @memberOf Spacebrew.Client */ Spacebrew.Client.prototype._onMessage = function( e ){ var data = JSON.parse(e.data) , name , type , value ; // handle client messages if (data["message"]) { // check to make sure that this is not an admin message if (!data.message["clientName"]) { name = data.message.name; type = 
data.message.type; value = data.message.value; switch( type ){ case "boolean": this.onBooleanMessage( name, value == "true" ); break; case "string": this.onStringMessage( name, value ); break; case "range": this.onRangeMessage( name, Number(value) ); break; default: this.onCustomMessage( name, value, type ); } } } // handle admin messages else { if (this.admin.active) { this._handleAdminMessages( data ); } } } /** * Called on WebSocket close * @private * @memberOf Spacebrew.Client */ Spacebrew.Client.prototype._onClose = function() { var self = this; console.log("[_onClose:Spacebrew] Spacebrew connection closed"); this._isConnected = false; if (this.admin.active) this.admin.remoteAddress = undefined; // if reconnect functionality is activated set interval timer if connection dies if (this.reconnect && !this.reconnect_timer) { console.log("[_onClose:Spacebrew] setting up reconnect timer"); this.reconnect_timer = setInterval(function () { if (self.isConnected != false) { self.connect(); console.log("[reconnect:Spacebrew] attempting to reconnect to spacebrew"); } }, 5000); } this.onClose(); }; /** * name Method that sets or gets the spacebrew app name. If parameter is provided then it sets the name, otherwise * it just returns the current app name. * @param {String} newName New name of the spacebrew app * @return {String} Returns the name of the spacebrew app if called as a getter function. If called as a * setter function it will return false if the method is called after connecting to spacebrew, * because the name must be configured before connection is made. */ Spacebrew.Client.prototype.name = function (newName){ if (newName) { // if a name has been passed in then update it if (this._isConnected) return false; // if already connected we can't update name this._name = newName; if (window) { this._name = (window.getQueryString('name') !== "" ? 
unescape(window.getQueryString('name')) : this._name); } this.client_config.name = this._name; // update spacebrew config file } return this._name; }; /** * name Method that sets or gets the spacebrew app description. If parameter is provided then it sets the description, * otherwise it just returns the current app description. * @param {String} newDesc New description of the spacebrew app * @return {String} Returns the description of the spacebrew app if called as a getter function. If called as a * setter function it will return false if the method is called after connecting to spacebrew, * because the description must be configured before connection is made. */ Spacebrew.Client.prototype.description = function (newDesc){ if (newDesc) { // if a description has been passed in then update it if (this._isConnected) return false; // if already connected we can't update description this._description = newDesc || "spacebrew javascript client"; if (window) { this._description = (window.getQueryString('description') !== "" ? unescape(window.getQueryString('description')) : this._description); } this.client_config.description = this._description; // update spacebrew config file } return this._description; }; /** * isConnected Method that returns current connection state of the spacebrew client. * @return {Boolean} Returns true if currently connected to Spacebrew */ Spacebrew.Client.prototype.isConnected = function (){ return this._isConnected; }; Spacebrew.Client.prototype.extend = function ( mixin ) { for (var prop in mixin) { if (mixin.hasOwnProperty(prop)) { this[prop] = mixin[prop]; } } };
RyanteckLTD/RTK-000-001-Controller
touchClient/js/sb-1.3.0.js
JavaScript
gpl-3.0
15,832
#region License // Copyright (c) 2013, ClearCanvas Inc. // All rights reserved. // http://www.clearcanvas.ca // // This file is part of the ClearCanvas RIS/PACS open source project. // // The ClearCanvas RIS/PACS open source project is free software: you can // redistribute it and/or modify it under the terms of the GNU General Public // License as published by the Free Software Foundation, either version 3 of the // License, or (at your option) any later version. // // The ClearCanvas RIS/PACS open source project is distributed in the hope that it // will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General // Public License for more details. // // You should have received a copy of the GNU General Public License along with // the ClearCanvas RIS/PACS open source project. If not, see // <http://www.gnu.org/licenses/>. #endregion using System; using System.Security; using System.Text; using System.Text.RegularExpressions; using System.Xml; using ClearCanvas.Common; using ClearCanvas.Dicom.Utilities.Xml; namespace ClearCanvas.Dicom.Samples { public class EditSop { private readonly string _sourceFilename; private DicomFile _dicomFile; public EditSop(string file) { _sourceFilename = file; } public DicomFile DicomFile { get { return _dicomFile; } } public void Load() { _dicomFile = new DicomFile(_sourceFilename); try { _dicomFile.Load(); } catch (Exception e) { Platform.Log(LogLevel.Error, e, "Unexpected exception loading DICOM file: {0}", _sourceFilename); } } public string GetXmlRepresentation() { var theDoc = GetXmlDoc(); var xmlSettings = new XmlWriterSettings { Encoding = Encoding.UTF8, ConformanceLevel = ConformanceLevel.Document, Indent = true, NewLineOnAttributes = false, CheckCharacters = true, IndentChars = " " }; StringBuilder sb = new StringBuilder(); XmlWriter tw = XmlWriter.Create(sb, xmlSettings); theDoc.WriteTo(tw); tw.Flush(); tw.Close(); return 
sb.ToString(); } private XmlDocument GetXmlDoc() { var theDocument = new XmlDocument(); XmlElement instance = theDocument.CreateElement("Instance"); XmlAttribute sopInstanceUid = theDocument.CreateAttribute("UID"); sopInstanceUid.Value = _dicomFile.MediaStorageSopInstanceUid; instance.Attributes.Append(sopInstanceUid); XmlAttribute sopClassAttribute = theDocument.CreateAttribute("SopClassUID"); sopClassAttribute.Value = _dicomFile.SopClass.Uid; instance.Attributes.Append(sopClassAttribute); theDocument.AppendChild(instance); foreach (DicomAttribute attribute in _dicomFile.DataSet) { XmlElement instanceElement = CreateDicomAttributeElement(theDocument, attribute.Tag, "Attribute"); if (attribute is DicomAttributeSQ || attribute is DicomAttributeOW || attribute is DicomAttributeUN || attribute is DicomAttributeOF || attribute is DicomAttributeOB) { continue; } instanceElement.InnerText = XmlEscapeString(attribute); instance.AppendChild(instanceElement); } return theDocument; } private static XmlElement CreateDicomAttributeElement(XmlDocument document, DicomTag dicomTag, string name) { XmlElement dicomAttributeElement = document.CreateElement(name); XmlAttribute tag = document.CreateAttribute("Tag"); tag.Value = "$" + dicomTag.VariableName; XmlAttribute vr = document.CreateAttribute("VR"); vr.Value = dicomTag.VR.ToString(); dicomAttributeElement.Attributes.Append(tag); dicomAttributeElement.Attributes.Append(vr); return dicomAttributeElement; } private static string XmlEscapeString(string input) { string result = input ?? string.Empty; result = SecurityElement.Escape(result); // Do the regular expression to escape out other invalid XML characters in the string not caught by the above. 
// NOTE: the \x sequences you see below are C# escapes, not Regex escapes result = Regex.Replace(result, "[^\x9\xA\xD\x20-\xFFFD]", m => string.Format("&#x{0:X};", (int)m.Value[0])); return result; } public void UpdateTags(string xml) { var theDoc = new XmlDocument(); try { theDoc.LoadXml(xml); var instanceXml = new InstanceXml(theDoc.DocumentElement, null); DicomAttributeCollection queryMessage = instanceXml.Collection; if (queryMessage == null) { Platform.Log(LogLevel.Error, "Unexpected error parsing move message"); return; } foreach (var attribute in queryMessage) { _dicomFile.DataSet[attribute.Tag] = attribute.Copy(); } } catch (Exception x) { Platform.Log(LogLevel.Error, x, "Unable to perform update"); } } public void Save(string filename) { try { _dicomFile.Save(filename); } catch (Exception e) { Platform.Log(LogLevel.Error, e, "Unexpected exception saving dicom file: {0}", filename); } } } }
testdoron/ClearCanvas
Dicom/Samples/EditSop.cs
C#
gpl-3.0
6,331
// Copyright 2019 The go-ethereum Authors // This file is part of the go-ethereum library. // // The go-ethereum library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The go-ethereum library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>. package netutil import "net" // AddrIP gets the IP address contained in addr. It returns nil if no address is present. func AddrIP(addr net.Addr) net.IP { switch a := addr.(type) { case *net.IPAddr: return a.IP case *net.TCPAddr: return a.IP case *net.UDPAddr: return a.IP default: return nil } }
status-im/status-go
vendor/github.com/ethereum/go-ethereum/p2p/netutil/addrutil.go
GO
mpl-2.0
1,106
<?php if(!defined('sugarEntry') || !sugarEntry) die('Not A Valid Entry Point'); /********************************************************************************* * SugarCRM Community Edition is a customer relationship management program developed by * SugarCRM, Inc. Copyright (C) 2004-2012 SugarCRM Inc. * * This program is free software; you can redistribute it and/or modify it under * the terms of the GNU Affero General Public License version 3 as published by the * Free Software Foundation with the addition of the following permission added * to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK * IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY * OF NON INFRINGEMENT OF THIRD PARTY RIGHTS. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more * details. * * You should have received a copy of the GNU Affero General Public License along with * this program; if not, see http://www.gnu.org/licenses or write to the Free * Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301 USA. * * You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road, * SW2-130, Cupertino, CA 95014, USA. or at email address contact@sugarcrm.com. * * The interactive user interfaces in modified source and object code versions * of this program must display Appropriate Legal Notices, as required under * Section 5 of the GNU Affero General Public License version 3. * * In accordance with Section 7(b) of the GNU Affero General Public License version 3, * these Appropriate Legal Notices must retain the display of the "Powered by * SugarCRM" logo. If the display of the logo is not reasonably feasible for * technical reasons, the Appropriate Legal Notices must display the words * "Powered by SugarCRM". 
********************************************************************************/
/*********************************************************************************
* Portions created by SugarCRM are Copyright (C) SugarCRM, Inc.
* All Rights Reserved.
* Contributor(s): ______________________________________..
********************************************************************************/

// Search-field metadata for the custom lm_FaxMan module: maps each
// searchable field name to its query type and (for dropdowns) the option
// list / template variable used by the search form.
$module_name = 'lm_FaxMan';
$_module_name = 'lm_faxman';
$searchFields['lm_FaxMan'] = array(
    'document_name' => array('query_type' => 'default'),
    // Dropdown fields: 'options' names the app dom, 'template_var' the
    // Smarty variable the rendered <select> options are assigned to.
    'category_id' => array('query_type' => 'default', 'options' => 'document_category_dom', 'template_var' => 'CATEGORY_OPTIONS'),
    'subcategory_id' => array('query_type' => 'default', 'options' => 'document_subcategory_dom', 'template_var' => 'SUBCATEGORY_OPTIONS'),
    'active_date' => array('query_type' => 'default'),
    'exp_date' => array('query_type' => 'default'),
    //Range Search Support
    // Date-range search entries: each date column gets a range key plus
    // explicit start/end keys used by the range-search widget.
    'range_date_entered' => array('query_type' => 'default', 'enable_range_search' => true, 'is_date_field' => true),
    'start_range_date_entered' => array('query_type' => 'default', 'enable_range_search' => true, 'is_date_field' => true),
    'end_range_date_entered' => array('query_type' => 'default', 'enable_range_search' => true, 'is_date_field' => true),
    'range_date_modified' => array('query_type' => 'default', 'enable_range_search' => true, 'is_date_field' => true),
    'start_range_date_modified' => array('query_type' => 'default', 'enable_range_search' => true, 'is_date_field' => true),
    'end_range_date_modified' => array('query_type' => 'default', 'enable_range_search' => true, 'is_date_field' => true),
    //Range Search Support
);
?>
KadJ/amazon_sugar
modules/lm_FaxMan/metadata/SearchFields.php
PHP
agpl-3.0
3,742
<?php if(!defined('sugarEntry') || !sugarEntry) die('Not A Valid Entry Point'); /********************************************************************************* * SugarCRM Community Edition is a customer relationship management program developed by * SugarCRM, Inc. Copyright (C) 2004-2012 SugarCRM Inc. * * This program is free software; you can redistribute it and/or modify it under * the terms of the GNU Affero General Public License version 3 as published by the * Free Software Foundation with the addition of the following permission added * to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK * IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY * OF NON INFRINGEMENT OF THIRD PARTY RIGHTS. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more * details. * * You should have received a copy of the GNU Affero General Public License along with * this program; if not, see http://www.gnu.org/licenses or write to the Free * Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301 USA. * * You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road, * SW2-130, Cupertino, CA 95014, USA. or at email address contact@sugarcrm.com. * * The interactive user interfaces in modified source and object code versions * of this program must display Appropriate Legal Notices, as required under * Section 5 of the GNU Affero General Public License version 3. * * In accordance with Section 7(b) of the GNU Affero General Public License version 3, * these Appropriate Legal Notices must retain the display of the "Powered by * SugarCRM" logo. If the display of the logo is not reasonably feasible for * technical reasons, the Appropriate Legal Notices must display the words * "Powered by SugarCRM". 
********************************************************************************/

/**
 * Builds the "additional details" popup content for a Meetings row.
 *
 * Assembles an HTML snippet showing start date/time, duration, related-to
 * link and a (truncated) description, plus edit/detail-view links for the
 * record. Presumably $fields holds the upper-cased list-view row values for
 * one meeting — verify against the caller.
 *
 * @param array $fields Row data keyed by upper-case field name
 *                      (DATE_START, DURATION_HOURS, PARENT_ID, ID, ...).
 * @return array 'fieldToAddTo' (column the popup attaches to), 'string'
 *               (the HTML body), 'editLink' and 'viewLink' URLs.
 */
function additionalDetailsMeeting($fields)
{
    // Module strings are cached across calls; loaded once per request.
    static $mod_strings;
    if(empty($mod_strings)) {
        global $current_language;
        $mod_strings = return_module_language($current_language, 'Meetings');
    }
    $overlib_string = '';
    //Modify by jchi 6/27/2008 1515pm china time , bug 20626.
    if(!empty($fields['DATE_START']))
        $overlib_string .= '<b>'. $mod_strings['LBL_DATE_TIME'] . '</b> ' . $fields['DATE_START'] . ' <br>';
    if(isset($fields['DURATION_HOURS']) || isset($fields['DURATION_MINUTES'])) {
        $overlib_string .= '<b>'. $mod_strings['LBL_DURATION'] . '</b> ';
        if(isset($fields['DURATION_HOURS'])) {
            $overlib_string .= $fields['DURATION_HOURS'] . $mod_strings['LBL_HOURS_ABBREV'] . ' ';
        }
        if(isset($fields['DURATION_MINUTES'])) {
            $overlib_string .= $fields['DURATION_MINUTES'] . $mod_strings['LBL_MINSS_ABBREV'];
        }
        $overlib_string .= '<br>';
    }
    // Link back to the parent record (account, opportunity, ...) if any.
    if (!empty($fields['PARENT_ID'])) {
        $overlib_string .= "<b>". $mod_strings['LBL_RELATED_TO'] . "</b> ". "<a href='index.php?module=".$fields['PARENT_TYPE']."&action=DetailView&record=".$fields['PARENT_ID']."'>". $fields['PARENT_NAME'] . "</a>";
        $overlib_string .= '<br>';
    }
    // Description is truncated to 300 characters with an ellipsis marker.
    if(!empty($fields['DESCRIPTION'])) {
        $overlib_string .= '<b>'. $mod_strings['LBL_DESCRIPTION'] . '</b> ' . substr($fields['DESCRIPTION'], 0, 300);
        if(strlen($fields['DESCRIPTION']) > 300) $overlib_string .= '...';
        $overlib_string .= '<br>';
    }
    $editLink = "index.php?action=EditView&module=Meetings&record={$fields['ID']}";
    $viewLink = "index.php?action=DetailView&module=Meetings&record={$fields['ID']}";
    return array('fieldToAddTo' => 'NAME', 'string' => $overlib_string, 'editLink' => $editLink, 'viewLink' => $viewLink);
}
?>
KadJ/amazon_sugar
upload/upload/upgrades/patch/SugarCE-Upgrade-6.5.x-to-6.5.9-restore/modules/Meetings/metadata/additionalDetails.php
PHP
agpl-3.0
3,933
package crazypants.enderio.machine.capbank.network; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.inventory.IInventory; import net.minecraft.item.ItemStack; import cofh.api.energy.IEnergyContainerItem; import crazypants.enderio.EnderIO; import crazypants.enderio.machine.capbank.TileCapBank; public class InventoryImpl implements IInventory { public static boolean isInventoryEmtpy(TileCapBank cap) { for (ItemStack st : cap.getInventory()) { if(st != null) { return false; } } return true; } public static boolean isInventoryEmtpy(ItemStack[] inv) { if(inv == null) { return true; } for (ItemStack st : inv) { if(st != null) { return false; } } return true; } private ItemStack[] inventory; private TileCapBank capBank; public InventoryImpl() { } public TileCapBank getCapBank() { return capBank; } public void setCapBank(TileCapBank cap) { capBank = cap; if(cap == null) { inventory = null; return; } inventory = cap.getInventory(); } public boolean isEmtpy() { return isInventoryEmtpy(inventory); } public ItemStack[] getStacks() { return inventory; } @Override public ItemStack getStackInSlot(int slot) { if(inventory == null) { return null; } if(slot < 0 || slot >= inventory.length) { return null; } return inventory[slot]; } @Override public ItemStack decrStackSize(int fromSlot, int amount) { if(inventory == null) { return null; } if(fromSlot < 0 || fromSlot >= inventory.length) { return null; } ItemStack item = inventory[fromSlot]; if(item == null) { return null; } if(item.stackSize <= amount) { ItemStack result = item.copy(); inventory[fromSlot] = null; return result; } item.stackSize -= amount; return item.copy(); } @Override public void setInventorySlotContents(int slot, ItemStack itemstack) { if(inventory == null) { return; } if(slot < 0 || slot >= inventory.length) { return; } inventory[slot] = itemstack; } @Override public int getSizeInventory() { return 4; } //--- constant values @Override public ItemStack getStackInSlotOnClosing(int p_70304_1_) 
{ return null; } @Override public String getInventoryName() { return EnderIO.blockCapBank.getUnlocalizedName() + ".name"; } @Override public boolean hasCustomInventoryName() { return false; } @Override public int getInventoryStackLimit() { return 1; } @Override public boolean isUseableByPlayer(EntityPlayer p_70300_1_) { return true; } @Override public boolean isItemValidForSlot(int slot, ItemStack itemstack) { if(itemstack == null) { return false; } return itemstack.getItem() instanceof IEnergyContainerItem; } @Override public void openInventory() { } @Override public void closeInventory() { } @Override public void markDirty() { } }
Samernieve/EnderIO
src/main/java/crazypants/enderio/machine/capbank/network/InventoryImpl.java
Java
unlicense
3,083
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

namespace Apache.Ignite.Core.Impl.Messaging
{
    using System;
    using System.Diagnostics;
    using Apache.Ignite.Core.Binary;
    using Apache.Ignite.Core.Impl.Binary;
    using Apache.Ignite.Core.Impl.Binary.IO;
    using Apache.Ignite.Core.Impl.Common;
    using Apache.Ignite.Core.Impl.Handle;
    using Apache.Ignite.Core.Impl.Resource;
    using Apache.Ignite.Core.Messaging;

    /// <summary>
    /// Non-generic binary message listener wrapper.
    /// </summary>
    internal class MessageListenerHolder : IBinaryWriteAware, IHandle
    {
        /** Invoker function that takes key and value and invokes wrapped IMessageListener */
        private readonly Func<Guid, object, bool> _invoker;

        /** Current Ignite instance. */
        private readonly Ignite _ignite;

        /** Underlying filter. */
        private readonly object _filter;

        /// <summary>
        /// Initializes a new instance of the <see cref="MessageListenerHolder" /> class.
        /// </summary>
        /// <param name="grid">Grid.</param>
        /// <param name="filter">The <see cref="IMessageListener{T}" /> to wrap.</param>
        /// <param name="invoker">The invoker func that takes key and value and invokes wrapped IMessageListener.</param>
        private MessageListenerHolder(Ignite grid, object filter, Func<Guid, object, bool> invoker)
        {
            // FIX: validate all arguments up front and assign each field exactly
            // once (the previous version assigned _invoker twice and asserted
            // grid only midway through the constructor).
            Debug.Assert(grid != null);
            Debug.Assert(filter != null);
            Debug.Assert(invoker != null);

            // 1. Set fields.
            _ignite = grid;
            _filter = filter;
            _invoker = invoker;

            // 2. Perform injections.
            ResourceProcessor.Inject(filter, grid);
        }

        /// <summary>
        /// Invoke the filter.
        /// </summary>
        /// <param name="input">Stream positioned at the marshalled (nodeId, message) pair.</param>
        /// <returns>1 when the listener wants to keep receiving messages, 0 to unsubscribe.</returns>
        public int Invoke(IBinaryStream input)
        {
            var rawReader = _ignite.Marshaller.StartUnmarshal(input).GetRawReader();

            var nodeId = rawReader.ReadGuid();
            Debug.Assert(nodeId != null);

            return _invoker(nodeId.Value, rawReader.ReadObject<object>()) ? 1 : 0;
        }

        /// <summary>
        /// Wrapped <see cref="IMessageListener{T}" />.
        /// </summary>
        public object Filter
        {
            get { return _filter; }
        }

        /// <summary>
        /// Destroy callback.
        /// </summary>
        public Action DestroyAction { private get; set; }

        /** <inheritDoc /> */
        public void Release()
        {
            if (DestroyAction != null)
                DestroyAction();
        }

        /** <inheritDoc /> */
        public bool Released
        {
            get { return false; } // Multiple releases are allowed.
        }

        /// <summary>
        /// Creates local holder instance.
        /// </summary>
        /// <param name="grid">Ignite instance.</param>
        /// <param name="listener">Filter.</param>
        /// <returns>
        /// New instance of <see cref="MessageListenerHolder" />
        /// </returns>
        public static MessageListenerHolder CreateLocal<T>(Ignite grid, IMessageListener<T> listener)
        {
            Debug.Assert(listener != null);

            return new MessageListenerHolder(grid, listener, (id, msg) => listener.Invoke(id, (T)msg));
        }

        /// <summary>
        /// Creates remote holder instance.
        /// </summary>
        /// <param name="grid">Grid.</param>
        /// <param name="memPtr">Memory pointer.</param>
        /// <returns>Deserialized instance of <see cref="MessageListenerHolder"/></returns>
        public static MessageListenerHolder CreateRemote(Ignite grid, long memPtr)
        {
            Debug.Assert(grid != null);

            using (var stream = IgniteManager.Memory.Get(memPtr).GetStream())
            {
                return grid.Marshaller.Unmarshal<MessageListenerHolder>(stream);
            }
        }

        /// <summary>
        /// Gets the invoker func.
        /// </summary>
        private static Func<Guid, object, bool> GetInvoker(object pred)
        {
            var func = DelegateTypeDescriptor.GetMessageListener(pred.GetType());

            return (id, msg) => func(pred, id, msg);
        }

        /** <inheritdoc /> */
        public void WriteBinary(IBinaryWriter writer)
        {
            var writer0 = (BinaryWriter)writer.GetRawWriter();

            writer0.WithDetach(w => w.WriteObject(Filter));
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="MessageListenerHolder"/> class.
        /// </summary>
        /// <param name="reader">The reader.</param>
        public MessageListenerHolder(IBinaryReader reader)
        {
            var reader0 = (BinaryReader)reader.GetRawReader();

            _filter = reader0.ReadObject<object>();
            _invoker = GetInvoker(_filter);

            _ignite = reader0.Marshaller.Ignite;

            ResourceProcessor.Inject(_filter, _ignite);
        }
    }
}
tkpanther/ignite
modules/platforms/dotnet/Apache.Ignite.Core/Impl/Messaging/MessageListenerHolder.cs
C#
apache-2.0
5,939
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.apache.zookeeper.server.quorum; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.junit.Assert.*; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.io.BufferedOutputStream; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.File; import java.io.IOException; import java.net.InetSocketAddress; import java.net.Socket; import java.nio.ByteBuffer; import java.nio.channels.SelectableChannel; import java.nio.channels.SelectionKey; import java.nio.channels.Selector; import java.nio.channels.SocketChannel; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Random; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import org.apache.jute.InputArchive; import org.apache.jute.OutputArchive; import org.apache.zookeeper.MockPacket; import org.apache.zookeeper.ZooDefs; import org.apache.zookeeper.proto.ConnectRequest; import 
org.apache.zookeeper.proto.ReplyHeader; import org.apache.zookeeper.proto.RequestHeader; import org.apache.zookeeper.proto.SetWatches; import org.apache.zookeeper.server.MockNIOServerCnxn; import org.apache.zookeeper.server.NIOServerCnxn; import org.apache.zookeeper.server.NIOServerCnxnFactory; import org.apache.zookeeper.server.MockSelectorThread; import org.apache.zookeeper.server.ZKDatabase; import org.apache.zookeeper.server.ZooTrace; import org.apache.zookeeper.server.persistence.FileTxnSnapLog; import org.junit.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Demonstrate ZOOKEEPER-1382 : Watches leak on expired session */ @RunWith(Parameterized.class) public class WatchLeakTest { protected static final Logger LOG = LoggerFactory .getLogger(WatchLeakTest.class); final long SESSION_ID = 0xBABEL; private final boolean sessionTimedout; public WatchLeakTest(boolean sessionTimedout) { this.sessionTimedout = sessionTimedout; } @Parameters public static Collection<Object[]> configs() { return Arrays.asList(new Object[][] { { false }, { true }, }); } /** * Check that if session has expired then no watch can be set */ @Test public void testWatchesLeak() throws Exception { NIOServerCnxnFactory serverCnxnFactory = mock(NIOServerCnxnFactory.class); final SelectionKey sk = new FakeSK(); MockSelectorThread selectorThread = mock(MockSelectorThread.class); when(selectorThread.addInterestOpsUpdateRequest(any(SelectionKey.class))).thenAnswer(new Answer<Boolean>() { @Override public Boolean answer(InvocationOnMock invocation) throws Throwable { SelectionKey sk = (SelectionKey)invocation.getArguments()[0]; NIOServerCnxn nioSrvCnx = (NIOServerCnxn)sk.attachment(); sk.interestOps(nioSrvCnx.getInterestOps()); return true; } }); ZKDatabase database = new ZKDatabase(null); database.setlastProcessedZxid(2L); QuorumPeer quorumPeer = mock(QuorumPeer.class); FileTxnSnapLog logfactory = 
mock(FileTxnSnapLog.class); // Directories are not used but we need it to avoid NPE when(logfactory.getDataDir()).thenReturn(new File("")); when(logfactory.getSnapDir()).thenReturn(new File("")); FollowerZooKeeperServer fzks = null; try { // Create a new follower fzks = new FollowerZooKeeperServer(logfactory, quorumPeer, database); fzks.startup(); fzks.setServerCnxnFactory(serverCnxnFactory); quorumPeer.follower = new MyFollower(quorumPeer, fzks); LOG.info("Follower created"); // Simulate a socket channel between a client and a follower final SocketChannel socketChannel = createClientSocketChannel(); // Create the NIOServerCnxn that will handle the client requests final MockNIOServerCnxn nioCnxn = new MockNIOServerCnxn(fzks, socketChannel, sk, serverCnxnFactory, selectorThread); sk.attach(nioCnxn); // Send the connection request as a client do nioCnxn.doIO(sk); LOG.info("Client connection sent"); // Send the valid or invalid session packet to the follower QuorumPacket qp = createValidateSessionPacketResponse(!sessionTimedout); quorumPeer.follower.processPacket(qp); LOG.info("Session validation sent"); // OK, now the follower knows that the session is valid or invalid, let's try // to send the watches nioCnxn.doIO(sk); // wait for the the request processor to do his job Thread.sleep(1000L); LOG.info("Watches processed"); // If session has not been validated, there must be NO watches int watchCount = database.getDataTree().getWatchCount(); if (sessionTimedout) { // Session has not been re-validated ! 
LOG.info("session is not valid, watches = {}", watchCount); assertEquals("Session is not valid so there should be no watches", 0, watchCount); } else { // Session has been re-validated LOG.info("session is valid, watches = {}", watchCount); assertEquals("Session is valid so the watch should be there", 1, watchCount); } } finally { if (fzks != null) { fzks.shutdown(); } } } /** * A follower with no real leader connection */ public static class MyFollower extends Follower { /** * Create a follower with a mocked leader connection * * @param self * @param zk */ MyFollower(QuorumPeer self, FollowerZooKeeperServer zk) { super(self, zk); leaderOs = mock(OutputArchive.class); leaderIs = mock(InputArchive.class); bufferedOutput = mock(BufferedOutputStream.class); } } /** * Simulate the behavior of a real selection key */ private static class FakeSK extends SelectionKey { @Override public SelectableChannel channel() { return null; } @Override public Selector selector() { return mock(Selector.class); } @Override public boolean isValid() { return true; } @Override public void cancel() { } @Override public int interestOps() { return ops; } private int ops = OP_WRITE + OP_READ; @Override public SelectionKey interestOps(int ops) { this.ops = ops; return this; } @Override public int readyOps() { boolean reading = (ops & OP_READ) != 0; boolean writing = (ops & OP_WRITE) != 0; if (reading && writing) { LOG.info("Channel is ready for reading and writing"); } else if (reading) { LOG.info("Channel is ready for reading only"); } else if (writing) { LOG.info("Channel is ready for writing only"); } return ops; } } /** * Create a watches message with a single watch on / * * @return a message that attempts to set 1 watch on / */ private ByteBuffer createWatchesMessage() { List<String> dataWatches = new ArrayList<String>(1); dataWatches.add("/"); List<String> existWatches = Collections.emptyList(); List<String> childWatches = Collections.emptyList(); SetWatches sw = new SetWatches(1L, 
dataWatches, existWatches, childWatches); RequestHeader h = new RequestHeader(); h.setType(ZooDefs.OpCode.setWatches); h.setXid(-8); MockPacket p = new MockPacket(h, new ReplyHeader(), sw, null, null); return p.createAndReturnBB(); } /** * This is the secret that we use to generate passwords, for the moment it * is more of a sanity check. */ static final private long superSecret = 0XB3415C00L; /** * Create a connection request * * @return a serialized connection request */ private ByteBuffer createConnRequest() { Random r = new Random(SESSION_ID ^ superSecret); byte p[] = new byte[16]; r.nextBytes(p); ConnectRequest conReq = new ConnectRequest(0, 1L, 30000, SESSION_ID, p); MockPacket packet = new MockPacket(null, null, conReq, null, null, false); return packet.createAndReturnBB(); } /** * Mock a client channel with a connection request and a watches message * inside. * * @return a socket channel * @throws IOException */ private SocketChannel createClientSocketChannel() throws IOException { SocketChannel socketChannel = mock(SocketChannel.class); Socket socket = mock(Socket.class); InetSocketAddress socketAddress = new InetSocketAddress(1234); when(socket.getRemoteSocketAddress()).thenReturn(socketAddress); when(socketChannel.socket()).thenReturn(socket); // Send watches packet to server connection final ByteBuffer connRequest = createConnRequest(); final ByteBuffer watchesMessage = createWatchesMessage(); final ByteBuffer request = ByteBuffer.allocate(connRequest.limit() + watchesMessage.limit()); request.put(connRequest); request.put(watchesMessage); Answer<Integer> answer = new Answer<Integer>() { int i = 0; @Override public Integer answer(InvocationOnMock invocation) throws Throwable { Object[] args = invocation.getArguments(); ByteBuffer bb = (ByteBuffer) args[0]; for (int k = 0; k < bb.limit(); k++) { bb.put(request.get(i)); i = i + 1; } return bb.limit(); } }; when(socketChannel.read(any(ByteBuffer.class))).thenAnswer(answer); return socketChannel; } /** * 
Forge an invalid session packet as a LEADER do * * @param valid <code>true</code> to create a valid session message * * @throws Exception */ private QuorumPacket createValidateSessionPacketResponse(boolean valid) throws Exception { QuorumPacket qp = createValidateSessionPacket(); ByteArrayInputStream bis = new ByteArrayInputStream(qp.getData()); DataInputStream dis = new DataInputStream(bis); long id = dis.readLong(); ByteArrayOutputStream bos = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(bos); dos.writeLong(id); // false means that the session has expired dos.writeBoolean(valid); qp.setData(bos.toByteArray()); return qp; } /** * Forge an validate session packet as a LEARNER do * * @return * @throws Exception */ private QuorumPacket createValidateSessionPacket() throws Exception { ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(baos); dos.writeLong(SESSION_ID); dos.writeInt(3000); dos.close(); QuorumPacket qp = new QuorumPacket(Leader.REVALIDATE, -1, baos.toByteArray(), null); return qp; } }
bit1129/open-source-projects
src/java/test/org/apache/zookeeper/server/quorum/WatchLeakTest.java
Java
apache-2.0
12,847
/*
 * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

/*
 * Do not modify this file. This file is generated from the dynamodb-2012-08-10.normal.json service model.
 */
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
using System.Xml.Serialization;

using Amazon.DynamoDBv2.Model;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
using Amazon.Runtime.Internal.Transform;
using Amazon.Runtime.Internal.Util;
using ThirdParty.Json.LitJson;

namespace Amazon.DynamoDBv2.Model.Internal.MarshallTransformations
{
    /// <summary>
    /// Serializes a <see cref="GlobalSecondaryIndex"/> request object to JSON,
    /// emitting only the properties that have been set.
    /// </summary>
    public class GlobalSecondaryIndexMarshaller : IRequestMarshaller<GlobalSecondaryIndex, JsonMarshallerContext>
    {
        /// <summary>
        /// Shared singleton instance of this marshaller.
        /// </summary>
        public static readonly GlobalSecondaryIndexMarshaller Instance = new GlobalSecondaryIndexMarshaller();

        /// <summary>
        /// Writes <paramref name="requestObject"/> to the context's JSON writer.
        /// </summary>
        /// <param name="requestObject">The index definition to serialize.</param>
        /// <param name="context">Marshalling context owning the JSON writer.</param>
        public void Marshall(GlobalSecondaryIndex requestObject, JsonMarshallerContext context)
        {
            var writer = context.Writer;

            if (requestObject.IsSetIndexName())
            {
                writer.WritePropertyName("IndexName");
                writer.Write(requestObject.IndexName);
            }

            if (requestObject.IsSetKeySchema())
            {
                // KeySchema is a JSON array of key schema element objects.
                writer.WritePropertyName("KeySchema");
                writer.WriteArrayStart();
                foreach (var keySchemaElement in requestObject.KeySchema)
                {
                    writer.WriteObjectStart();
                    KeySchemaElementMarshaller.Instance.Marshall(keySchemaElement, context);
                    writer.WriteObjectEnd();
                }
                writer.WriteArrayEnd();
            }

            if (requestObject.IsSetProjection())
            {
                writer.WritePropertyName("Projection");
                writer.WriteObjectStart();
                ProjectionMarshaller.Instance.Marshall(requestObject.Projection, context);
                writer.WriteObjectEnd();
            }

            if (requestObject.IsSetProvisionedThroughput())
            {
                writer.WritePropertyName("ProvisionedThroughput");
                writer.WriteObjectStart();
                ProvisionedThroughputMarshaller.Instance.Marshall(requestObject.ProvisionedThroughput, context);
                writer.WriteObjectEnd();
            }
        }
    }
}
ykbarros/aws-sdk-xamarin
AWS.XamarinSDK/AWSSDK_iOS/Amazon.DynamoDBv2/Model/Internal/MarshallTransformations/GlobalSecondaryIndexMarshaller.cs
C#
apache-2.0
3,183
// Copyright John Maddock 2008. // Use, modification and distribution are subject to the // Boost Software License, Version 1.0. (See accompanying file // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // # include <pch.hpp> #ifndef BOOST_MATH_TR1_SOURCE # define BOOST_MATH_TR1_SOURCE #endif #include <boost/math/tr1.hpp> #include <boost/math/special_functions/next.hpp> #include "c_policy.hpp" namespace boost{ namespace math{ namespace tr1{ extern "C" long double BOOST_MATH_TR1_DECL boost_nexttowardl BOOST_PREVENT_MACRO_SUBSTITUTION(long double x, long double y) BOOST_MATH_C99_THROW_SPEC { return c_policies::nextafter BOOST_PREVENT_MACRO_SUBSTITUTION(x, y); } }}}
flingone/frameworks_base_cmds_remoted
libs/boost/libs/math/src/tr1/nexttowardl.cpp
C++
apache-2.0
727
"""Support for MySensors covers.""" from homeassistant.components import mysensors from homeassistant.components.cover import ATTR_POSITION, DOMAIN, CoverDevice from homeassistant.const import STATE_OFF, STATE_ON async def async_setup_platform( hass, config, async_add_entities, discovery_info=None): """Set up the mysensors platform for covers.""" mysensors.setup_mysensors_platform( hass, DOMAIN, discovery_info, MySensorsCover, async_add_entities=async_add_entities) class MySensorsCover(mysensors.device.MySensorsEntity, CoverDevice): """Representation of the value of a MySensors Cover child node.""" @property def assumed_state(self): """Return True if unable to access real state of entity.""" return self.gateway.optimistic @property def is_closed(self): """Return True if cover is closed.""" set_req = self.gateway.const.SetReq if set_req.V_DIMMER in self._values: return self._values.get(set_req.V_DIMMER) == 0 return self._values.get(set_req.V_LIGHT) == STATE_OFF @property def current_cover_position(self): """Return current position of cover. None is unknown, 0 is closed, 100 is fully open. """ set_req = self.gateway.const.SetReq return self._values.get(set_req.V_DIMMER) async def async_open_cover(self, **kwargs): """Move the cover up.""" set_req = self.gateway.const.SetReq self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_UP, 1) if self.gateway.optimistic: # Optimistically assume that cover has changed state. if set_req.V_DIMMER in self._values: self._values[set_req.V_DIMMER] = 100 else: self._values[set_req.V_LIGHT] = STATE_ON self.async_schedule_update_ha_state() async def async_close_cover(self, **kwargs): """Move the cover down.""" set_req = self.gateway.const.SetReq self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_DOWN, 1) if self.gateway.optimistic: # Optimistically assume that cover has changed state. 
if set_req.V_DIMMER in self._values: self._values[set_req.V_DIMMER] = 0 else: self._values[set_req.V_LIGHT] = STATE_OFF self.async_schedule_update_ha_state() async def async_set_cover_position(self, **kwargs): """Move the cover to a specific position.""" position = kwargs.get(ATTR_POSITION) set_req = self.gateway.const.SetReq self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_DIMMER, position) if self.gateway.optimistic: # Optimistically assume that cover has changed state. self._values[set_req.V_DIMMER] = position self.async_schedule_update_ha_state() async def async_stop_cover(self, **kwargs): """Stop the device.""" set_req = self.gateway.const.SetReq self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_STOP, 1)
MartinHjelmare/home-assistant
homeassistant/components/mysensors/cover.py
Python
apache-2.0
3,195
/*
 * Copyright 2012-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.cli.command.shell;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;

import org.springframework.boot.cli.command.AbstractCommand;
import org.springframework.boot.cli.command.Command;
import org.springframework.boot.cli.command.status.ExitStatus;
import org.springframework.boot.loader.tools.RunProcess;
import org.springframework.util.StringUtils;

/**
 * Special {@link Command} used to run a process from the shell. NOTE: this command is not
 * directly installed into the shell.
 *
 * @author Phillip Webb
 */
class RunProcessCommand extends AbstractCommand {

	private final String[] command;

	private volatile RunProcess process;

	RunProcessCommand(String... command) {
		super(null, null);
		this.command = command;
	}

	@Override
	public ExitStatus run(String... args) throws Exception {
		return run(Arrays.asList(args));
	}

	/**
	 * Launch the configured command with the given extra arguments and wait for it
	 * to finish.
	 * @param args additional arguments appended to the base command
	 * @return {@link ExitStatus#OK} on a zero exit code, otherwise an
	 * {@code EXTERNAL_ERROR} status carrying the process exit code
	 * @throws IOException if the process cannot be started
	 */
	protected ExitStatus run(Collection<String> args) throws IOException {
		this.process = new RunProcess(this.command);
		int code = this.process.run(true, StringUtils.toStringArray(args));
		return (code == 0) ? ExitStatus.OK : new ExitStatus(code, "EXTERNAL_ERROR");
	}

	/**
	 * Forward a SIGINT to the running process.
	 * @return {@code true} if the signal was handled by the process
	 */
	public boolean handleSigInt() {
		return this.process.handleSigInt();
	}

}
hello2009chen/spring-boot
spring-boot-project/spring-boot-cli/src/main/java/org/springframework/boot/cli/command/shell/RunProcessCommand.java
Java
apache-2.0
1,906
/**
 * This class is generated by jOOQ
 */
package io.cattle.platform.core.model.tables;

/**
 * This class is generated by jOOQ.
 *
 * NOTE(review): generated code — change the schema/generator and regenerate
 * instead of editing this file by hand.
 */
@javax.annotation.Generated(value = { "http://www.jooq.org", "3.3.0" }, comments = "This class is generated by jOOQ")
@java.lang.SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class AgentTable extends org.jooq.impl.TableImpl<io.cattle.platform.core.model.tables.records.AgentRecord> {

    private static final long serialVersionUID = -328097319;

    /**
     * The singleton instance of <code>cattle.agent</code>
     */
    public static final io.cattle.platform.core.model.tables.AgentTable AGENT = new io.cattle.platform.core.model.tables.AgentTable();

    /**
     * The class holding records for this type
     */
    @Override
    public java.lang.Class<io.cattle.platform.core.model.tables.records.AgentRecord> getRecordType() {
        return io.cattle.platform.core.model.tables.records.AgentRecord.class;
    }

    /**
     * The column <code>cattle.agent.id</code>. Primary key (see {@link #getPrimaryKey()}).
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.Long> ID = createField("id", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");

    /**
     * The column <code>cattle.agent.name</code>.
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.String> NAME = createField("name", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");

    /**
     * The column <code>cattle.agent.account_id</code>.
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.Long> ACCOUNT_ID = createField("account_id", org.jooq.impl.SQLDataType.BIGINT, this, "");

    /**
     * The column <code>cattle.agent.kind</code>.
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.String> KIND = createField("kind", org.jooq.impl.SQLDataType.VARCHAR.length(255).nullable(false), this, "");

    /**
     * The column <code>cattle.agent.uuid</code>. Unique (backed by KEY_AGENT_IDX_AGENT_UUID).
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.String> UUID = createField("uuid", org.jooq.impl.SQLDataType.VARCHAR.length(128).nullable(false), this, "");

    /**
     * The column <code>cattle.agent.description</code>.
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.String> DESCRIPTION = createField("description", org.jooq.impl.SQLDataType.VARCHAR.length(1024), this, "");

    /**
     * The column <code>cattle.agent.state</code>.
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.String> STATE = createField("state", org.jooq.impl.SQLDataType.VARCHAR.length(128).nullable(false), this, "");

    /**
     * The column <code>cattle.agent.created</code>.
     * Mapped from SQL TIMESTAMP to java.util.Date via DateConverter.
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.util.Date> CREATED = createField("created", org.jooq.impl.SQLDataType.TIMESTAMP.asConvertedDataType(new io.cattle.platform.db.jooq.converter.DateConverter()), this, "");

    /**
     * The column <code>cattle.agent.removed</code>.
     * Mapped from SQL TIMESTAMP to java.util.Date via DateConverter.
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.util.Date> REMOVED = createField("removed", org.jooq.impl.SQLDataType.TIMESTAMP.asConvertedDataType(new io.cattle.platform.db.jooq.converter.DateConverter()), this, "");

    /**
     * The column <code>cattle.agent.remove_time</code>.
     * Mapped from SQL TIMESTAMP to java.util.Date via DateConverter.
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.util.Date> REMOVE_TIME = createField("remove_time", org.jooq.impl.SQLDataType.TIMESTAMP.asConvertedDataType(new io.cattle.platform.db.jooq.converter.DateConverter()), this, "");

    /**
     * The column <code>cattle.agent.data</code>.
     * Stored as a CLOB and converted to a Map via DataConverter.
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.util.Map<String,Object>> DATA = createField("data", org.jooq.impl.SQLDataType.CLOB.length(16777215).asConvertedDataType(new io.cattle.platform.db.jooq.converter.DataConverter()), this, "");

    /**
     * The column <code>cattle.agent.uri</code>.
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.String> URI = createField("uri", org.jooq.impl.SQLDataType.VARCHAR.length(255), this, "");

    /**
     * The column <code>cattle.agent.managed_config</code>. Defaults to true.
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.Boolean> MANAGED_CONFIG = createField("managed_config", org.jooq.impl.SQLDataType.BIT.nullable(false).defaulted(true), this, "");

    /**
     * The column <code>cattle.agent.agent_group_id</code>.
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.Long> AGENT_GROUP_ID = createField("agent_group_id", org.jooq.impl.SQLDataType.BIGINT, this, "");

    /**
     * The column <code>cattle.agent.zone_id</code>.
     */
    public final org.jooq.TableField<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.Long> ZONE_ID = createField("zone_id", org.jooq.impl.SQLDataType.BIGINT, this, "");

    /**
     * Create a <code>cattle.agent</code> table reference
     */
    public AgentTable() {
        this("agent", null);
    }

    /**
     * Create an aliased <code>cattle.agent</code> table reference
     */
    public AgentTable(java.lang.String alias) {
        this(alias, io.cattle.platform.core.model.tables.AgentTable.AGENT);
    }

    private AgentTable(java.lang.String alias, org.jooq.Table<io.cattle.platform.core.model.tables.records.AgentRecord> aliased) {
        this(alias, aliased, null);
    }

    private AgentTable(java.lang.String alias, org.jooq.Table<io.cattle.platform.core.model.tables.records.AgentRecord> aliased, org.jooq.Field<?>[] parameters) {
        super(alias, io.cattle.platform.core.model.CattleTable.CATTLE, aliased, parameters, "");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public org.jooq.Identity<io.cattle.platform.core.model.tables.records.AgentRecord, java.lang.Long> getIdentity() {
        return io.cattle.platform.core.model.Keys.IDENTITY_AGENT;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public org.jooq.UniqueKey<io.cattle.platform.core.model.tables.records.AgentRecord> getPrimaryKey() {
        return io.cattle.platform.core.model.Keys.KEY_AGENT_PRIMARY;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public java.util.List<org.jooq.UniqueKey<io.cattle.platform.core.model.tables.records.AgentRecord>> getKeys() {
        return java.util.Arrays.<org.jooq.UniqueKey<io.cattle.platform.core.model.tables.records.AgentRecord>>asList(io.cattle.platform.core.model.Keys.KEY_AGENT_PRIMARY, io.cattle.platform.core.model.Keys.KEY_AGENT_IDX_AGENT_UUID);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public java.util.List<org.jooq.ForeignKey<io.cattle.platform.core.model.tables.records.AgentRecord, ?>> getReferences() {
        return java.util.Arrays.<org.jooq.ForeignKey<io.cattle.platform.core.model.tables.records.AgentRecord, ?>>asList(io.cattle.platform.core.model.Keys.FK_AGENT__ACCOUNT_ID, io.cattle.platform.core.model.Keys.FK_AGENT__AGENT_GROUP_ID, io.cattle.platform.core.model.Keys.FK_AGENT__ZONE_ID);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public io.cattle.platform.core.model.tables.AgentTable as(java.lang.String alias) {
        return new io.cattle.platform.core.model.tables.AgentTable(alias, this);
    }

    /**
     * Rename this table
     */
    public io.cattle.platform.core.model.tables.AgentTable rename(java.lang.String name) {
        return new io.cattle.platform.core.model.tables.AgentTable(name, null);
    }
}
sonchang/cattle
code/iaas/model/src/main/java/io/cattle/platform/core/model/tables/AgentTable.java
Java
apache-2.0
7,604
/**
 * Unit tests for the DFP ad-server video module: buildDfpVideoUrl (standard
 * instream video) and buildAdpodVideoUrl (long-form / adpod).
 *
 * NOTE(review): 'exluded' and 'skipppable' below are typos, but they live in
 * runtime strings / fixture data, so they are deliberately left unchanged here.
 */
import { expect } from 'chai';
import parse from 'url-parse';
import { buildDfpVideoUrl, buildAdpodVideoUrl } from 'modules/dfpAdServerVideo.js';
import adUnit from 'test/fixtures/video/adUnit.json';
import * as utils from 'src/utils.js';
import { config } from 'src/config.js';
import { targeting } from 'src/targeting.js';
import { auctionManager } from 'src/auctionManager.js';
import { gdprDataHandler, uspDataHandler } from 'src/adapterManager.js';
import * as adpod from 'modules/adpod.js';
import { server } from 'test/mocks/xhr.js';

// Minimal cached-video bid shared (via deepClone) by most tests below.
const bid = {
  videoCacheKey: 'abc',
  adserverTargeting: {
    hb_uuid: 'abc',
    hb_cache_id: 'abc',
  },
};

describe('The DFP video support module', function () {
  it('should make a legal request URL when given the required params', function () {
    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bid,
      params: {
        'iu': 'my/adUnit',
        'description_url': 'someUrl.com',
      }
    }));

    expect(url.protocol).to.equal('https:');
    expect(url.host).to.equal('securepubads.g.doubleclick.net');

    const queryParams = utils.parseQS(url.query);
    expect(queryParams).to.have.property('correlator');
    expect(queryParams).to.have.property('description_url', 'someUrl.com');
    expect(queryParams).to.have.property('env', 'vp');
    expect(queryParams).to.have.property('gdfp_req', '1');
    expect(queryParams).to.have.property('iu', 'my/adUnit');
    expect(queryParams).to.have.property('output', 'vast');
    expect(queryParams).to.have.property('sz', '640x480');
    expect(queryParams).to.have.property('unviewed_position_start', '1');
    expect(queryParams).to.have.property('url');
  });

  it('can take an adserver url as a parameter', function () {
    const bidCopy = utils.deepClone(bid);
    bidCopy.vastUrl = 'vastUrl.example';
    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bidCopy,
      url: 'https://video.adserver.example/',
    }));

    expect(url.host).to.equal('video.adserver.example');

    const queryObject = utils.parseQS(url.query);
    expect(queryObject.description_url).to.equal('vastUrl.example');
  });

  it('requires a params object or url', function () {
    const url = buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bid,
    });

    expect(url).to.be.undefined;
  });

  it('overwrites url params when both url and params object are given', function () {
    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bid,
      url: 'https://video.adserver.example/ads?sz=640x480&iu=/123/aduniturl&impl=s',
      params: { iu: 'my/adUnit' }
    }));

    const queryObject = utils.parseQS(url.query);
    expect(queryObject.iu).to.equal('my/adUnit');
  });

  it('should override param defaults with user-provided ones', function () {
    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bid,
      params: {
        'iu': 'my/adUnit',
        'output': 'vast',
      }
    }));

    expect(utils.parseQS(url.query)).to.have.property('output', 'vast');
  });

  it('should include the cache key and adserver targeting in cust_params', function () {
    const bidCopy = utils.deepClone(bid);
    bidCopy.adserverTargeting = Object.assign(bidCopy.adserverTargeting, {
      hb_adid: 'ad_id',
    });

    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bidCopy,
      params: {
        'iu': 'my/adUnit'
      }
    }));
    const queryObject = utils.parseQS(url.query);
    // cust_params is itself URL-encoded inside the outer query string.
    const customParams = utils.parseQS('?' + decodeURIComponent(queryObject.cust_params));

    expect(customParams).to.have.property('hb_adid', 'ad_id');
    expect(customParams).to.have.property('hb_uuid', bid.videoCacheKey);
    expect(customParams).to.have.property('hb_cache_id', bid.videoCacheKey);
  });

  // ---- Privacy / consent propagation ----

  it('should include the us_privacy key when USP Consent is available', function () {
    let uspDataHandlerStub = sinon.stub(uspDataHandler, 'getConsentData');
    uspDataHandlerStub.returns('1YYY');

    const bidCopy = utils.deepClone(bid);
    bidCopy.adserverTargeting = Object.assign(bidCopy.adserverTargeting, {
      hb_adid: 'ad_id',
    });

    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bidCopy,
      params: {
        'iu': 'my/adUnit'
      }
    }));

    const queryObject = utils.parseQS(url.query);
    expect(queryObject.us_privacy).to.equal('1YYY');

    uspDataHandlerStub.restore();
  });

  it('should not include the us_privacy key when USP Consent is not available', function () {
    const bidCopy = utils.deepClone(bid);
    bidCopy.adserverTargeting = Object.assign(bidCopy.adserverTargeting, {
      hb_adid: 'ad_id',
    });

    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bidCopy,
      params: {
        'iu': 'my/adUnit'
      }
    }));

    const queryObject = utils.parseQS(url.query);
    expect(queryObject.us_privacy).to.equal(undefined);
  });

  it('should include the GDPR keys when GDPR Consent is available', function () {
    let gdprDataHandlerStub = sinon.stub(gdprDataHandler, 'getConsentData');
    gdprDataHandlerStub.returns({
      gdprApplies: true,
      consentString: 'consent',
      addtlConsent: 'moreConsent'
    });

    const bidCopy = utils.deepClone(bid);
    bidCopy.adserverTargeting = Object.assign(bidCopy.adserverTargeting, {
      hb_adid: 'ad_id',
    });

    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bidCopy,
      params: {
        'iu': 'my/adUnit'
      }
    }));

    const queryObject = utils.parseQS(url.query);
    expect(queryObject.gdpr).to.equal('1');
    expect(queryObject.gdpr_consent).to.equal('consent');
    expect(queryObject.addtl_consent).to.equal('moreConsent');

    gdprDataHandlerStub.restore();
  });

  it('should not include the GDPR keys when GDPR Consent is not available', function () {
    const bidCopy = utils.deepClone(bid);
    bidCopy.adserverTargeting = Object.assign(bidCopy.adserverTargeting, {
      hb_adid: 'ad_id',
    });

    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bidCopy,
      params: {
        'iu': 'my/adUnit'
      }
    }));

    const queryObject = utils.parseQS(url.query);
    expect(queryObject.gdpr).to.equal(undefined);
    expect(queryObject.gdpr_consent).to.equal(undefined);
    expect(queryObject.addtl_consent).to.equal(undefined);
  });

  it('should only include the GDPR keys for GDPR Consent fields with values', function () {
    let gdprDataHandlerStub = sinon.stub(gdprDataHandler, 'getConsentData');
    gdprDataHandlerStub.returns({
      gdprApplies: true,
      consentString: 'consent',
    });

    const bidCopy = utils.deepClone(bid);
    bidCopy.adserverTargeting = Object.assign(bidCopy.adserverTargeting, {
      hb_adid: 'ad_id',
    });

    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bidCopy,
      params: {
        'iu': 'my/adUnit'
      }
    }));

    const queryObject = utils.parseQS(url.query);
    expect(queryObject.gdpr).to.equal('1');
    expect(queryObject.gdpr_consent).to.equal('consent');
    expect(queryObject.addtl_consent).to.equal(undefined);

    gdprDataHandlerStub.restore();
  });

  // ---- sendAllBids targeting ----

  describe('special targeting unit test', function () {
    const allTargetingData = {
      'hb_format': 'video',
      'hb_source': 'client',
      'hb_size': '640x480',
      'hb_pb': '5.00',
      'hb_adid': '2c4f6cc3ba128a',
      'hb_bidder': 'testBidder2',
      'hb_format_testBidder2': 'video',
      'hb_source_testBidder2': 'client',
      'hb_size_testBidder2': '640x480',
      'hb_pb_testBidder2': '5.00',
      'hb_adid_testBidder2': '2c4f6cc3ba128a',
      'hb_bidder_testBidder2': 'testBidder2',
      'hb_format_appnexus': 'video',
      'hb_source_appnexus': 'client',
      'hb_size_appnexus': '640x480',
      'hb_pb_appnexus': '5.00',
      'hb_adid_appnexus': '44e0b5f2e5cace',
      'hb_bidder_appnexus': 'appnexus'
    };
    let targetingStub;

    before(function () {
      targetingStub = sinon.stub(targeting, 'getAllTargeting');
      targetingStub.returns({'video1': allTargetingData});

      config.setConfig({
        enableSendAllBids: true
      });
    });

    after(function () {
      config.resetConfig();
      targetingStub.restore();
    });

    it('should include all adserver targeting in cust_params if pbjs.enableSendAllBids is true', function () {
      const adUnitsCopy = utils.deepClone(adUnit);
      adUnitsCopy.bids.push({
        'bidder': 'testBidder2',
        'params': {
          'placementId': '9333431',
          'video': {
            'skipppable': false,
            'playback_methods': ['auto_play_sound_off']
          }
        }
      });

      const bidCopy = utils.deepClone(bid);
      bidCopy.adserverTargeting = Object.assign(bidCopy.adserverTargeting, {
        hb_adid: 'ad_id',
      });

      const url = parse(buildDfpVideoUrl({
        adUnit: adUnitsCopy,
        bid: bidCopy,
        params: {
          'iu': 'my/adUnit'
        }
      }));
      const queryObject = utils.parseQS(url.query);
      const customParams = utils.parseQS('?' + decodeURIComponent(queryObject.cust_params));

      expect(customParams).to.have.property('hb_adid', 'ad_id');
      expect(customParams).to.have.property('hb_uuid', bid.videoCacheKey);
      expect(customParams).to.have.property('hb_cache_id', bid.videoCacheKey);
      expect(customParams).to.have.property('hb_bidder_appnexus', 'appnexus');
      expect(customParams).to.have.property('hb_bidder_testBidder2', 'testBidder2');
    });
  });

  it('should merge the user-provided cust_params with the default ones', function () {
    const bidCopy = utils.deepClone(bid);
    bidCopy.adserverTargeting = Object.assign(bidCopy.adserverTargeting, {
      hb_adid: 'ad_id',
    });

    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bidCopy,
      params: {
        'iu': 'my/adUnit',
        cust_params: {
          'my_targeting': 'foo',
        },
      },
    }));
    const queryObject = utils.parseQS(url.query);
    const customParams = utils.parseQS('?' + decodeURIComponent(queryObject.cust_params));

    expect(customParams).to.have.property('hb_adid', 'ad_id');
    expect(customParams).to.have.property('my_targeting', 'foo');
  });

  it('should merge the user-provided cust-params with the default ones when using url object', function () {
    const bidCopy = utils.deepClone(bid);
    bidCopy.adserverTargeting = Object.assign(bidCopy.adserverTargeting, {
      hb_adid: 'ad_id',
    });

    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bidCopy,
      url: 'https://video.adserver.example/ads?sz=640x480&iu=/123/aduniturl&impl=s&cust_params=section%3dblog%26mykey%3dmyvalue'
    }));

    const queryObject = utils.parseQS(url.query);
    const customParams = utils.parseQS('?' + decodeURIComponent(queryObject.cust_params));

    expect(customParams).to.have.property('hb_adid', 'ad_id');
    expect(customParams).to.have.property('section', 'blog');
    expect(customParams).to.have.property('mykey', 'myvalue');
    expect(customParams).to.have.property('hb_uuid', 'abc');
    expect(customParams).to.have.property('hb_cache_id', 'abc');
  });

  it('should not overwrite an existing description_url for object input and cache disabled', function () {
    const bidCopy = utils.deepClone(bid);
    bidCopy.vastUrl = 'vastUrl.example';

    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bidCopy,
      params: {
        iu: 'my/adUnit',
        description_url: 'descriptionurl.example'
      }
    }));

    const queryObject = utils.parseQS(url.query);
    expect(queryObject.description_url).to.equal('descriptionurl.example');
  });

  it('should work with nobid responses', function () {
    const url = buildDfpVideoUrl({
      adUnit: adUnit,
      params: { 'iu': 'my/adUnit' }
    });

    expect(url).to.be.a('string');
  });

  it('should include hb_uuid and hb_cache_id in cust_params when both keys are exluded from overwritten bidderSettings', function () {
    const bidCopy = utils.deepClone(bid);
    delete bidCopy.adserverTargeting.hb_uuid;
    delete bidCopy.adserverTargeting.hb_cache_id;

    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bidCopy,
      params: {
        'iu': 'my/adUnit'
      }
    }));
    const queryObject = utils.parseQS(url.query);
    const customParams = utils.parseQS('?' + decodeURIComponent(queryObject.cust_params));

    expect(customParams).to.have.property('hb_uuid', bid.videoCacheKey);
    expect(customParams).to.have.property('hb_cache_id', bid.videoCacheKey);
  });

  it('should include hb_uuid and hb_cache_id in cust params from overwritten standard bidderSettings', function () {
    const bidCopy = utils.deepClone(bid);
    bidCopy.adserverTargeting = Object.assign(bidCopy.adserverTargeting, {
      hb_uuid: 'def',
      hb_cache_id: 'def'
    });

    const url = parse(buildDfpVideoUrl({
      adUnit: adUnit,
      bid: bidCopy,
      params: {
        'iu': 'my/adUnit'
      }
    }));
    const queryObject = utils.parseQS(url.query);
    const customParams = utils.parseQS('?' + decodeURIComponent(queryObject.cust_params));

    expect(customParams).to.have.property('hb_uuid', 'def');
    expect(customParams).to.have.property('hb_cache_id', 'def');
  });

  // ---- Long-form (adpod) master tag ----

  describe('adpod unit tests', function () {
    let amStub;
    let amGetAdUnitsStub;

    before(function () {
      let adUnits = [{
        code: 'adUnitCode-1',
        mediaTypes: {
          video: {
            context: 'adpod',
            playerSize: [640, 480],
            adPodDurationSec: 60,
            durationRangeSec: [15, 30],
            requireExactDuration: true
          }
        },
        bids: [
          {
            bidder: 'appnexus',
            params: {
              placementId: 14542875,
            }
          }
        ]
      }];

      amGetAdUnitsStub = sinon.stub(auctionManager, 'getAdUnits');
      amGetAdUnitsStub.returns(adUnits);
      amStub = sinon.stub(auctionManager, 'getBidsReceived');
    });

    beforeEach(function () {
      config.setConfig({
        adpod: {
          brandCategoryExclusion: true,
          deferCaching: false
        }
      });
    })

    afterEach(function() {
      config.resetConfig();
    });

    after(function () {
      amGetAdUnitsStub.restore();
      amStub.restore();
    });

    it('should return masterTag url', function() {
      amStub.returns(getBidsReceived());
      let uspDataHandlerStub = sinon.stub(uspDataHandler, 'getConsentData');
      uspDataHandlerStub.returns('1YYY');
      let gdprDataHandlerStub = sinon.stub(gdprDataHandler, 'getConsentData');
      gdprDataHandlerStub.returns({
        gdprApplies: true,
        consentString: 'consent',
        addtlConsent: 'moreConsent'
      });
      let url;
      parse(buildAdpodVideoUrl({
        code: 'adUnitCode-1',
        callback: handleResponse,
        params: {
          'iu': 'my/adUnit',
          'description_url': 'someUrl.com',
        }
      }));

      // buildAdpodVideoUrl is async; assertions run inside the callback.
      function handleResponse(err, masterTag) {
        if (err) {
          return;
        }
        url = parse(masterTag);

        expect(url.protocol).to.equal('https:');
        expect(url.host).to.equal('securepubads.g.doubleclick.net');

        const queryParams = utils.parseQS(url.query);
        expect(queryParams).to.have.property('correlator');
        expect(queryParams).to.have.property('description_url', 'someUrl.com');
        expect(queryParams).to.have.property('env', 'vp');
        expect(queryParams).to.have.property('gdfp_req', '1');
        expect(queryParams).to.have.property('iu', 'my/adUnit');
        expect(queryParams).to.have.property('output', 'vast');
        expect(queryParams).to.have.property('sz', '640x480');
        expect(queryParams).to.have.property('unviewed_position_start', '1');
        expect(queryParams).to.have.property('url');
        expect(queryParams).to.have.property('cust_params');
        expect(queryParams).to.have.property('us_privacy', '1YYY');
        expect(queryParams).to.have.property('gdpr', '1');
        expect(queryParams).to.have.property('gdpr_consent', 'consent');
        expect(queryParams).to.have.property('addtl_consent', 'moreConsent');

        const custParams = utils.parseQS(decodeURIComponent(queryParams.cust_params));
        expect(custParams).to.have.property('hb_cache_id', '123');
        expect(custParams).to.have.property('hb_pb_cat_dur', '15.00_395_15s,15.00_406_30s,10.00_395_15s');
        uspDataHandlerStub.restore();
        gdprDataHandlerStub.restore();
      }
    });

    it('should return masterTag url with correct custom params when brandCategoryExclusion is false', function() {
      config.setConfig({
        adpod: {
          brandCategoryExclusion: false,
        }
      });
      // Without brand-category exclusion the bids carry no meta / category label.
      function getBids() {
        let bids = [
          createBid(10, 'adUnitCode-1', 15, '10.00_15s', '123', '395', '10.00'),
          createBid(15, 'adUnitCode-1', 15, '15.00_15s', '123', '395', '15.00'),
          createBid(25, 'adUnitCode-1', 30, '15.00_30s', '123', '406', '25.00'),
        ];
        bids.forEach((bid) => {
          delete bid.meta;
        });
        return bids;
      }
      amStub.returns(getBids());
      let url;
      parse(buildAdpodVideoUrl({
        code: 'adUnitCode-1',
        callback: handleResponse,
        params: {
          'iu': 'my/adUnit',
          'description_url': 'someUrl.com',
        }
      }));

      function handleResponse(err, masterTag) {
        if (err) {
          return;
        }
        url = parse(masterTag);
        expect(url.protocol).to.equal('https:');
        expect(url.host).to.equal('securepubads.g.doubleclick.net');

        const queryParams = utils.parseQS(url.query);
        expect(queryParams).to.have.property('correlator');
        expect(queryParams).to.have.property('description_url', 'someUrl.com');
        expect(queryParams).to.have.property('env', 'vp');
        expect(queryParams).to.have.property('gdfp_req', '1');
        expect(queryParams).to.have.property('iu', 'my/adUnit');
        expect(queryParams).to.have.property('output', 'xml_vast3');
        expect(queryParams).to.have.property('sz', '640x480');
        expect(queryParams).to.have.property('unviewed_position_start', '1');
        expect(queryParams).to.have.property('url');
        expect(queryParams).to.have.property('cust_params');

        const custParams = utils.parseQS(decodeURIComponent(queryParams.cust_params));
        expect(custParams).to.have.property('hb_cache_id', '123');
        expect(custParams).to.have.property('hb_pb_cat_dur', '10.00_15s,15.00_15s,15.00_30s');
      }
    });

    it('should handle error when cache fails', function() {
      config.setConfig({
        adpod: {
          brandCategoryExclusion: true,
          deferCaching: true
        }
      });
      amStub.returns(getBidsReceived());

      parse(buildAdpodVideoUrl({
        code: 'adUnitCode-1',
        callback: handleResponse,
        params: {
          'iu': 'my/adUnit',
          'description_url': 'someUrl.com',
        }
      }));

      // Simulate the cache endpoint failing with a 503.
      server.requests[0].respond(503, {
        'Content-Type': 'plain/text',
      }, 'The server could not save anything at the moment.');

      function handleResponse(err, masterTag) {
        expect(masterTag).to.be.null;
        expect(err).to.be.an('error');
      }
    });
  })
});

// Default set of adpod bids used when brand-category exclusion is enabled.
function getBidsReceived() {
  return [
    createBid(10, 'adUnitCode-1', 15, '10.00_395_15s', '123', '395', '10.00'),
    createBid(15, 'adUnitCode-1', 15, '15.00_395_15s', '123', '395', '15.00'),
    createBid(25, 'adUnitCode-1', 30, '15.00_406_30s', '123', '406', '25.00'),
  ]
}

// Build a full adpod bid-response fixture.
function createBid(cpm, adUnitCode, durationBucket, priceIndustryDuration, uuid, label, hbpb) {
  return {
    'bidderCode': 'appnexus',
    'width': 640,
    'height': 360,
    'statusMessage': 'Bid available',
    'adId': '28f24ced14586c',
    'mediaType': 'video',
    'source': 'client',
    'requestId': '28f24ced14586c',
    'cpm': cpm,
    'creativeId': 97517771,
    'currency': 'USD',
    'netRevenue': true,
    'ttl': 3600,
    'adUnitCode': adUnitCode,
    'video': {
      'context': 'adpod',
      'durationBucket': durationBucket
    },
    'appnexus': {
      'buyerMemberId': 9325
    },
    'vastUrl': 'http://some-vast-url.com',
    'vastImpUrl': 'http://some-vast-imp-url.com',
    'auctionId': 'ec266b31-d652-49c5-8295-e83fafe5532b',
    'responseTimestamp': 1548442460888,
    'requestTimestamp': 1548442460827,
    'bidder': 'appnexus',
    'timeToRespond': 61,
    'pbLg': '5.00',
    'pbMg': '5.00',
    'pbHg': '5.00',
    'pbAg': '5.00',
    'pbDg': '5.00',
    'pbCg': '',
    'size': '640x360',
    'adserverTargeting': {
      'hb_bidder': 'appnexus',
      'hb_adid': '28f24ced14586c',
      'hb_pb': hbpb,
      'hb_size': '640x360',
      'hb_source': 'client',
      'hb_format': 'video',
      'hb_pb_cat_dur': priceIndustryDuration,
      'hb_cache_id': uuid
    },
    'customCacheKey': `${priceIndustryDuration}_${uuid}`,
    'meta': {
      'primaryCatId': 'iab-1',
      'adServerCatId': label
    },
    'videoCacheKey': '4cf395af-8fee-4960-af0e-88d44e399f14'
  }
}
prebid/Prebid.js
test/spec/modules/dfpAdServerVideo_spec.js
JavaScript
apache-2.0
21,329
# Copyright 2015 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'google/apis/core/base_service' require 'google/apis/core/json_representation' require 'google/apis/core/hashable' require 'google/apis/errors' module Google module Apis module BooksV1 # Books API # # Lets you search for books and manage your Google Books library. # # @example # require 'google/apis/books_v1' # # Books = Google::Apis::BooksV1 # Alias the module # service = Books::BooksService.new # # @see https://developers.google.com/books/docs/v1/getting_started class BooksService < Google::Apis::Core::BaseService # @return [String] # API key. Your API key identifies your project and provides you with API access, # quota, and reports. Required unless you provide an OAuth 2.0 token. attr_accessor :key # @return [String] # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. attr_accessor :quota_user # @return [String] # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. attr_accessor :user_ip def initialize super('https://www.googleapis.com/', 'books/v1/') end # Retrieves metadata for a specific bookshelf for the specified user. # @param [String] user_id # ID of user for whom to retrieve bookshelves. # @param [String] shelf # ID of bookshelf to retrieve. 
# @param [String] source # String to identify the originator of this request. # @param [String] fields # Selector specifying which fields to include in a partial response. # @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. # @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::Bookshelf] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::Bookshelf] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def get_bookshelf(user_id, shelf, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'users/{userId}/bookshelves/{shelf}' command = make_simple_command(:get, path, options) command.response_representation = Google::Apis::BooksV1::Bookshelf::Representation command.response_class = Google::Apis::BooksV1::Bookshelf command.params['userId'] = user_id unless user_id.nil? command.params['shelf'] = shelf unless shelf.nil? command.query['source'] = source unless source.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Retrieves a list of public bookshelves for the specified user. # @param [String] user_id # ID of user for whom to retrieve bookshelves. 
# @param [String] source # String to identify the originator of this request. # @param [String] fields # Selector specifying which fields to include in a partial response. # @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. # @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::Bookshelves] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::Bookshelves] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def list_bookshelves(user_id, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'users/{userId}/bookshelves' command = make_simple_command(:get, path, options) command.response_representation = Google::Apis::BooksV1::Bookshelves::Representation command.response_class = Google::Apis::BooksV1::Bookshelves command.params['userId'] = user_id unless user_id.nil? command.query['source'] = source unless source.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Retrieves volumes in a specific bookshelf for the specified user. # @param [String] user_id # ID of user for whom to retrieve bookshelf volumes. 
# @param [String] shelf # ID of bookshelf to retrieve volumes. # @param [Fixnum] max_results # Maximum number of results to return # @param [Boolean] show_preorders # Set to true to show pre-ordered books. Defaults to false. # @param [String] source # String to identify the originator of this request. # @param [Fixnum] start_index # Index of the first element to return (starts at 0) # @param [String] fields # Selector specifying which fields to include in a partial response. # @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. # @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::Volumes] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::Volumes] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def list_bookshelf_volumes(user_id, shelf, max_results: nil, show_preorders: nil, source: nil, start_index: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'users/{userId}/bookshelves/{shelf}/volumes' command = make_simple_command(:get, path, options) command.response_representation = Google::Apis::BooksV1::Volumes::Representation command.response_class = Google::Apis::BooksV1::Volumes command.params['userId'] = user_id unless user_id.nil? command.params['shelf'] = shelf unless shelf.nil? 
command.query['maxResults'] = max_results unless max_results.nil? command.query['showPreorders'] = show_preorders unless show_preorders.nil? command.query['source'] = source unless source.nil? command.query['startIndex'] = start_index unless start_index.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # # @param [String] drive_document_id # A drive document id. The upload_client_token must not be set. # @param [String] mime_type # The document MIME type. It can be set only if the drive_document_id is set. # @param [String] name # The document name. It can be set only if the drive_document_id is set. # @param [String] upload_client_token # @param [String] fields # Selector specifying which fields to include in a partial response. # @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. 
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::LoadingResource] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::LoadingResource]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def add_book(drive_document_id: nil, mime_type: nil, name: nil, upload_client_token: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  # POST to the cloud-loading endpoint; note the query keys here are
  # snake_case on the wire, unlike most other methods in this service.
  cmd = make_simple_command(:post, 'cloudloading/addBook', options)
  cmd.response_representation = Google::Apis::BooksV1::LoadingResource::Representation
  cmd.response_class = Google::Apis::BooksV1::LoadingResource
  cmd.query['drive_document_id'] = drive_document_id unless drive_document_id.nil?
  cmd.query['mime_type'] = mime_type unless mime_type.nil?
  cmd.query['name'] = name unless name.nil?
  cmd.query['upload_client_token'] = upload_client_token unless upload_client_token.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Remove the book and its contents
# @param [String] volume_id
#   The id of the book to be removed.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [NilClass] No result returned for this method
# @yieldparam err [StandardError] error object if request failed
#
# @return [void]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def delete_book(volume_id, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  # No response representation: the server returns an empty body.
  cmd = make_simple_command(:post, 'cloudloading/deleteBook', options)
  cmd.query['volumeId'] = volume_id unless volume_id.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

#
# @param [Google::Apis::BooksV1::LoadingResource] loading_resource_object
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::LoadingResource] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::LoadingResource]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def update_book(loading_resource_object = nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  # Request and response bodies are both LoadingResource objects.
  cmd = make_simple_command(:post, 'cloudloading/updateBook', options)
  cmd.request_representation = Google::Apis::BooksV1::LoadingResource::Representation
  cmd.request_object = loading_resource_object
  cmd.response_representation = Google::Apis::BooksV1::LoadingResource::Representation
  cmd.response_class = Google::Apis::BooksV1::LoadingResource
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Returns a list of offline dictionary metadata available
# @param [String] cpksver
#   The device/version ID from which to request the data.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::Metadata] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::Metadata]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_offline_metadata_dictionary(cpksver, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:get, 'dictionary/listOfflineMetadata', options)
  cmd.response_representation = Google::Apis::BooksV1::Metadata::Representation
  cmd.response_class = Google::Apis::BooksV1::Metadata
  cmd.query['cpksver'] = cpksver unless cpksver.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Gets the layer summary for a volume.
# @param [String] volume_id
#   The volume to retrieve layers for.
# @param [String] summary_id
#   The ID for the layer to get the summary for.
# @param [String] content_version
#   The content version for the requested volume.
# @param [String] source
#   String to identify the originator of this request.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::LayerSummary] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::LayerSummary]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def get_layer(volume_id, summary_id, content_version: nil, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:get, 'volumes/{volumeId}/layersummary/{summaryId}', options)
  cmd.response_representation = Google::Apis::BooksV1::LayerSummary::Representation
  cmd.response_class = Google::Apis::BooksV1::LayerSummary
  cmd.params['volumeId'] = volume_id unless volume_id.nil?
  cmd.params['summaryId'] = summary_id unless summary_id.nil?
  cmd.query['contentVersion'] = content_version unless content_version.nil?
  cmd.query['source'] = source unless source.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# List the layer summaries for a volume.
# @param [String] volume_id
#   The volume to retrieve layers for.
# @param [String] content_version
#   The content version for the requested volume.
# @param [Fixnum] max_results
#   Maximum number of results to return
# @param [String] page_token
#   The value of the nextToken from the previous page.
# @param [String] source
#   String to identify the originator of this request.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::LayerSummaries] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::LayerSummaries]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_layers(volume_id, content_version: nil, max_results: nil, page_token: nil, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:get, 'volumes/{volumeId}/layersummary', options)
  cmd.response_representation = Google::Apis::BooksV1::LayerSummaries::Representation
  cmd.response_class = Google::Apis::BooksV1::LayerSummaries
  cmd.params['volumeId'] = volume_id unless volume_id.nil?
  cmd.query['contentVersion'] = content_version unless content_version.nil?
  cmd.query['maxResults'] = max_results unless max_results.nil?
  cmd.query['pageToken'] = page_token unless page_token.nil?
  cmd.query['source'] = source unless source.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Gets the annotation data.
# @param [String] volume_id
#   The volume to retrieve annotations for.
# @param [String] layer_id
#   The ID for the layer to get the annotations.
# @param [String] annotation_data_id
#   The ID of the annotation data to retrieve.
# @param [String] content_version
#   The content version for the volume you are trying to retrieve.
# @param [Boolean] allow_web_definitions
#   For the dictionary layer. Whether or not to allow web definitions.
# @param [Fixnum] h
#   The requested pixel height for any images. If height is provided width must
#   also be provided.
# @param [String] locale
#   The locale information for the data. ISO-639-1 language and ISO-3166-1 country
#   code. Ex: 'en_US'.
# @param [Fixnum] scale
#   The requested scale for the image.
# @param [String] source
#   String to identify the originator of this request.
# @param [Fixnum] w
#   The requested pixel width for any images. If width is provided height must
#   also be provided.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::AnnotationData] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::AnnotationData]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def get_layer_annotation_data(volume_id, layer_id, annotation_data_id, content_version, allow_web_definitions: nil, h: nil, locale: nil, scale: nil, source: nil, w: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  # content_version is a required positional argument but travels as a
  # query parameter, not as part of the URL template.
  cmd = make_simple_command(:get, 'volumes/{volumeId}/layers/{layerId}/data/{annotationDataId}', options)
  cmd.response_representation = Google::Apis::BooksV1::AnnotationData::Representation
  cmd.response_class = Google::Apis::BooksV1::AnnotationData
  cmd.params['volumeId'] = volume_id unless volume_id.nil?
  cmd.params['layerId'] = layer_id unless layer_id.nil?
  cmd.params['annotationDataId'] = annotation_data_id unless annotation_data_id.nil?
  cmd.query['allowWebDefinitions'] = allow_web_definitions unless allow_web_definitions.nil?
  cmd.query['contentVersion'] = content_version unless content_version.nil?
  cmd.query['h'] = h unless h.nil?
  cmd.query['locale'] = locale unless locale.nil?
  cmd.query['scale'] = scale unless scale.nil?
  cmd.query['source'] = source unless source.nil?
  cmd.query['w'] = w unless w.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Gets the annotation data for a volume and layer.
# @param [String] volume_id
#   The volume to retrieve annotation data for.
# @param [String] layer_id
#   The ID for the layer to get the annotation data.
# @param [String] content_version
#   The content version for the requested volume.
# @param [Array<String>, String] annotation_data_id
#   The list of Annotation Data Ids to retrieve. Pagination is ignored if this is
#   set.
# @param [Fixnum] h
#   The requested pixel height for any images. If height is provided width must
#   also be provided.
# @param [String] locale
#   The locale information for the data. ISO-639-1 language and ISO-3166-1 country
#   code. Ex: 'en_US'.
# @param [Fixnum] max_results
#   Maximum number of results to return
# @param [String] page_token
#   The value of the nextToken from the previous page.
# @param [Fixnum] scale
#   The requested scale for the image.
# @param [String] source
#   String to identify the originator of this request.
# @param [String] updated_max
#   RFC 3339 timestamp to restrict to items updated prior to this timestamp (
#   exclusive).
# @param [String] updated_min
#   RFC 3339 timestamp to restrict to items updated since this timestamp (
#   inclusive).
# @param [Fixnum] w
#   The requested pixel width for any images. If width is provided height must
#   also be provided.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::AnnotationsData] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::AnnotationsData]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_layer_annotation_data(volume_id, layer_id, content_version, annotation_data_id: nil, h: nil, locale: nil, max_results: nil, page_token: nil, scale: nil, source: nil, updated_max: nil, updated_min: nil, w: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:get, 'volumes/{volumeId}/layers/{layerId}/data', options)
  cmd.response_representation = Google::Apis::BooksV1::AnnotationsData::Representation
  cmd.response_class = Google::Apis::BooksV1::AnnotationsData
  cmd.params['volumeId'] = volume_id unless volume_id.nil?
  cmd.params['layerId'] = layer_id unless layer_id.nil?
  cmd.query['annotationDataId'] = annotation_data_id unless annotation_data_id.nil?
  cmd.query['contentVersion'] = content_version unless content_version.nil?
  cmd.query['h'] = h unless h.nil?
  cmd.query['locale'] = locale unless locale.nil?
  cmd.query['maxResults'] = max_results unless max_results.nil?
  cmd.query['pageToken'] = page_token unless page_token.nil?
  cmd.query['scale'] = scale unless scale.nil?
  cmd.query['source'] = source unless source.nil?
  cmd.query['updatedMax'] = updated_max unless updated_max.nil?
  cmd.query['updatedMin'] = updated_min unless updated_min.nil?
  cmd.query['w'] = w unless w.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Gets the volume annotation.
# @param [String] volume_id
#   The volume to retrieve annotations for.
# @param [String] layer_id
#   The ID for the layer to get the annotations.
# @param [String] annotation_id
#   The ID of the volume annotation to retrieve.
# @param [String] locale
#   The locale information for the data. ISO-639-1 language and ISO-3166-1 country
#   code. Ex: 'en_US'.
# @param [String] source
#   String to identify the originator of this request.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::VolumeAnnotation] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::VolumeAnnotation] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def get_layer_volume_annotation(volume_id, layer_id, annotation_id, locale: nil, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'volumes/{volumeId}/layers/{layerId}/annotations/{annotationId}' command = make_simple_command(:get, path, options) command.response_representation = Google::Apis::BooksV1::VolumeAnnotation::Representation command.response_class = Google::Apis::BooksV1::VolumeAnnotation command.params['volumeId'] = volume_id unless volume_id.nil? command.params['layerId'] = layer_id unless layer_id.nil? command.params['annotationId'] = annotation_id unless annotation_id.nil? command.query['locale'] = locale unless locale.nil? command.query['source'] = source unless source.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Gets the volume annotations for a volume and layer. # @param [String] volume_id # The volume to retrieve annotations for. # @param [String] layer_id # The ID for the layer to get the annotations. # @param [String] content_version # The content version for the requested volume. # @param [String] end_offset # The end offset to end retrieving data from. # @param [String] end_position # The end position to end retrieving data from. 
# @param [String] locale # The locale information for the data. ISO-639-1 language and ISO-3166-1 country # code. Ex: 'en_US'. # @param [Fixnum] max_results # Maximum number of results to return # @param [String] page_token # The value of the nextToken from the previous page. # @param [Boolean] show_deleted # Set to true to return deleted annotations. updatedMin must be in the request # to use this. Defaults to false. # @param [String] source # String to identify the originator of this request. # @param [String] start_offset # The start offset to start retrieving data from. # @param [String] start_position # The start position to start retrieving data from. # @param [String] updated_max # RFC 3339 timestamp to restrict to items updated prior to this timestamp ( # exclusive). # @param [String] updated_min # RFC 3339 timestamp to restrict to items updated since this timestamp ( # inclusive). # @param [String] volume_annotations_version # The version of the volume annotations that you are requesting. # @param [String] fields # Selector specifying which fields to include in a partial response. # @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. 
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::Volumeannotations] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::Volumeannotations]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_layer_volume_annotations(volume_id, layer_id, content_version, end_offset: nil, end_position: nil, locale: nil, max_results: nil, page_token: nil, show_deleted: nil, source: nil, start_offset: nil, start_position: nil, updated_max: nil, updated_min: nil, volume_annotations_version: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  # GET on the layer resource; the response is deserialized as Volumeannotations.
  path = 'volumes/{volumeId}/layers/{layerId}'
  command = make_simple_command(:get, path, options)
  command.response_representation = Google::Apis::BooksV1::Volumeannotations::Representation
  command.response_class = Google::Apis::BooksV1::Volumeannotations
  # Path parameters.
  command.params['volumeId'] = volume_id unless volume_id.nil?
  command.params['layerId'] = layer_id unless layer_id.nil?
  # Query parameters; nil-valued arguments are omitted from the request.
  command.query['contentVersion'] = content_version unless content_version.nil?
  command.query['endOffset'] = end_offset unless end_offset.nil?
  command.query['endPosition'] = end_position unless end_position.nil?
  command.query['locale'] = locale unless locale.nil?
  command.query['maxResults'] = max_results unless max_results.nil?
  command.query['pageToken'] = page_token unless page_token.nil?
  command.query['showDeleted'] = show_deleted unless show_deleted.nil?
  command.query['source'] = source unless source.nil?
  command.query['startOffset'] = start_offset unless start_offset.nil?
  command.query['startPosition'] = start_position unless start_position.nil?
  command.query['updatedMax'] = updated_max unless updated_max.nil?
  command.query['updatedMin'] = updated_min unless updated_min.nil?
  command.query['volumeAnnotationsVersion'] = volume_annotations_version unless volume_annotations_version.nil?
  command.query['fields'] = fields unless fields.nil?
  command.query['quotaUser'] = quota_user unless quota_user.nil?
  command.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(command, &block)
end

# Gets the current settings for the user.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::UserSettings] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::UserSettings] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def get_user_settings(fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'myconfig/getUserSettings' command = make_simple_command(:get, path, options) command.response_representation = Google::Apis::BooksV1::UserSettings::Representation command.response_class = Google::Apis::BooksV1::UserSettings command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Release downloaded content access restriction. # @param [Array<String>, String] volume_ids # The volume(s) to release restrictions for. # @param [String] cpksver # The device/version ID from which to release the restriction. # @param [String] locale # ISO-639-1, ISO-3166-1 codes for message localization, i.e. en_US. # @param [String] source # String to identify the originator of this request. # @param [String] fields # Selector specifying which fields to include in a partial response. # @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. 
# @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::DownloadAccesses] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::DownloadAccesses] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def release_download_access(volume_ids, cpksver, locale: nil, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'myconfig/releaseDownloadAccess' command = make_simple_command(:post, path, options) command.response_representation = Google::Apis::BooksV1::DownloadAccesses::Representation command.response_class = Google::Apis::BooksV1::DownloadAccesses command.query['cpksver'] = cpksver unless cpksver.nil? command.query['locale'] = locale unless locale.nil? command.query['source'] = source unless source.nil? command.query['volumeIds'] = volume_ids unless volume_ids.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Request concurrent and download access restrictions. # @param [String] source # String to identify the originator of this request. # @param [String] volume_id # The volume to request concurrent/download restrictions for. # @param [String] nonce # The client nonce value. # @param [String] cpksver # The device/version ID from which to request the restrictions. # @param [String] license_types # The type of access license to request. If not specified, the default is BOTH. 
# @param [String] locale
#   ISO-639-1, ISO-3166-1 codes for message localization, i.e. en_US.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::RequestAccess] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::RequestAccess]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def request_access(source, volume_id, nonce, cpksver, license_types: nil, locale: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  path = 'myconfig/requestAccess'
  command = make_simple_command(:post, path, options)
  command.response_representation = Google::Apis::BooksV1::RequestAccess::Representation
  command.response_class = Google::Apis::BooksV1::RequestAccess
  # All arguments are carried as query parameters; nil values are omitted.
  command.query['cpksver'] = cpksver unless cpksver.nil?
  command.query['licenseTypes'] = license_types unless license_types.nil?
  command.query['locale'] = locale unless locale.nil?
  command.query['nonce'] = nonce unless nonce.nil?
  command.query['source'] = source unless source.nil?
  command.query['volumeId'] = volume_id unless volume_id.nil?
  command.query['fields'] = fields unless fields.nil?
  command.query['quotaUser'] = quota_user unless quota_user.nil?
  command.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(command, &block)
end

# Request downloaded content access for specified volumes on the My eBooks shelf.
# @param [String] source
#   String to identify the originator of this request.
# @param [String] nonce
#   The client nonce value.
# @param [String] cpksver
#   The device/version ID from which to release the restriction.
# @param [Array<String>, String] features
#   List of features supported by the client, i.e., 'RENTALS'
# @param [String] locale
#   ISO-639-1, ISO-3166-1 codes for message localization, i.e. en_US.
# @param [Boolean] show_preorders
#   Set to true to show pre-ordered books. Defaults to false.
# @param [Array<String>, String] volume_ids
#   The volume(s) to request download restrictions for.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::Volumes] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::Volumes] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def sync_volume_licenses(source, nonce, cpksver, features: nil, locale: nil, show_preorders: nil, volume_ids: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'myconfig/syncVolumeLicenses' command = make_simple_command(:post, path, options) command.response_representation = Google::Apis::BooksV1::Volumes::Representation command.response_class = Google::Apis::BooksV1::Volumes command.query['cpksver'] = cpksver unless cpksver.nil? command.query['features'] = features unless features.nil? command.query['locale'] = locale unless locale.nil? command.query['nonce'] = nonce unless nonce.nil? command.query['showPreorders'] = show_preorders unless show_preorders.nil? command.query['source'] = source unless source.nil? command.query['volumeIds'] = volume_ids unless volume_ids.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Sets the settings for the user. If a sub-object is specified, it will # overwrite the existing sub-object stored in the server. Unspecified sub- # objects will retain the existing value. # @param [Google::Apis::BooksV1::UserSettings] user_settings_object # @param [String] fields # Selector specifying which fields to include in a partial response. 
# @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. # @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::UserSettings] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::UserSettings] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def update_user_settings(user_settings_object = nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'myconfig/updateUserSettings' command = make_simple_command(:post, path, options) command.request_representation = Google::Apis::BooksV1::UserSettings::Representation command.request_object = user_settings_object command.response_representation = Google::Apis::BooksV1::UserSettings::Representation command.response_class = Google::Apis::BooksV1::UserSettings command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Deletes an annotation. # @param [String] annotation_id # The ID for the annotation to delete. # @param [String] source # String to identify the originator of this request. # @param [String] fields # Selector specifying which fields to include in a partial response. 
# @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. # @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [NilClass] No result returned for this method # @yieldparam err [StandardError] error object if request failed # # @return [void] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def delete_my_library_annotation(annotation_id, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'mylibrary/annotations/{annotationId}' command = make_simple_command(:delete, path, options) command.params['annotationId'] = annotation_id unless annotation_id.nil? command.query['source'] = source unless source.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Inserts a new annotation. # @param [Google::Apis::BooksV1::Annotation] annotation_object # @param [String] country # ISO-3166-1 code to override the IP-based location. # @param [Boolean] show_only_summary_in_response # Requests that only the summary of the specified layer be provided in the # response. # @param [String] source # String to identify the originator of this request. # @param [String] fields # Selector specifying which fields to include in a partial response. 
# @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. # @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::Annotation] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::Annotation] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def insert_my_library_annotation(annotation_object = nil, country: nil, show_only_summary_in_response: nil, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'mylibrary/annotations' command = make_simple_command(:post, path, options) command.request_representation = Google::Apis::BooksV1::Annotation::Representation command.request_object = annotation_object command.response_representation = Google::Apis::BooksV1::Annotation::Representation command.response_class = Google::Apis::BooksV1::Annotation command.query['country'] = country unless country.nil? command.query['showOnlySummaryInResponse'] = show_only_summary_in_response unless show_only_summary_in_response.nil? command.query['source'] = source unless source.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Retrieves a list of annotations, possibly filtered. 
# @param [String] content_version # The content version for the requested volume. # @param [String] layer_id # The layer ID to limit annotation by. # @param [Array<String>, String] layer_ids # The layer ID(s) to limit annotation by. # @param [Fixnum] max_results # Maximum number of results to return # @param [String] page_token # The value of the nextToken from the previous page. # @param [Boolean] show_deleted # Set to true to return deleted annotations. updatedMin must be in the request # to use this. Defaults to false. # @param [String] source # String to identify the originator of this request. # @param [String] updated_max # RFC 3339 timestamp to restrict to items updated prior to this timestamp ( # exclusive). # @param [String] updated_min # RFC 3339 timestamp to restrict to items updated since this timestamp ( # inclusive). # @param [String] volume_id # The volume to restrict annotations to. # @param [String] fields # Selector specifying which fields to include in a partial response. # @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. 
# @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::Annotations] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::Annotations] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def list_my_library_annotations(content_version: nil, layer_id: nil, layer_ids: nil, max_results: nil, page_token: nil, show_deleted: nil, source: nil, updated_max: nil, updated_min: nil, volume_id: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'mylibrary/annotations' command = make_simple_command(:get, path, options) command.response_representation = Google::Apis::BooksV1::Annotations::Representation command.response_class = Google::Apis::BooksV1::Annotations command.query['contentVersion'] = content_version unless content_version.nil? command.query['layerId'] = layer_id unless layer_id.nil? command.query['layerIds'] = layer_ids unless layer_ids.nil? command.query['maxResults'] = max_results unless max_results.nil? command.query['pageToken'] = page_token unless page_token.nil? command.query['showDeleted'] = show_deleted unless show_deleted.nil? command.query['source'] = source unless source.nil? command.query['updatedMax'] = updated_max unless updated_max.nil? command.query['updatedMin'] = updated_min unless updated_min.nil? command.query['volumeId'] = volume_id unless volume_id.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Gets the summary of specified layers. 
# @param [Array<String>, String] layer_ids # Array of layer IDs to get the summary for. # @param [String] volume_id # Volume id to get the summary for. # @param [String] fields # Selector specifying which fields to include in a partial response. # @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. # @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::AnnotationsSummary] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::AnnotationsSummary] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def summarize_my_library_annotation(layer_ids, volume_id, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'mylibrary/annotations/summary' command = make_simple_command(:post, path, options) command.response_representation = Google::Apis::BooksV1::AnnotationsSummary::Representation command.response_class = Google::Apis::BooksV1::AnnotationsSummary command.query['layerIds'] = layer_ids unless layer_ids.nil? command.query['volumeId'] = volume_id unless volume_id.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Updates an existing annotation. 
# @param [String] annotation_id
#   The ID for the annotation to update.
# @param [Google::Apis::BooksV1::Annotation] annotation_object
# @param [String] source
#   String to identify the originator of this request.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::Annotation] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::Annotation]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def update_my_library_annotation(annotation_id, annotation_object = nil, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  path = 'mylibrary/annotations/{annotationId}'
  command = make_simple_command(:put, path, options)
  # PUT with the Annotation object serialized as the request body.
  command.request_representation = Google::Apis::BooksV1::Annotation::Representation
  command.request_object = annotation_object
  command.response_representation = Google::Apis::BooksV1::Annotation::Representation
  command.response_class = Google::Apis::BooksV1::Annotation
  command.params['annotationId'] = annotation_id unless annotation_id.nil?
  # Query parameters; nil values are omitted.
  command.query['source'] = source unless source.nil?
  command.query['fields'] = fields unless fields.nil?
  command.query['quotaUser'] = quota_user unless quota_user.nil?
  command.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(command, &block)
end

# Adds a volume to a bookshelf.
# @param [String] shelf
#   ID of bookshelf to which to add a volume.
# @param [String] volume_id
#   ID of volume to add.
# @param [String] reason
#   The reason for which the book is added to the library.
# @param [String] source
#   String to identify the originator of this request.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [NilClass] No result returned for this method
# @yieldparam err [StandardError] error object if request failed
#
# @return [void]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def add_my_library_volume(shelf, volume_id, reason: nil, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  # POST with no response body representation; returns nothing on success.
  path = 'mylibrary/bookshelves/{shelf}/addVolume'
  command = make_simple_command(:post, path, options)
  command.params['shelf'] = shelf unless shelf.nil?
  # Query parameters; nil values are omitted.
  command.query['reason'] = reason unless reason.nil?
  command.query['source'] = source unless source.nil?
  command.query['volumeId'] = volume_id unless volume_id.nil?
  command.query['fields'] = fields unless fields.nil?
  command.query['quotaUser'] = quota_user unless quota_user.nil?
  command.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(command, &block)
end

# Clears all volumes from a bookshelf.
# @param [String] shelf
#   ID of bookshelf from which to remove a volume.
# @param [String] source
#   String to identify the originator of this request.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [NilClass] No result returned for this method
# @yieldparam err [StandardError] error object if request failed
#
# @return [void]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def clear_my_library_volumes(shelf, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  # POST with no response body representation; returns nothing on success.
  path = 'mylibrary/bookshelves/{shelf}/clearVolumes'
  command = make_simple_command(:post, path, options)
  command.params['shelf'] = shelf unless shelf.nil?
  # Query parameters; nil values are omitted.
  command.query['source'] = source unless source.nil?
  command.query['fields'] = fields unless fields.nil?
  command.query['quotaUser'] = quota_user unless quota_user.nil?
  command.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(command, &block)
end

# Retrieves metadata for a specific bookshelf belonging to the authenticated
# user.
# @param [String] shelf # ID of bookshelf to retrieve. # @param [String] source # String to identify the originator of this request. # @param [String] fields # Selector specifying which fields to include in a partial response. # @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. # @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::Bookshelf] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::Bookshelf] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def get_my_library_bookshelf(shelf, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'mylibrary/bookshelves/{shelf}' command = make_simple_command(:get, path, options) command.response_representation = Google::Apis::BooksV1::Bookshelf::Representation command.response_class = Google::Apis::BooksV1::Bookshelf command.params['shelf'] = shelf unless shelf.nil? command.query['source'] = source unless source.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Retrieves a list of bookshelves belonging to the authenticated user. # @param [String] source # String to identify the originator of this request. 
# @param [String] fields # Selector specifying which fields to include in a partial response. # @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. # @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::Bookshelves] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::Bookshelves] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def list_my_library_bookshelves(source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'mylibrary/bookshelves' command = make_simple_command(:get, path, options) command.response_representation = Google::Apis::BooksV1::Bookshelves::Representation command.response_class = Google::Apis::BooksV1::Bookshelves command.query['source'] = source unless source.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Moves a volume within a bookshelf. # @param [String] shelf # ID of bookshelf with the volume. # @param [String] volume_id # ID of volume to move. # @param [Fixnum] volume_position # Position on shelf to move the item (0 puts the item before the current first # item, 1 puts it between the first and the second and so on.) 
# @param [String] source # String to identify the originator of this request. # @param [String] fields # Selector specifying which fields to include in a partial response. # @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. # @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [NilClass] No result returned for this method # @yieldparam err [StandardError] error object if request failed # # @return [void] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def move_my_library_volume(shelf, volume_id, volume_position, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'mylibrary/bookshelves/{shelf}/moveVolume' command = make_simple_command(:post, path, options) command.params['shelf'] = shelf unless shelf.nil? command.query['source'] = source unless source.nil? command.query['volumeId'] = volume_id unless volume_id.nil? command.query['volumePosition'] = volume_position unless volume_position.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Removes a volume from a bookshelf. # @param [String] shelf # ID of bookshelf from which to remove a volume. # @param [String] volume_id # ID of volume to remove. 
# @param [String] reason
#   The reason for which the book is removed from the library.
# @param [String] source
#   String to identify the originator of this request.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [NilClass] No result returned for this method
# @yieldparam err [StandardError] error object if request failed
#
# @return [void]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def remove_my_library_volume(shelf, volume_id, reason: nil, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:post, 'mylibrary/bookshelves/{shelf}/removeVolume', options)
  cmd.params['shelf'] = shelf unless shelf.nil?
  cmd.query['reason'] = reason unless reason.nil?
  cmd.query['source'] = source unless source.nil?
  cmd.query['volumeId'] = volume_id unless volume_id.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Gets volume information for volumes on a bookshelf.
# @param [String] shelf
#   The bookshelf ID or name to retrieve volumes for.
# @param [String] country
#   ISO-3166-1 code to override the IP-based location.
# @param [Fixnum] max_results
#   Maximum number of results to return
# @param [String] projection
#   Restrict information returned to a set of selected fields.
# @param [String] q
#   Full-text search query string in this bookshelf.
# @param [Boolean] show_preorders
#   Set to true to show pre-ordered books. Defaults to false.
# @param [String] source
#   String to identify the originator of this request.
# @param [Fixnum] start_index
#   Index of the first element to return (starts at 0)
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::Volumes] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::Volumes]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_my_library_volumes(shelf, country: nil, max_results: nil, projection: nil, q: nil, show_preorders: nil, source: nil, start_index: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:get, 'mylibrary/bookshelves/{shelf}/volumes', options)
  cmd.response_representation = Google::Apis::BooksV1::Volumes::Representation
  cmd.response_class = Google::Apis::BooksV1::Volumes
  cmd.params['shelf'] = shelf unless shelf.nil?
  cmd.query['country'] = country unless country.nil?
  cmd.query['maxResults'] = max_results unless max_results.nil?
  cmd.query['projection'] = projection unless projection.nil?
  cmd.query['q'] = q unless q.nil?
  cmd.query['showPreorders'] = show_preorders unless show_preorders.nil?
  cmd.query['source'] = source unless source.nil?
  cmd.query['startIndex'] = start_index unless start_index.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Retrieves my reading position information for a volume.
# @param [String] volume_id
#   ID of volume for which to retrieve a reading position.
# @param [String] content_version
#   Volume content version for which this reading position is requested.
# @param [String] source
#   String to identify the originator of this request.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::ReadingPosition] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::ReadingPosition]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def get_my_library_reading_position(volume_id, content_version: nil, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:get, 'mylibrary/readingpositions/{volumeId}', options)
  cmd.response_representation = Google::Apis::BooksV1::ReadingPosition::Representation
  cmd.response_class = Google::Apis::BooksV1::ReadingPosition
  cmd.params['volumeId'] = volume_id unless volume_id.nil?
  cmd.query['contentVersion'] = content_version unless content_version.nil?
  cmd.query['source'] = source unless source.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Sets my reading position information for a volume.
# @param [String] volume_id
#   ID of volume for which to update the reading position.
# @param [String] timestamp
#   RFC 3339 UTC format timestamp associated with this reading position.
# @param [String] position
#   Position string for the new volume reading position.
# @param [String] action
#   Action that caused this reading position to be set.
# @param [String] content_version
#   Volume content version for which this reading position applies.
# @param [String] device_cookie
#   Random persistent device cookie optional on set position.
# @param [String] source
#   String to identify the originator of this request.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [NilClass] No result returned for this method
# @yieldparam err [StandardError] error object if request failed
#
# @return [void]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def set_my_library_reading_position(volume_id, timestamp, position, action: nil, content_version: nil, device_cookie: nil, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:post, 'mylibrary/readingpositions/{volumeId}/setPosition', options)
  cmd.params['volumeId'] = volume_id unless volume_id.nil?
  cmd.query['action'] = action unless action.nil?
  cmd.query['contentVersion'] = content_version unless content_version.nil?
  cmd.query['deviceCookie'] = device_cookie unless device_cookie.nil?
  cmd.query['position'] = position unless position.nil?
  cmd.query['source'] = source unless source.nil?
  cmd.query['timestamp'] = timestamp unless timestamp.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# List categories for onboarding experience.
# @param [String] locale
#   ISO-639-1 language and ISO-3166-1 country code. Default is en-US if unset.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::Category] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::Category]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_onboarding_categories(locale: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:get, 'onboarding/listCategories', options)
  cmd.response_representation = Google::Apis::BooksV1::Category::Representation
  cmd.response_class = Google::Apis::BooksV1::Category
  cmd.query['locale'] = locale unless locale.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# List available volumes under categories for onboarding experience.
# @param [Array<String>, String] category_id
#   List of category ids requested.
# @param [String] locale
#   ISO-639-1 language and ISO-3166-1 country code. Default is en-US if unset.
# @param [String] max_allowed_maturity_rating
#   The maximum allowed maturity rating of returned volumes. Books with a higher
#   maturity rating are filtered out.
# @param [Fixnum] page_size
#   Number of maximum results per page to be included in the response.
# @param [String] page_token
#   The value of the nextToken from the previous page.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::Volume2] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::Volume2]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_onboarding_category_volumes(category_id: nil, locale: nil, max_allowed_maturity_rating: nil, page_size: nil, page_token: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:get, 'onboarding/listCategoryVolumes', options)
  cmd.response_representation = Google::Apis::BooksV1::Volume2::Representation
  cmd.response_class = Google::Apis::BooksV1::Volume2
  cmd.query['categoryId'] = category_id unless category_id.nil?
  cmd.query['locale'] = locale unless locale.nil?
  cmd.query['maxAllowedMaturityRating'] = max_allowed_maturity_rating unless max_allowed_maturity_rating.nil?
  cmd.query['pageSize'] = page_size unless page_size.nil?
  cmd.query['pageToken'] = page_token unless page_token.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Accepts a promo offer for the given device/volume.
# @param [String] android_id
#   device android_id
# @param [String] device
#   device device
# @param [String] manufacturer
#   device manufacturer
# @param [String] model
#   device model
# @param [String] offer_id
# @param [String] product
#   device product
# @param [String] serial
#   device serial
# @param [String] volume_id
#   Volume id to exercise the offer
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [NilClass] No result returned for this method
# @yieldparam err [StandardError] error object if request failed
#
# @return [void]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def accept_promo_offer(android_id: nil, device: nil, manufacturer: nil, model: nil, offer_id: nil, product: nil, serial: nil, volume_id: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:post, 'promooffer/accept', options)
  cmd.query['androidId'] = android_id unless android_id.nil?
  cmd.query['device'] = device unless device.nil?
  cmd.query['manufacturer'] = manufacturer unless manufacturer.nil?
  cmd.query['model'] = model unless model.nil?
  cmd.query['offerId'] = offer_id unless offer_id.nil?
  cmd.query['product'] = product unless product.nil?
  cmd.query['serial'] = serial unless serial.nil?
  cmd.query['volumeId'] = volume_id unless volume_id.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Dismisses a promo offer for the given device.
# @param [String] android_id
#   device android_id
# @param [String] device
#   device device
# @param [String] manufacturer
#   device manufacturer
# @param [String] model
#   device model
# @param [String] offer_id
#   Offer to dismiss
# @param [String] product
#   device product
# @param [String] serial
#   device serial
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [NilClass] No result returned for this method
# @yieldparam err [StandardError] error object if request failed
#
# @return [void]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def dismiss_promo_offer(android_id: nil, device: nil, manufacturer: nil, model: nil, offer_id: nil, product: nil, serial: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:post, 'promooffer/dismiss', options)
  cmd.query['androidId'] = android_id unless android_id.nil?
  cmd.query['device'] = device unless device.nil?
  cmd.query['manufacturer'] = manufacturer unless manufacturer.nil?
  cmd.query['model'] = model unless model.nil?
  cmd.query['offerId'] = offer_id unless offer_id.nil?
  cmd.query['product'] = product unless product.nil?
  cmd.query['serial'] = serial unless serial.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Returns a list of promo offers available to the user
# @param [String] android_id
#   device android_id
# @param [String] device
#   device device
# @param [String] manufacturer
#   device manufacturer
# @param [String] model
#   device model
# @param [String] product
#   device product
# @param [String] serial
#   device serial
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::Offers] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::Offers]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def get_promo_offer(android_id: nil, device: nil, manufacturer: nil, model: nil, product: nil, serial: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:get, 'promooffer/get', options)
  cmd.response_representation = Google::Apis::BooksV1::Offers::Representation
  cmd.response_class = Google::Apis::BooksV1::Offers
  cmd.query['androidId'] = android_id unless android_id.nil?
  cmd.query['device'] = device unless device.nil?
  cmd.query['manufacturer'] = manufacturer unless manufacturer.nil?
  cmd.query['model'] = model unless model.nil?
  cmd.query['product'] = product unless product.nil?
  cmd.query['serial'] = serial unless serial.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Gets volume information for a single volume.
# @param [String] volume_id
#   ID of volume to retrieve.
# @param [String] country
#   ISO-3166-1 code to override the IP-based location.
# @param [String] partner
#   Brand results for partner ID.
# @param [String] projection
#   Restrict information returned to a set of selected fields.
# @param [String] source
#   String to identify the originator of this request.
# @param [Boolean] user_library_consistent_read
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::Volume] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::Volume]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def get_volume(volume_id, country: nil, partner: nil, projection: nil, source: nil, user_library_consistent_read: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:get, 'volumes/{volumeId}', options)
  cmd.response_representation = Google::Apis::BooksV1::Volume::Representation
  cmd.response_class = Google::Apis::BooksV1::Volume
  cmd.params['volumeId'] = volume_id unless volume_id.nil?
  cmd.query['country'] = country unless country.nil?
  cmd.query['partner'] = partner unless partner.nil?
  cmd.query['projection'] = projection unless projection.nil?
  cmd.query['source'] = source unless source.nil?
  cmd.query['user_library_consistent_read'] = user_library_consistent_read unless user_library_consistent_read.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Performs a book search.
# @param [String] q
#   Full-text search query string.
# @param [String] download
#   Restrict to volumes by download availability.
# @param [String] filter
#   Filter search results.
# @param [String] lang_restrict
#   Restrict results to books with this language code.
# @param [String] library_restrict
#   Restrict search to this user's library.
# @param [Fixnum] max_results
#   Maximum number of results to return.
# @param [String] order_by
#   Sort search results.
# @param [String] partner
#   Restrict and brand results for partner ID.
# @param [String] print_type
#   Restrict to books or magazines.
# @param [String] projection
#   Restrict information returned to a set of selected fields.
# @param [Boolean] show_preorders
#   Set to true to show books available for preorder. Defaults to false.
# @param [String] source
#   String to identify the originator of this request.
# @param [Fixnum] start_index
#   Index of the first result to return (starts at 0)
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::Volumes] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::Volumes]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_volumes(q, download: nil, filter: nil, lang_restrict: nil, library_restrict: nil, max_results: nil, order_by: nil, partner: nil, print_type: nil, projection: nil, show_preorders: nil, source: nil, start_index: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:get, 'volumes', options)
  cmd.response_representation = Google::Apis::BooksV1::Volumes::Representation
  cmd.response_class = Google::Apis::BooksV1::Volumes
  cmd.query['download'] = download unless download.nil?
  cmd.query['filter'] = filter unless filter.nil?
  cmd.query['langRestrict'] = lang_restrict unless lang_restrict.nil?
  cmd.query['libraryRestrict'] = library_restrict unless library_restrict.nil?
  cmd.query['maxResults'] = max_results unless max_results.nil?
  cmd.query['orderBy'] = order_by unless order_by.nil?
  cmd.query['partner'] = partner unless partner.nil?
  cmd.query['printType'] = print_type unless print_type.nil?
  cmd.query['projection'] = projection unless projection.nil?
  cmd.query['q'] = q unless q.nil?
  cmd.query['showPreorders'] = show_preorders unless show_preorders.nil?
  cmd.query['source'] = source unless source.nil?
  cmd.query['startIndex'] = start_index unless start_index.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Return a list of associated books.
# @param [String] volume_id
#   ID of the source volume.
# @param [String] association
#   Association type.
# @param [String] locale
#   ISO-639-1 language and ISO-3166-1 country code. Ex: 'en_US'. Used for
#   generating recommendations.
# @param [String] max_allowed_maturity_rating
#   The maximum allowed maturity rating of returned recommendations. Books with a
#   higher maturity rating are filtered out.
# @param [String] source
#   String to identify the originator of this request.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::Volumes] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::Volumes]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_associated_volumes(volume_id, association: nil, locale: nil, max_allowed_maturity_rating: nil, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:get, 'volumes/{volumeId}/associated', options)
  cmd.response_representation = Google::Apis::BooksV1::Volumes::Representation
  cmd.response_class = Google::Apis::BooksV1::Volumes
  cmd.params['volumeId'] = volume_id unless volume_id.nil?
  cmd.query['association'] = association unless association.nil?
  cmd.query['locale'] = locale unless locale.nil?
  cmd.query['maxAllowedMaturityRating'] = max_allowed_maturity_rating unless max_allowed_maturity_rating.nil?
  cmd.query['source'] = source unless source.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Return a list of books in My Library.
# @param [Array<String>, String] acquire_method
#   How the book was acquired
# @param [String] locale
#   ISO-639-1 language and ISO-3166-1 country code. Ex:'en_US'. Used for
#   generating recommendations.
# @param [Fixnum] max_results
#   Maximum number of results to return.
# @param [Array<String>, String] processing_state
#   The processing state of the user uploaded volumes to be returned. Applicable
#   only if the UPLOADED is specified in the acquireMethod.
# @param [String] source
#   String to identify the originator of this request.
# @param [Fixnum] start_index
#   Index of the first result to return (starts at 0)
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options
#   Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::BooksV1::Volumes] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::BooksV1::Volumes]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_my_books(acquire_method: nil, locale: nil, max_results: nil, processing_state: nil, source: nil, start_index: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
  cmd = make_simple_command(:get, 'volumes/mybooks', options)
  cmd.response_representation = Google::Apis::BooksV1::Volumes::Representation
  cmd.response_class = Google::Apis::BooksV1::Volumes
  cmd.query['acquireMethod'] = acquire_method unless acquire_method.nil?
  cmd.query['locale'] = locale unless locale.nil?
  cmd.query['maxResults'] = max_results unless max_results.nil?
  cmd.query['processingState'] = processing_state unless processing_state.nil?
  cmd.query['source'] = source unless source.nil?
  cmd.query['startIndex'] = start_index unless start_index.nil?
  cmd.query['fields'] = fields unless fields.nil?
  cmd.query['quotaUser'] = quota_user unless quota_user.nil?
  cmd.query['userIp'] = user_ip unless user_ip.nil?
  execute_or_queue_command(cmd, &block)
end

# Return a list of recommended books for the current user.
# @param [String] locale
#   ISO-639-1 language and ISO-3166-1 country code. Ex: 'en_US'. Used for
#   generating recommendations.
# @param [String] max_allowed_maturity_rating
#   The maximum allowed maturity rating of returned recommendations. Books with a
#   higher maturity rating are filtered out.
# @param [String] source
#   String to identify the originator of this request.
# @param [String] fields
#   Selector specifying which fields to include in a partial response.
# @param [String] quota_user
#   Available to use for quota purposes for server-side applications. Can be any
#   arbitrary string assigned to a user, but should not exceed 40 characters.
#   Overrides userIp if both are provided.
# @param [String] user_ip
#   IP address of the site where the request originates. Use this if you want to
#   enforce per-user limits.
# @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::Volumes] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::Volumes] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def list_recommended_volumes(locale: nil, max_allowed_maturity_rating: nil, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'volumes/recommended' command = make_simple_command(:get, path, options) command.response_representation = Google::Apis::BooksV1::Volumes::Representation command.response_class = Google::Apis::BooksV1::Volumes command.query['locale'] = locale unless locale.nil? command.query['maxAllowedMaturityRating'] = max_allowed_maturity_rating unless max_allowed_maturity_rating.nil? command.query['source'] = source unless source.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Rate a recommended book for the current user. # @param [String] rating # Rating to be given to the volume. # @param [String] volume_id # ID of the source volume. # @param [String] locale # ISO-639-1 language and ISO-3166-1 country code. Ex: 'en_US'. Used for # generating recommendations. # @param [String] source # String to identify the originator of this request. # @param [String] fields # Selector specifying which fields to include in a partial response. # @param [String] quota_user # Available to use for quota purposes for server-side applications. 
Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. # @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::RateRecommendedVolumeResponse] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::RateRecommendedVolumeResponse] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def rate_recommended_volume(rating, volume_id, locale: nil, source: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'volumes/recommended/rate' command = make_simple_command(:post, path, options) command.response_representation = Google::Apis::BooksV1::RateRecommendedVolumeResponse::Representation command.response_class = Google::Apis::BooksV1::RateRecommendedVolumeResponse command.query['locale'] = locale unless locale.nil? command.query['rating'] = rating unless rating.nil? command.query['source'] = source unless source.nil? command.query['volumeId'] = volume_id unless volume_id.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end # Return a list of books uploaded by the current user. # @param [String] locale # ISO-639-1 language and ISO-3166-1 country code. Ex: 'en_US'. Used for # generating recommendations. # @param [Fixnum] max_results # Maximum number of results to return. 
# @param [Array<String>, String] processing_state # The processing state of the user uploaded volumes to be returned. # @param [String] source # String to identify the originator of this request. # @param [Fixnum] start_index # Index of the first result to return (starts at 0) # @param [Array<String>, String] volume_id # The ids of the volumes to be returned. If not specified all that match the # processingState are returned. # @param [String] fields # Selector specifying which fields to include in a partial response. # @param [String] quota_user # Available to use for quota purposes for server-side applications. Can be any # arbitrary string assigned to a user, but should not exceed 40 characters. # Overrides userIp if both are provided. # @param [String] user_ip # IP address of the site where the request originates. Use this if you want to # enforce per-user limits. # @param [Google::Apis::RequestOptions] options # Request-specific options # # @yield [result, err] Result & error if block supplied # @yieldparam result [Google::Apis::BooksV1::Volumes] parsed result object # @yieldparam err [StandardError] error object if request failed # # @return [Google::Apis::BooksV1::Volumes] # # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification # @raise [Google::Apis::AuthorizationError] Authorization is required def list_user_uploaded_volumes(locale: nil, max_results: nil, processing_state: nil, source: nil, start_index: nil, volume_id: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block) path = 'volumes/useruploaded' command = make_simple_command(:get, path, options) command.response_representation = Google::Apis::BooksV1::Volumes::Representation command.response_class = Google::Apis::BooksV1::Volumes command.query['locale'] = locale unless locale.nil? 
command.query['maxResults'] = max_results unless max_results.nil? command.query['processingState'] = processing_state unless processing_state.nil? command.query['source'] = source unless source.nil? command.query['startIndex'] = start_index unless start_index.nil? command.query['volumeId'] = volume_id unless volume_id.nil? command.query['fields'] = fields unless fields.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? execute_or_queue_command(command, &block) end protected def apply_command_defaults(command) command.query['key'] = key unless key.nil? command.query['quotaUser'] = quota_user unless quota_user.nil? command.query['userIp'] = user_ip unless user_ip.nil? end end end end end
hanpanpan200/google-api-ruby-client
generated/google/apis/books_v1/service.rb
Ruby
apache-2.0
126,385
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.kie.test.objects; public interface Building { public Integer getDoors(); }
droolsjbpm/jbpm
jbpm-services/jbpm-kie-services/src/test/java/org/jbpm/kie/test/objects/Building.java
Java
apache-2.0
720
#include <Wt/WApplication> #include <Wt/WStandardItemModel> #include <Wt/WTableView> #include "../treeview-dragdrop/CsvUtil.h" SAMPLE_BEGIN(SmallTableView) Wt::WTableView *tableView = new Wt::WTableView(); tableView->setModel(csvToModel(Wt::WApplication::appRoot() + "table.csv", tableView)); tableView->setColumnResizeEnabled(false); tableView->setColumnAlignment(0, Wt::AlignCenter); tableView->setHeaderAlignment(0, Wt::AlignCenter); tableView->setAlternatingRowColors(true); tableView->setRowHeight(28); tableView->setHeaderHeight(28); tableView->setSelectionMode(Wt::SingleSelection); tableView->setEditTriggers(Wt::WAbstractItemView::NoEditTrigger); /* * Configure column widths and matching table width */ const int WIDTH = 120; for (int i = 0; i < tableView->model()->columnCount(); ++i) tableView->setColumnWidth(i, 120); /* * 7 pixels are padding/border per column * 2 pixels are border of the entire table */ tableView->setWidth((WIDTH + 7) * tableView->model()->columnCount() + 2); SAMPLE_END(return tableView)
sanathkumarv/RestAPIWt
tools/wt-3.3.5-rc1/examples/widgetgallery/examples/SmallTableView.cpp
C++
apache-2.0
1,047
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.client.util;

import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Random;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.ignite.internal.client.GridClientPredicate;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.jetbrains.annotations.Nullable;

/**
 * Controls key to node affinity using consistent hash algorithm. This class is thread-safe
 * and does not have to be externally synchronized.
 * <p>
 * For a good explanation of what consistent hashing is, you can refer to
 * <a href="http://weblogs.java.net/blog/tomwhite/archive/2007/11/consistent_hash.html">Tom White's Blog</a>.
 */
public class GridClientConsistentHash<N> {
    /** Prime number used as the default affinity seed. */
    private static final int PRIME = 15485857;

    /** Random generator used by {@link #random()}. */
    private static final Random RAND = new Random();

    /** Affinity seed mixed into every node hash (never {@code null}). */
    private final Object affSeed;

    /**
     * Map of hash assignments: the consistent-hash "circle". Each point maps to a
     * sorted set of nodes because distinct nodes may collide on the same hash value;
     * {@code nodesComp} (or natural order) breaks such ties deterministically.
     */
    private final NavigableMap<Integer, SortedSet<N>> circle = new TreeMap<>();

    /** Read/write lock guarding {@code circle} and {@code nodes}. */
    private final ReadWriteLock rw = new ReentrantReadWriteLock();

    /** Distinct nodes in the hash (excludes virtual replicas). */
    private Collection<N> nodes = new HashSet<>();

    /** Nodes comparator to resolve hash codes collisions. */
    private Comparator<N> nodesComp;

    /**
     * Constructs consistent hash using empty affinity seed and {@code MD5} hasher function.
     */
    public GridClientConsistentHash() {
        this(null, null);
    }

    /**
     * Constructs consistent hash using given affinity seed and {@code MD5} hasher function.
     *
     * @param affSeed Affinity seed (will be used as key prefix for hashing).
     */
    public GridClientConsistentHash(Object affSeed) {
        this(null, affSeed);
    }

    /**
     * Constructs consistent hash using given affinity seed and hasher function.
     *
     * @param nodesComp Nodes comparator to resolve hash codes collisions.
     *      If {@code null} natural order will be used.
     * @param affSeed Affinity seed (will be used as key prefix for hashing).
     */
    public GridClientConsistentHash(Comparator<N> nodesComp, Object affSeed) {
        this.nodesComp = nodesComp;

        // Default seed is the PRIME constant, boxed so it can share the Object field.
        this.affSeed = affSeed == null ? new Integer(PRIME) : affSeed;
    }

    /**
     * Adds nodes to consistent hash algorithm (if nodes are {@code null} or empty, then no-op).
     *
     * @param nodes Nodes to add.
     * @param replicas Number of replicas for every node.
     */
    public void addNodes(Collection<N> nodes, int replicas) {
        if (nodes == null || nodes.isEmpty())
            return;

        rw.writeLock().lock();

        try {
            for (N node : nodes)
                addNode(node, replicas);
        }
        finally {
            rw.writeLock().unlock();
        }
    }

    /**
     * Adds a node to consistent hash algorithm.
     *
     * @param node New node (if {@code null} then no-op).
     * @param replicas Number of replicas for the node.
     * @return {@code True} if node was added, {@code false} if it is {@code null} or
     *      is already contained in the hash.
     */
    public boolean addNode(N node, int replicas) {
        if (node == null)
            return false;

        // Seed combines the affinity seed with the node's own hash so different
        // hash instances (different seeds) place the same node at different points.
        long seed = affSeed.hashCode() * 31 + hash(node);

        rw.writeLock().lock();

        try {
            if (!nodes.add(node))
                return false;

            int hash = hash(seed);

            SortedSet<N> set = circle.get(hash);

            if (set == null)
                circle.put(hash, set = new TreeSet<>(nodesComp));

            set.add(node);

            // Each replica derives a new pseudo-random point from the previous seed,
            // spreading the node around the circle for smoother key distribution.
            for (int i = 1; i <= replicas; i++) {
                seed = seed * affSeed.hashCode() + i;

                hash = hash(seed);

                set = circle.get(hash);

                if (set == null)
                    circle.put(hash, set = new TreeSet<>(nodesComp));

                set.add(node);
            }

            return true;
        }
        finally {
            rw.writeLock().unlock();
        }
    }

    /**
     * Removes a node and all of its replicas.
     *
     * @param node Node to remove (if {@code null}, then no-op).
     * @return {@code True} if node was removed, {@code false} if node is {@code null} or
     *      not present in hash.
     */
    public boolean removeNode(N node) {
        if (node == null)
            return false;

        rw.writeLock().lock();

        try {
            if (!nodes.remove(node))
                return false;

            // Scan every circle point: the node may appear at many points (replicas).
            for (Iterator<SortedSet<N>> it = circle.values().iterator(); it.hasNext();) {
                SortedSet<N> set = it.next();

                if (!set.remove(node))
                    continue;

                // Drop empty points so lookups never land on an empty set.
                if (set.isEmpty())
                    it.remove();
            }

            return true;
        }
        finally {
            rw.writeLock().unlock();
        }
    }

    /**
     * Gets number of distinct nodes, excluding replicas, in consistent hash.
     *
     * @return Number of distinct nodes, excluding replicas, in consistent hash.
     */
    public int count() {
        rw.readLock().lock();

        try {
            return nodes.size();
        }
        finally {
            rw.readLock().unlock();
        }
    }

    /**
     * Gets size of all nodes (including replicas) in consistent hash.
     *
     * @return Size of all nodes (including replicas) in consistent hash.
     */
    public int size() {
        rw.readLock().lock();

        try {
            int size = 0;

            for (SortedSet<N> set : circle.values())
                size += set.size();

            return size;
        }
        finally {
            rw.readLock().unlock();
        }
    }

    /**
     * Checks if consistent hash has nodes added to it.
     *
     * @return {@code True} if consistent hash is empty, {@code false} otherwise.
     */
    public boolean isEmpty() {
        return count() == 0;
    }

    /**
     * Gets set of all distinct nodes in the consistent hash (in no particular order).
     *
     * @return Set of all distinct nodes in the consistent hash.
     */
    public Set<N> nodes() {
        rw.readLock().lock();

        try {
            // Defensive copy: callers must not see concurrent modifications.
            return new HashSet<>(nodes);
        }
        finally {
            rw.readLock().unlock();
        }
    }

    /**
     * Picks a random node from consistent hash.
     *
     * @return Random node from consistent hash or {@code null} if there are no nodes.
     */
    public N random() {
        return node(RAND.nextLong());
    }

    /**
     * Gets node for a key.
     *
     * @param key Key.
     * @return Node.
     */
    public N node(Object key) {
        int hash = hash(key);

        rw.readLock().lock();

        try {
            Map.Entry<Integer, SortedSet<N>> firstEntry = circle.firstEntry();

            if (firstEntry == null)
                return null;

            Map.Entry<Integer, SortedSet<N>> tailEntry = circle.tailMap(hash, true).firstEntry();

            // Get first node hash in the circle clock-wise; wrap around to the
            // first entry when the key hashes past the last circle point.
            return circle.get(tailEntry == null ? firstEntry.getKey() : tailEntry.getKey()).first();
        }
        finally {
            rw.readLock().unlock();
        }
    }

    /**
     * Gets node for a given key.
     *
     * @param key Key to get node for.
     * @param inc Optional inclusion set. Only nodes contained in this set may be returned.
     *      If {@code null}, then all nodes may be included.
     * @return Node for key, or {@code null} if node was not found.
     */
    public N node(Object key, Collection<N> inc) {
        return node(key, inc, null);
    }

    /**
     * Gets node for a given key.
     *
     * @param key Key to get node for.
     * @param inc Optional inclusion set. Only nodes contained in this set may be returned.
     *      If {@code null}, then all nodes may be included.
     * @param exc Optional exclusion set. Only nodes not contained in this set may be returned.
     *      If {@code null}, then all nodes may be returned.
     * @return Node for key, or {@code null} if node was not found.
     */
    public N node(Object key, @Nullable final Collection<N> inc, @Nullable final Collection<N> exc) {
        if (inc == null && exc == null)
            return node(key);

        return node(key, new GridClientPredicate<N>() {
            @Override public boolean apply(N n) {
                return (inc == null || inc.contains(n)) && (exc == null || !exc.contains(n));
            }
        });
    }

    /**
     * Gets node for a given key.
     *
     * @param key Key to get node for.
     * @param p Optional predicate for node filtering.
     * @return Node for key, or {@code null} if node was not found.
     */
    public N node(Object key, GridClientPredicate<N>... p) {
        if (p == null || p.length == 0)
            return node(key);

        int hash = hash(key);

        rw.readLock().lock();

        try {
            final int size = nodes.size();

            if (size == 0)
                return null;

            // Tracks nodes that already failed the predicate so each distinct node
            // is evaluated at most once even though it appears at many circle points.
            Set<N> failed = null;

            // Move clock-wise starting from selected position 'hash'.
            for (SortedSet<N> set : circle.tailMap(hash, true).values()) {
                for (N n : set) {
                    if (failed != null && failed.contains(n))
                        continue;

                    if (apply(p, n))
                        return n;

                    if (failed == null)
                        failed = new HashSet<>();

                    failed.add(n);

                    // All distinct nodes rejected: no point scanning further.
                    if (failed.size() == size)
                        return null;
                }
            }

            //
            // Copy-paste is used to escape several new objects creation.
            //

            // Wrap around moving clock-wise from the circle start.
            for (SortedSet<N> set : circle.headMap(hash, false).values()) { // Circle head.
                for (N n : set) {
                    if (failed != null && failed.contains(n))
                        continue;

                    if (apply(p, n))
                        return n;

                    if (failed == null)
                        failed = U.newHashSet(size);

                    failed.add(n);

                    if (failed.size() == size)
                        return null;
                }
            }

            return null;
        }
        finally {
            rw.readLock().unlock();
        }
    }

    /**
     * Apply predicate to the node.
     *
     * @param p Predicate.
     * @param n Node.
     * @return {@code True} if filter passed or empty.
     */
    private boolean apply(GridClientPredicate<N>[] p, N n) {
        if (p != null) {
            for (GridClientPredicate<? super N> r : p) {
                // Null predicates in the array are skipped (treated as pass).
                if (r != null && !r.apply(n))
                    return false;
            }
        }

        return true;
    }

    /**
     * Gets hash code for a given object.
     *
     * @param o Object to get hash code for.
     * @return Hash code.
     */
    public static int hash(Object o) {
        int h = o == null ? 0 : o instanceof byte[] ? Arrays.hashCode((byte[])o) : o.hashCode();

        // Spread bits to hash code (bit-mixing finalizer to reduce clustering
        // of similar hash codes on the circle).
        h += (h << 15) ^ 0xffffcd7d;
        h ^= (h >>> 10);
        h += (h << 3);
        h ^= (h >>> 6);
        h += (h << 2) + (h << 14);

        return h ^ (h >>> 16);
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return getClass().getSimpleName() + " [affSeed=" + affSeed + ", circle=" + circle +
            ", nodesComp=" + nodesComp + ", nodes=" + nodes + "]";
    }
}
samaitra/ignite
modules/core/src/main/java/org/apache/ignite/internal/client/util/GridClientConsistentHash.java
Java
apache-2.0
12,863
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.segment.data; import io.druid.java.util.common.IAE; import java.io.IOException; public abstract class SingleValueIndexedIntsWriter implements IndexedIntsWriter { @Override public void add(Object obj) throws IOException { if (obj == null) { addValue(0); } else if (obj instanceof Integer) { addValue(((Number) obj).intValue()); } else if (obj instanceof int[]) { int[] vals = (int[]) obj; if (vals.length == 0) { addValue(0); } else { addValue(vals[0]); } } else { throw new IAE("Unsupported single value type: " + obj.getClass()); } } protected abstract void addValue(int val) throws IOException; }
erikdubbelboer/druid
processing/src/main/java/io/druid/segment/data/SingleValueIndexedIntsWriter.java
Java
apache-2.0
1,515
/* * Copyright 2013 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.executor.cdi; import javax.enterprise.context.ApplicationScoped; import javax.enterprise.inject.Produces; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; import javax.persistence.PersistenceUnit; import org.jbpm.shared.services.impl.TransactionalCommandService; @ApplicationScoped public class ExecutorDatabaseProducer { private EntityManagerFactory emf; @PersistenceUnit(unitName = "org.jbpm.executor") @ApplicationScoped @Produces public EntityManagerFactory getEntityManagerFactory() { if (this.emf == null) { // this needs to be here for non EE containers this.emf = Persistence.createEntityManagerFactory("org.jbpm.executor"); } return this.emf; } @Produces public TransactionalCommandService produceCommandService(EntityManagerFactory emf) { return new TransactionalCommandService(emf); } }
pleacu/jbpm
jbpm-services/jbpm-executor-cdi/src/test/java/org/jbpm/executor/cdi/ExecutorDatabaseProducer.java
Java
apache-2.0
1,548
// "Replace with 'Stream.mapToLong().sum()'" "true" import java.util.Collection; import java.util.List; class Test { void foo(List<List<String>> s) { long count = s.stream().peek(System.out::println).flatMap(Collection::stream).c<caret>ount(); } }
smmribeiro/intellij-community
java/java-tests/testData/inspection/inefficientStreamCount/beforePeekFlatMapCount.java
Java
apache-2.0
257
cask "paragon-ntfs" do version "15" sha256 :no_check # required as upstream package is updated in-place url "https://dl.paragon-software.com/demo/ntfsmac#{version}_trial.dmg" name "Paragon NTFS for Mac" homepage "https://www.paragon-software.com/ufsdhome/ntfs-mac/" auto_updates true installer manual: "FSInstaller.app" uninstall kext: "com.paragon-software.filesystems.ntfs", launchctl: "com.paragon-software.ntfs*", pkgutil: "com.paragon-software.pkg.ntfs", quit: "com.paragon-software.ntfs*", signal: [ ["KILL", "com.paragon-software.ntfs.FSMenuApp"], ["KILL", "com.paragon-software.ntfs.notification-agent"], ] zap trash: "~/Library/Preferences/com.paragon-software.ntfs.fsapp.plist" end
kingthorin/homebrew-cask
Casks/paragon-ntfs.rb
Ruby
bsd-2-clause
818
/* * Copyright 2008 Ayman Al-Sairafi ayman.alsairafi@gmail.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License * at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jsyntaxpane.components; import jsyntaxpane.actions.*; import java.awt.Color; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.swing.text.BadLocationException; import javax.swing.text.DefaultHighlighter; import javax.swing.text.Highlighter; import javax.swing.text.JTextComponent; import jsyntaxpane.SyntaxDocument; import jsyntaxpane.Token; /** * This class contains static utility methods to make highliting in text * components easier. * * @author Ayman Al-Sairafi */ public class Markers { // This subclass is used in our highlighting code public static class SimpleMarker extends DefaultHighlighter.DefaultHighlightPainter { public SimpleMarker(Color color) { super(color); } } /** * Removes only our private highlights * This is public so that we can remove the highlights when the editorKit * is unregistered. SimpleMarker can be null, in which case all instances of * our Markers are removed. 
* @param component the text component whose markers are to be removed * @param marker the SimpleMarker to remove */ public static void removeMarkers(JTextComponent component, SimpleMarker marker) { Highlighter hilite = component.getHighlighter(); Highlighter.Highlight[] hilites = hilite.getHighlights(); for (int i = 0; i < hilites.length; i++) { if (hilites[i].getPainter() instanceof SimpleMarker) { SimpleMarker hMarker = (SimpleMarker) hilites[i].getPainter(); if (marker == null || hMarker.equals(marker)) { hilite.removeHighlight(hilites[i]); } } } } /** * Remove all the markers from an JEditorPane * @param editorPane */ public static void removeMarkers(JTextComponent editorPane) { removeMarkers(editorPane, null); } /** * add highlights for the given Token on the given pane * @param pane * @param token * @param marker */ public static void markToken(JTextComponent pane, Token token, SimpleMarker marker) { markText(pane, token.start, token.end(), marker); } /** * add highlights for the given region on the given pane * @param pane * @param start * @param end * @param marker */ public static void markText(JTextComponent pane, int start, int end, SimpleMarker marker) { try { Highlighter hiliter = pane.getHighlighter(); int selStart = pane.getSelectionStart(); int selEnd = pane.getSelectionEnd(); // if there is no selection or selection does not overlap if(selStart == selEnd || end < selStart || start > selStart) { hiliter.addHighlight(start, end, marker); return; } // selection starts within the highlight, highlight before slection if(selStart > start && selStart < end ) { hiliter.addHighlight(start, selStart, marker); } // selection ends within the highlight, highlight remaining if(selEnd > start && selEnd < end ) { hiliter.addHighlight(selEnd, end, marker); } } catch (BadLocationException ex) { // nothing we can do if the request is out of bound LOG.log(Level.SEVERE, null, ex); } } /** * Mark all text in the document that matches the given pattern * @param pane 
control to use * @param pattern pattern to match * @param marker marker to use for highlighting */ public static void markAll(JTextComponent pane, Pattern pattern, SimpleMarker marker) { SyntaxDocument sDoc = ActionUtils.getSyntaxDocument(pane); if(sDoc == null || pattern == null) { return; } Matcher matcher = sDoc.getMatcher(pattern); // we may not have any matcher (due to undo or something, so don't do anything. if(matcher==null) { return; } while(matcher.find()) { markText(pane, matcher.start(), matcher.end(), marker); } } private static final Logger LOG = Logger.getLogger(Markers.class.getName()); }
zqq90/webit-editor
src/main/java/jsyntaxpane/components/Markers.java
Java
bsd-3-clause
4,976
# The absolute import feature is required so that we get the root celery # module rather than `amo.celery`. from __future__ import absolute_import from inspect import isclass from celery.datastructures import AttributeDict from tower import ugettext_lazy as _ __all__ = ('LOG', 'LOG_BY_ID', 'LOG_KEEP',) class _LOG(object): action_class = None class CREATE_ADDON(_LOG): id = 1 action_class = 'add' format = _(u'{addon} was created.') keep = True class EDIT_PROPERTIES(_LOG): """ Expects: addon """ id = 2 action_class = 'edit' format = _(u'{addon} properties edited.') class EDIT_DESCRIPTIONS(_LOG): id = 3 action_class = 'edit' format = _(u'{addon} description edited.') class EDIT_CATEGORIES(_LOG): id = 4 action_class = 'edit' format = _(u'Categories edited for {addon}.') class ADD_USER_WITH_ROLE(_LOG): id = 5 action_class = 'add' format = _(u'{0.name} ({1}) added to {addon}.') keep = True class REMOVE_USER_WITH_ROLE(_LOG): id = 6 action_class = 'delete' # L10n: {0} is the user being removed, {1} is their role. 
format = _(u'{0.name} ({1}) removed from {addon}.') keep = True class EDIT_CONTRIBUTIONS(_LOG): id = 7 action_class = 'edit' format = _(u'Contributions for {addon}.') class USER_DISABLE(_LOG): id = 8 format = _(u'{addon} disabled.') keep = True class USER_ENABLE(_LOG): id = 9 format = _(u'{addon} enabled.') keep = True # TODO(davedash): Log these types when pages are present class SET_PUBLIC_STATS(_LOG): id = 10 format = _(u'Stats set public for {addon}.') keep = True # TODO(davedash): Log these types when pages are present class UNSET_PUBLIC_STATS(_LOG): id = 11 format = _(u'{addon} stats set to private.') keep = True class CHANGE_STATUS(_LOG): id = 12 # L10n: {0} is the status format = _(u'{addon} status changed to {0}.') keep = True class ADD_PREVIEW(_LOG): id = 13 action_class = 'add' format = _(u'Preview added to {addon}.') class EDIT_PREVIEW(_LOG): id = 14 action_class = 'edit' format = _(u'Preview edited for {addon}.') class DELETE_PREVIEW(_LOG): id = 15 action_class = 'delete' format = _(u'Preview deleted from {addon}.') class ADD_VERSION(_LOG): id = 16 action_class = 'add' format = _(u'{version} added to {addon}.') keep = True class EDIT_VERSION(_LOG): id = 17 action_class = 'edit' format = _(u'{version} edited for {addon}.') class DELETE_VERSION(_LOG): id = 18 action_class = 'delete' # Note, {0} is a string not a version since the version is deleted. # L10n: {0} is the version number format = _(u'Version {0} deleted from {addon}.') keep = True class ADD_FILE_TO_VERSION(_LOG): id = 19 action_class = 'add' format = _(u'File {0.name} added to {version} of {addon}.') class DELETE_FILE_FROM_VERSION(_LOG): """ Expecting: addon, filename, version Because the file is being deleted, filename and version should be strings and not the object. 
""" id = 20 action_class = 'delete' format = _(u'File {0} deleted from {version} of {addon}.') class APPROVE_VERSION(_LOG): id = 21 action_class = 'approve' format = _(u'{addon} {version} approved.') short = _(u'Approved') keep = True review_email_user = True review_queue = True class PRELIMINARY_VERSION(_LOG): id = 42 action_class = 'approve' format = _(u'{addon} {version} given preliminary review.') short = _(u'Preliminarily approved') keep = True review_email_user = True review_queue = True class REJECT_VERSION(_LOG): # takes add-on, version, reviewtype id = 43 action_class = 'reject' format = _(u'{addon} {version} rejected.') short = _(u'Rejected') keep = True review_email_user = True review_queue = True class RETAIN_VERSION(_LOG): # takes add-on, version, reviewtype id = 22 format = _(u'{addon} {version} retained.') short = _(u'Retained') keep = True review_email_user = True review_queue = True class ESCALATE_VERSION(_LOG): # takes add-on, version, reviewtype id = 23 format = _(u'{addon} {version} escalated.') short = _(u'Escalated') keep = True review_email_user = True review_queue = True class REQUEST_VERSION(_LOG): # takes add-on, version, reviewtype id = 24 format = _(u'{addon} {version} review requested.') short = _(u'Review requested') keep = True review_email_user = True review_queue = True class REQUEST_INFORMATION(_LOG): id = 44 format = _(u'{addon} {version} more information requested.') short = _(u'More information requested') keep = True review_email_user = True review_queue = True class REQUEST_SUPER_REVIEW(_LOG): id = 45 format = _(u'{addon} {version} super review requested.') short = _(u'Super review requested') keep = True review_queue = True class COMMENT_VERSION(_LOG): id = 49 format = _(u'Comment on {addon} {version}.') short = _(u'Comment') keep = True review_queue = True hide_developer = True class ADD_TAG(_LOG): id = 25 action_class = 'tag' format = _(u'{tag} added to {addon}.') class REMOVE_TAG(_LOG): id = 26 action_class = 'tag' format 
= _(u'{tag} removed from {addon}.') class ADD_TO_COLLECTION(_LOG): id = 27 action_class = 'collection' format = _(u'{addon} added to {collection}.') class REMOVE_FROM_COLLECTION(_LOG): id = 28 action_class = 'collection' format = _(u'{addon} removed from {collection}.') class ADD_REVIEW(_LOG): id = 29 action_class = 'review' format = _(u'{review} for {addon} written.') # TODO(davedash): Add these when we do the admin site class ADD_RECOMMENDED_CATEGORY(_LOG): id = 31 action_class = 'edit' # L10n: {0} is a category name. format = _(u'{addon} featured in {0}.') class REMOVE_RECOMMENDED_CATEGORY(_LOG): id = 32 action_class = 'edit' # L10n: {0} is a category name. format = _(u'{addon} no longer featured in {0}.') class ADD_RECOMMENDED(_LOG): id = 33 format = _(u'{addon} is now featured.') keep = True class REMOVE_RECOMMENDED(_LOG): id = 34 format = _(u'{addon} is no longer featured.') keep = True class ADD_APPVERSION(_LOG): id = 35 action_class = 'add' # L10n: {0} is the application, {1} is the version of the app format = _(u'{0} {1} added.') class CHANGE_USER_WITH_ROLE(_LOG): """ Expects: author.user, role, addon """ id = 36 # L10n: {0} is a user, {1} is their role format = _(u'{0.name} role changed to {1} for {addon}.') keep = True class CHANGE_LICENSE(_LOG): """ Expects: license, addon """ id = 37 action_class = 'edit' format = _(u'{addon} is now licensed under {0.name}.') class CHANGE_POLICY(_LOG): id = 38 action_class = 'edit' format = _(u'{addon} policy changed.') class CHANGE_ICON(_LOG): id = 39 action_class = 'edit' format = _(u'{addon} icon changed.') class APPROVE_REVIEW(_LOG): id = 40 action_class = 'approve' format = _(u'{review} for {addon} approved.') editor_format = _(u'{user} approved {review} for {addon}.') keep = True editor_event = True class DELETE_REVIEW(_LOG): """Requires review.id and add-on objects.""" id = 41 action_class = 'review' format = _(u'Review {review} for {addon} deleted.') editor_format = _(u'{user} deleted {review} for {addon}.') 
keep = True editor_event = True class MAX_APPVERSION_UPDATED(_LOG): id = 46 format = _(u'Application max version for {version} updated.') class BULK_VALIDATION_EMAILED(_LOG): id = 47 format = _(u'Authors emailed about compatibility of {version}.') class BULK_VALIDATION_USER_EMAILED(_LOG): id = 130 format = _(u'Email sent to Author about add-on compatibility.') class CHANGE_PASSWORD(_LOG): id = 48 format = _(u'Password changed.') class PAYPAL_FAILED(_LOG): id = 51 format = _(u'{addon} failed checks with PayPal.') class MANIFEST_UPDATED(_LOG): id = 52 format = _(u'{addon} manifest updated.') class APPROVE_VERSION_WAITING(_LOG): id = 53 action_class = 'approve' format = _(u'{addon} {version} approved but waiting to be made public.') short = _(u'Approved but waiting') keep = True review_email_user = True review_queue = True class PURCHASE_ADDON(_LOG): id = 54 format = _(u'{addon} purchased.') class INSTALL_ADDON(_LOG): id = 55 format = _(u'{addon} installed.') class USER_EDITED(_LOG): id = 60 format = _(u'Account updated.') class ESCALATION_CLEARED(_LOG): id = 66 format = _(u'Escalation cleared for {addon}.') short = _(u'Escalation cleared') keep = True review_queue = True class APP_DISABLED(_LOG): id = 67 format = _(u'{addon} disabled.') short = _(u'App disabled') keep = True review_queue = True class ESCALATED_HIGH_ABUSE(_LOG): id = 68 format = _(u'{addon} escalated because of high number of abuse reports.') short = _(u'High Abuse Reports') keep = True review_queue = True class ESCALATE_MANUAL(_LOG): id = 73 format = _(u'{addon} escalated by reviewer.') short = _(u'Reviewer escalation') keep = True review_queue = True # TODO(robhudson): Escalation log for editor escalation.. class VIDEO_ERROR(_LOG): id = 74 format = _(u'Video removed from {addon} because of a problem with ' u'the video. 
') short = _(u'Video removed') class REREVIEW_DEVICES_ADDED(_LOG): id = 75 format = _(u'{addon} re-review because of new device(s) added.') short = _(u'Device(s) Added') keep = True review_queue = True class REVIEW_DEVICE_OVERRIDE(_LOG): id = 76 format = _(u'{addon} device support manually changed by reviewer.') short = _(u'Device(s) Changed by Reviewer') keep = True review_queue = True class CUSTOM_TEXT(_LOG): id = 98 format = '{0}' class CUSTOM_HTML(_LOG): id = 99 format = '{0}' class OBJECT_ADDED(_LOG): id = 100 format = _(u'Created: {0}.') admin_event = True class OBJECT_EDITED(_LOG): id = 101 format = _(u'Edited field: {2} set to: {0}.') admin_event = True class OBJECT_DELETED(_LOG): id = 102 format = _(u'Deleted: {1}.') admin_event = True class ADMIN_USER_EDITED(_LOG): id = 103 format = _(u'User {user} edited, reason: {1}') admin_event = True class ADMIN_USER_ANONYMIZED(_LOG): id = 104 format = _(u'User {user} anonymized.') admin_event = True class ADMIN_USER_RESTRICTED(_LOG): id = 105 format = _(u'User {user} restricted.') admin_event = True class ADMIN_VIEWED_LOG(_LOG): id = 106 format = _(u'Admin {0} viewed activity log for {user}.') admin_event = True class EDIT_REVIEW(_LOG): id = 107 action_class = 'review' format = _(u'{review} for {addon} updated.') class THEME_REVIEW(_LOG): id = 108 action_class = 'review' format = _(u'{addon} reviewed.') class GROUP_USER_ADDED(_LOG): id = 120 action_class = 'access' format = _(u'User {0.name} added to {group}.') keep = True admin_event = True class GROUP_USER_REMOVED(_LOG): id = 121 action_class = 'access' format = _(u'User {0.name} removed from {group}.') keep = True admin_event = True class REVIEW_FEATURES_OVERRIDE(_LOG): id = 122 format = _(u'{addon} minimum requirements manually changed by reviewer.') short = _(u'Requirements Changed by Reviewer') keep = True review_queue = True class REREVIEW_FEATURES_CHANGED(_LOG): id = 123 format = _(u'{addon} minimum requirements manually changed.') short = _(u'Requirements 
Changed') keep = True review_queue = True class CHANGE_VERSION_STATUS(_LOG): id = 124 # L10n: {0} is the status format = _(u'{version} status changed to {0}.') keep = True class DELETE_USER_LOOKUP(_LOG): id = 125 # L10n: {0} is the status format = _(u'User {0.name} {0.id} deleted via lookup tool.') keep = True class CONTENT_RATING_TO_ADULT(_LOG): id = 126 format = _('{addon} content rating changed to Adult.') review_queue = True class CONTENT_RATING_CHANGED(_LOG): id = 127 format = _('{addon} content rating changed.') class ADDON_UNLISTED(_LOG): id = 128 format = _(u'{addon} unlisted.') keep = True class BETA_SIGNED_VALIDATION_PASSED(_LOG): id = 131 format = _(u'{file} was signed.') keep = True class BETA_SIGNED_VALIDATION_FAILED(_LOG): id = 132 format = _(u'{file} was signed.') keep = True class DELETE_ADDON(_LOG): id = 133 action_class = 'delete' # L10n: {0} is the add-on GUID. format = _(u'Addon id {0} with GUID {1} has been deleted') keep = True LOGS = [x for x in vars().values() if isclass(x) and issubclass(x, _LOG) and x != _LOG] # Make sure there's no duplicate IDs. assert len(LOGS) == len(set(log.id for log in LOGS)) LOG_BY_ID = dict((l.id, l) for l in LOGS) LOG = AttributeDict((l.__name__, l) for l in LOGS) LOG_ADMINS = [l.id for l in LOGS if hasattr(l, 'admin_event')] LOG_KEEP = [l.id for l in LOGS if hasattr(l, 'keep')] LOG_EDITORS = [l.id for l in LOGS if hasattr(l, 'editor_event')] LOG_REVIEW_QUEUE = [l.id for l in LOGS if hasattr(l, 'review_queue')] # Is the user emailed the message? LOG_REVIEW_EMAIL_USER = [l.id for l in LOGS if hasattr(l, 'review_email_user')] # Logs *not* to show to the developer. LOG_HIDE_DEVELOPER = [l.id for l in LOGS if (getattr(l, 'hide_developer', False) or l.id in LOG_ADMINS)] def log(action, *args, **kw): """ e.g. 
amo.log(amo.LOG.CREATE_ADDON, []), amo.log(amo.LOG.ADD_FILE_TO_VERSION, file, version) """ from access.models import Group from addons.models import Addon from amo import get_user, logger_log from devhub.models import (ActivityLog, AddonLog, CommentLog, GroupLog, UserLog, VersionLog) from users.models import UserProfile from versions.models import Version user = kw.get('user', get_user()) if not user: logger_log.warning('Activity log called with no user: %s' % action.id) return al = ActivityLog(user=user, action=action.id) al.arguments = args if 'details' in kw: al.details = kw['details'] al.save() if 'details' in kw and 'comments' in al.details: CommentLog(comments=al.details['comments'], activity_log=al).save() # TODO(davedash): post-remora this may not be necessary. if 'created' in kw: al.created = kw['created'] # Double save necessary since django resets the created date on save. al.save() for arg in args: if isinstance(arg, tuple): if arg[0] == Addon: AddonLog(addon_id=arg[1], activity_log=al).save() elif arg[0] == Version: VersionLog(version_id=arg[1], activity_log=al).save() elif arg[0] == UserProfile: UserLog(user_id=arg[1], activity_log=al).save() elif arg[0] == Group: GroupLog(group_id=arg[1], activity_log=al).save() elif isinstance(arg, Addon): AddonLog(addon=arg, activity_log=al).save() elif isinstance(arg, Version): VersionLog(version=arg, activity_log=al).save() elif isinstance(arg, UserProfile): # Index by any user who is mentioned as an argument. UserLog(activity_log=al, user=arg).save() elif isinstance(arg, Group): GroupLog(group=arg, activity_log=al).save() # Index by every user UserLog(activity_log=al, user=user).save() return al
muffinresearch/addons-server
apps/amo/log.py
Python
bsd-3-clause
16,053
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "cc/scheduler/scheduler_settings.h"

#include "cc/trees/layer_tree_settings.h"

namespace cc {

// Stand-alone constructor: conservative defaults used when no
// LayerTreeSettings object is available (e.g. in scheduler unit tests).
SchedulerSettings::SchedulerSettings()
    : begin_frame_scheduling_enabled(true),
      main_frame_before_draw_enabled(true),
      main_frame_before_activation_enabled(false),
      impl_side_painting(false),
      timeout_and_draw_when_animation_checkerboards(true),
      maximum_number_of_failed_draws_before_draw_is_forced_(3),
      using_synchronous_renderer_compositor(false),
      throttle_frame_production(true) {
}

// Copies the scheduler-relevant subset of LayerTreeSettings, so the
// scheduler itself never needs to depend on the full tree settings.
SchedulerSettings::SchedulerSettings(const LayerTreeSettings& settings)
    : begin_frame_scheduling_enabled(settings.begin_frame_scheduling_enabled),
      main_frame_before_draw_enabled(settings.main_frame_before_draw_enabled),
      main_frame_before_activation_enabled(
          settings.main_frame_before_activation_enabled),
      impl_side_painting(settings.impl_side_painting),
      timeout_and_draw_when_animation_checkerboards(
          settings.timeout_and_draw_when_animation_checkerboards),
      maximum_number_of_failed_draws_before_draw_is_forced_(
          settings.maximum_number_of_failed_draws_before_draw_is_forced_),
      using_synchronous_renderer_compositor(
          settings.using_synchronous_renderer_compositor),
      throttle_frame_production(settings.throttle_frame_production) {
}

SchedulerSettings::~SchedulerSettings() {}

// Serializes every field into a DictionaryValue (used for tracing/debug
// dumps). Keys mirror the member names verbatim — including the trailing
// underscore on "maximum_number_of_failed_draws_before_draw_is_forced_".
// NOTE(review): that key looks inconsistent with the others; confirm no
// trace consumer depends on it before renaming.
scoped_ptr<base::Value> SchedulerSettings::AsValue() const {
  scoped_ptr<base::DictionaryValue> state(new base::DictionaryValue);
  state->SetBoolean("begin_frame_scheduling_enabled",
                    begin_frame_scheduling_enabled);
  state->SetBoolean("main_frame_before_draw_enabled",
                    main_frame_before_draw_enabled);
  state->SetBoolean("main_frame_before_activation_enabled",
                    main_frame_before_activation_enabled);
  state->SetBoolean("impl_side_painting", impl_side_painting);
  state->SetBoolean("timeout_and_draw_when_animation_checkerboards",
                    timeout_and_draw_when_animation_checkerboards);
  state->SetInteger("maximum_number_of_failed_draws_before_draw_is_forced_",
                    maximum_number_of_failed_draws_before_draw_is_forced_);
  state->SetBoolean("using_synchronous_renderer_compositor",
                    using_synchronous_renderer_compositor);
  state->SetBoolean("throttle_frame_production", throttle_frame_production);
  return state.PassAs<base::Value>();
}

}  // namespace cc
boundarydevices/android_external_chromium_org
cc/scheduler/scheduler_settings.cc
C++
bsd-3-clause
2,639
// Copyright 2007 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * @fileoverview DOM pattern to match a sequence of other patterns.
 */

goog.provide('goog.dom.pattern.Sequence');

goog.require('goog.dom.NodeType');
goog.require('goog.dom.pattern');
goog.require('goog.dom.pattern.AbstractPattern');
goog.require('goog.dom.pattern.MatchType');


/**
 * Pattern object that matches a sequence of other patterns.
 *
 * @param {Array<goog.dom.pattern.AbstractPattern>} patterns Ordered array of
 *     patterns to match.
 * @param {boolean=} opt_ignoreWhitespace Optional flag to ignore text nodes
 *     consisting entirely of whitespace. The default is to not ignore them.
 * @constructor
 * @extends {goog.dom.pattern.AbstractPattern}
 * @final
 */
goog.dom.pattern.Sequence = function(patterns, opt_ignoreWhitespace) {
  /**
   * Ordered array of patterns to match.
   *
   * @type {Array<goog.dom.pattern.AbstractPattern>}
   */
  this.patterns = patterns;

  /**
   * Whether or not to ignore whitespace only Text nodes.
   *
   * @private {boolean}
   */
  this.ignoreWhitespace_ = !!opt_ignoreWhitespace;

  /**
   * Position in the patterns array we have reached by successful matches.
   * 0 means no pattern has matched yet; patterns.length - 1 is the last
   * pattern still awaiting a match.
   *
   * @private {number}
   */
  this.currentPosition_ = 0;
};
goog.inherits(goog.dom.pattern.Sequence, goog.dom.pattern.AbstractPattern);


/**
 * Regular expression for breaking text nodes: matches strings consisting
 * entirely of whitespace (or empty strings), i.e. the text nodes that are
 * skipped when opt_ignoreWhitespace is set.
 * @private {!RegExp}
 */
goog.dom.pattern.Sequence.BREAKING_TEXTNODE_RE_ = /^\s*$/;


/**
 * Test whether the given token starts, continues, or finishes the sequence
 * of patterns given in the constructor.
 *
 * @param {Node} token Token to match against.
 * @param {goog.dom.TagWalkType} type The type of token.
 * @return {goog.dom.pattern.MatchType} <code>MATCH</code> if the pattern
 *     matches, <code>MATCHING</code> if the pattern starts a match, and
 *     <code>NO_MATCH</code> if the pattern does not match.
 * @override
 */
goog.dom.pattern.Sequence.prototype.matchToken = function(token, type) {
  // If the option is set, ignore any whitespace only text nodes.
  // NOTE(review): whitespace is reported as MATCHING even when
  // currentPosition_ == 0 (i.e. before anything has matched) — confirm
  // callers treat that as "not yet a match" rather than a started match.
  if (this.ignoreWhitespace_ && token.nodeType == goog.dom.NodeType.TEXT &&
      goog.dom.pattern.Sequence.BREAKING_TEXTNODE_RE_.test(token.nodeValue)) {
    return goog.dom.pattern.MatchType.MATCHING;
  }

  switch (this.patterns[this.currentPosition_].matchToken(token, type)) {
    case goog.dom.pattern.MatchType.MATCH:
      // Record the first token we match.
      if (this.currentPosition_ == 0) {
        this.matchedNode = token;
      }

      // Move forward one position.
      this.currentPosition_++;

      // Check if this is the last position.
      if (this.currentPosition_ == this.patterns.length) {
        this.reset();
        return goog.dom.pattern.MatchType.MATCH;
      } else {
        return goog.dom.pattern.MatchType.MATCHING;
      }

    case goog.dom.pattern.MatchType.MATCHING:
      // This can happen when our child pattern is a sequence or a repetition.
      return goog.dom.pattern.MatchType.MATCHING;

    case goog.dom.pattern.MatchType.BACKTRACK_MATCH:
      // This means a repetitive match succeeded 1 token ago.
      // TODO(robbyw): Backtrack further if necessary.
      this.currentPosition_++;

      if (this.currentPosition_ == this.patterns.length) {
        this.reset();
        return goog.dom.pattern.MatchType.BACKTRACK_MATCH;
      } else {
        // Retry the same token on the next pattern.
        return this.matchToken(token, type);
      }

    default:
      // Any failure aborts the whole sequence and rewinds to position 0.
      this.reset();
      return goog.dom.pattern.MatchType.NO_MATCH;
  }
};


/**
 * Reset any internal state this pattern keeps, including the child pattern
 * that was part-way through a match (if any).
 * @override
 */
goog.dom.pattern.Sequence.prototype.reset = function() {
  if (this.patterns[this.currentPosition_]) {
    this.patterns[this.currentPosition_].reset();
  }

  this.currentPosition_ = 0;
};
scheib/chromium
third_party/google-closure-library/closure/goog/dom/pattern/sequence.js
JavaScript
bsd-3-clause
4,393
"""test a warning is triggered when using for a lists comprehension variable""" __revision__ = 'yo' TEST_LC = [C for C in __revision__ if C.isalpha()] print C # WARN C = 4 print C # this one shouldn't trigger any warning B = [B for B in __revision__ if B.isalpha()] print B # nor this one for var1, var2 in TEST_LC: var1 = var2 + 4 print var1 # WARN for note in __revision__: note.something() for line in __revision__: for note in line: A = note.anotherthing() for x in []: pass for x in range(3): print (lambda : x)() # OK
dbbhattacharya/kitsune
vendor/packages/pylint/test/input/func_use_for_or_listcomp_var.py
Python
bsd-3-clause
560
// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/ui/app_list/search/people/people_result.h" #include <vector> #include "base/bind.h" #include "base/memory/ref_counted.h" #include "base/strings/utf_string_conversions.h" #include "chrome/browser/profiles/profile.h" #include "chrome/browser/signin/profile_oauth2_token_service_factory.h" #include "chrome/browser/signin/signin_manager_factory.h" #include "chrome/browser/ui/app_list/search/common/url_icon_source.h" #include "chrome/browser/ui/app_list/search/people/person.h" #include "chrome/browser/ui/browser_navigator.h" #include "chrome/common/extensions/api/hangouts_private.h" #include "components/signin/core/browser/profile_oauth2_token_service.h" #include "components/signin/core/browser/signin_manager.h" #include "content/public/browser/user_metrics.h" #include "extensions/browser/event_router.h" #include "grit/generated_resources.h" #include "grit/theme_resources.h" #include "ui/base/l10n/l10n_util.h" #include "ui/base/resource/resource_bundle.h" namespace OnHangoutRequested = extensions::api::hangouts_private::OnHangoutRequested; using extensions::api::hangouts_private::User; using extensions::api::hangouts_private::HangoutRequest; namespace { const int kIconSize = 32; const char kImageSizePath[] = "s64-p/"; const char kEmailUrlPrefix[] = "mailto:"; const char* const kHangoutsExtensionIds[] = { "nckgahadagoaajjgafhacjanaoiihapd", "ljclpkphhpbpinifbeabbhlfddcpfdde", "ppleadejekpmccmnpjdimmlfljlkdfej", "eggnbpckecmjlblplehfpjjdhhidfdoj", "jfjjdfefebklmdbmenmlehlopoocnoeh", "knipolnnllmklapflnccelgolnpehhpl" }; // Add a query parameter to specify the size to fetch the image in. The // original profile image can be of an arbitrary size, we ask the server to // crop it to a square 64x64 using its smart cropping algorithm. 
GURL GetImageUrl(const GURL& url) { std::string image_filename = url.ExtractFileName(); if (image_filename.empty()) return url; return url.Resolve(kImageSizePath + image_filename); } } // namespace namespace app_list { PeopleResult::PeopleResult(Profile* profile, scoped_ptr<Person> person) : profile_(profile), person_(person.Pass()), weak_factory_(this) { set_id(person_->id); set_title(base::UTF8ToUTF16(person_->display_name)); set_relevance(person_->interaction_rank); set_details(base::UTF8ToUTF16(person_->email)); RefreshHangoutsExtensionId(); SetDefaultActions(); image_ = gfx::ImageSkia( new UrlIconSource(base::Bind(&PeopleResult::OnIconLoaded, weak_factory_.GetWeakPtr()), profile_->GetRequestContext(), GetImageUrl(person_->image_url), kIconSize, IDR_PROFILE_PICTURE_LOADING), gfx::Size(kIconSize, kIconSize)); SetIcon(image_); } PeopleResult::~PeopleResult() { } void PeopleResult::Open(int event_flags) { // Action 0 will always be our default action. InvokeAction(0, event_flags); } void PeopleResult::InvokeAction(int action_index, int event_flags) { if (hangouts_extension_id_.empty()) { // If the hangouts app is not available, the only option we are showing // to the user is 'Send Email'. SendEmail(); } else { switch (action_index) { case 0: OpenChat(); break; case 1: SendEmail(); break; default: LOG(ERROR) << "Invalid people search action: " << action_index; } } } scoped_ptr<ChromeSearchResult> PeopleResult::Duplicate() { return scoped_ptr<ChromeSearchResult>( new PeopleResult(profile_, person_->Duplicate().Pass())).Pass(); } void PeopleResult::OnIconLoaded() { // Remove the existing image reps since the icon data is loaded and they // need to be re-created. 
const std::vector<gfx::ImageSkiaRep>& image_reps = image_.image_reps(); for (size_t i = 0; i < image_reps.size(); ++i) image_.RemoveRepresentation(image_reps[i].scale()); SetIcon(image_); } void PeopleResult::SetDefaultActions() { Actions actions; ui::ResourceBundle& bundle = ui::ResourceBundle::GetSharedInstance(); if (!hangouts_extension_id_.empty()) { actions.push_back(Action( *bundle.GetImageSkiaNamed(IDR_PEOPLE_SEARCH_ACTION_CHAT), *bundle.GetImageSkiaNamed(IDR_PEOPLE_SEARCH_ACTION_CHAT_HOVER), *bundle.GetImageSkiaNamed(IDR_PEOPLE_SEARCH_ACTION_CHAT_PRESSED), l10n_util::GetStringUTF16(IDS_PEOPLE_SEARCH_ACTION_CHAT_TOOLTIP))); } actions.push_back(Action( *bundle.GetImageSkiaNamed(IDR_PEOPLE_SEARCH_ACTION_EMAIL), *bundle.GetImageSkiaNamed(IDR_PEOPLE_SEARCH_ACTION_EMAIL_HOVER), *bundle.GetImageSkiaNamed(IDR_PEOPLE_SEARCH_ACTION_EMAIL_PRESSED), l10n_util::GetStringUTF16(IDS_PEOPLE_SEARCH_ACTION_EMAIL_TOOLTIP))); SetActions(actions); } void PeopleResult::OpenChat() { HangoutRequest request; request.type = extensions::api::hangouts_private::HANGOUT_TYPE_CHAT; // from: the user this chat request is originating from. SigninManagerBase* signin_manager = SigninManagerFactory::GetInstance()->GetForProfile(profile_); DCHECK(signin_manager); request.from = signin_manager->GetAuthenticatedAccountId(); // to: list of users with whom to start this hangout is with. linked_ptr<User> target(new User()); target->id = person_->owner_id; request.to.push_back(target); scoped_ptr<extensions::Event> event( new extensions::Event(OnHangoutRequested::kEventName, OnHangoutRequested::Create(request))); // TODO(rkc): Change this once we remove the hangoutsPrivate API. 
// See crbug.com/306672 extensions::EventRouter::Get(profile_) ->DispatchEventToExtension(hangouts_extension_id_, event.Pass()); content::RecordAction(base::UserMetricsAction("PeopleSearch_OpenChat")); } void PeopleResult::SendEmail() { chrome::NavigateParams params(profile_, GURL(kEmailUrlPrefix + person_->email), content::PAGE_TRANSITION_LINK); // If no window exists, this will open a new window this one tab. params.disposition = NEW_FOREGROUND_TAB; chrome::Navigate(&params); content::RecordAction(base::UserMetricsAction("PeopleSearch_SendEmail")); } void PeopleResult::RefreshHangoutsExtensionId() { // TODO(rkc): Change this once we remove the hangoutsPrivate API. // See crbug.com/306672 for (size_t i = 0; i < arraysize(kHangoutsExtensionIds); ++i) { if (extensions::EventRouter::Get(profile_)->ExtensionHasEventListener( kHangoutsExtensionIds[i], OnHangoutRequested::kEventName)) { hangouts_extension_id_ = kHangoutsExtensionIds[i]; return; } } hangouts_extension_id_.clear(); } ChromeSearchResultType PeopleResult::GetType() { return SEARCH_PEOPLE_SEARCH_RESULT; } } // namespace app_list
boundarydevices/android_external_chromium_org
chrome/browser/ui/app_list/search/people/people_result.cc
C++
bsd-3-clause
7,006
# Copyright 2017 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import py_utils from telemetry import story as story_module from telemetry.page import page as page_module from telemetry.page import shared_page_state class LeakDetectionSharedState(shared_page_state.SharedDesktopPageState): def ShouldReuseBrowserForAllStoryRuns(self): return True class LeakDetectionPage(page_module.Page): def __init__(self, url, page_set, name=''): super(LeakDetectionPage, self).__init__( url=url, page_set=page_set, name=name, shared_page_state_class=LeakDetectionSharedState) def RunNavigateSteps(self, action_runner): tabs = action_runner.tab.browser.tabs new_tab = tabs.New() new_tab.action_runner.Navigate('about:blank') new_tab.action_runner.PrepareForLeakDetection() new_tab.action_runner.MeasureMemory() new_tab.action_runner.Navigate(self.url) self._WaitForPageLoadToComplete(new_tab.action_runner) new_tab.action_runner.Navigate('about:blank') new_tab.action_runner.PrepareForLeakDetection() new_tab.action_runner.MeasureMemory() new_tab.Close() def _WaitForPageLoadToComplete(self, action_runner): py_utils.WaitFor(action_runner.tab.HasReachedQuiescence, timeout=30) # Some websites have a script that loads resources continuously, in which cases # HasReachedQuiescence would not be reached. This class waits for document ready # state to be complete to avoid timeout for those pages. 
class ResourceLoadingLeakDetectionPage(LeakDetectionPage): def _WaitForPageLoadToComplete(self, action_runner): action_runner.tab.WaitForDocumentReadyStateToBeComplete() class LeakDetectionStorySet(story_module.StorySet): def __init__(self): super(LeakDetectionStorySet, self).__init__( archive_data_file='data/leak_detection.json', cloud_storage_bucket=story_module.PARTNER_BUCKET) urls_list = [ # Alexa top websites 'https://www.google.com', 'https://www.youtube.com', 'https://www.facebook.com', 'https://www.baidu.com', 'https://www.wikipedia.org', 'https://world.taobao.com/', 'https://www.tmall.com/', 'http://www.amazon.com', 'http://www.twitter.com', 'https://www.instagram.com/', 'http://www.jd.com/', 'https://vk.com/', 'https://outlook.live.com', 'https://www.reddit.com/', 'https://weibo.com/', 'https://www.sina.com.cn/', 'https://www.360.cn/', 'https://yandex.ru/', 'https://www.blogger.com/', 'https://www.netflix.com/', 'https://www.pornhub.com/', 'https://www.linkedin.com/', 'https://www.yahoo.co.jp/', 'https://www.csdn.net/', 'https://www.alipay.com/', 'https://www.twitch.tv/', # TODO(keishi): Memory dump fails flakily crbug.com/963273 #'https://www.ebay.com/', # TODO(keishi): Memory dump fails flakily crbug.com/963273 #'https://www.microsoft.com/', # TODO(keishi): Memory dump fails flakily crbug.com/963273 #'https://www.xvideos.com/', 'https://mail.ru/', 'https://www.bing.com/', 'http://www.wikia.com/', 'https://www.office.com/', 'https://www.imdb.com/', 'https://www.aliexpress.com/', 'https://www.msn.com/', 'https://news.google.com/', 'https://www.theguardian.com/', 'https://www.indiatimes.com/', # TODO(keishi): Memory dump fails flakily crbug.com/963273 #'http://www.foxnews.com/', 'https://weather.com/', 'https://www.shutterstock.com/', 'https://docs.google.com/', 'https://wordpress.com/', # TODO(yuzus): This test crashes. 
# 'https://www.apple.com/', 'https://play.google.com/store', 'https://www.dropbox.com/', 'https://soundcloud.com/', 'https://vimeo.com/', 'https://www.slideshare.net/', 'https://www.mediafire.com/', 'https://www.etsy.com/', 'https://www.ikea.com/', 'https://www.bestbuy.com/', 'https://www.homedepot.com/', # TODO(keishi): Memory dump fails flakily crbug.com/963273 #'https://www.target.com/', 'https://www.booking.com/', 'https://www.tripadvisor.com/', 'https://9gag.com/', 'https://www.expedia.com/', 'https://www.roblox.com/', 'https://www.gamespot.com/', 'https://www.blizzard.com', # TODO(keishi): Memory dump fails flakily crbug.com/963273 #'https://ign.com/', 'https://www.yelp.com/', # Times out waiting for HasReachedQuiescence - crbug.com/927427 # 'https://gizmodo.com/', 'https://www.gsmarena.com/', 'https://www.theverge.com/', 'https://www.nlm.nih.gov/', 'https://archive.org/', 'https://www.udemy.com/', 'https://answers.yahoo.com/', # TODO(crbug.com/985552): Memory dump fails flakily. # 'https://www.goodreads.com/', 'https://www.cricbuzz.com/', 'http://www.goal.com/', 'http://siteadvisor.com/', 'https://www.patreon.com/', 'https://www.jw.org/', 'http://europa.eu/', 'https://translate.google.com/', 'https://www.epicgames.com/', 'http://www.reverso.net/', 'https://play.na.leagueoflegends.com/', 'https://www.thesaurus.com/', 'https://www.weebly.com/', 'https://www.deviantart.com/', 'https://www.scribd.com/', 'https://www.hulu.com/', 'https://www.xfinity.com/', # India Alexa top websites 'https://porn555.com/', 'https://www.onlinesbi.com/', 'https://www.flipkart.com/', 'https://www.hotstar.com/', 'https://www.incometaxindiaefiling.gov.in/', 'https://stackoverflow.com/', # TODO(crbug.com/1005035) Memory dump fails flakily. 
# 'https://www.irctc.co.in/nget/', 'https://www.hdfcbank.com/', 'https://www.whatsapp.com/', 'https://uidai.gov.in/', 'https://billdesk.com/', 'https://www.icicibank.com/', # US Alexa top websites 'https://imgur.com/', 'https://www.craigslist.org/', 'https://www.chase.com/', # TODO(892352): tumblr started timing out due to a catapult roll. See # https://crbug.com/892352 # 'https://www.tumblr.com/', 'https://www.paypal.com/', # TODO(yuzus): espn.com is flaky. https://crbug.com/959796 #'http://www.espn.com/', 'https://edition.cnn.com/', 'https://www.pinterest.com/', # TODO(keishi): Memory dump fails flakily crbug.com/963273 #'https://www.nytimes.com/', 'https://github.com/', 'https://www.salesforce.com/', # Japan Alexa top websites 'https://www.rakuten.co.jp/', 'http://www.nicovideo.jp/', 'https://fc2.com/', 'https://ameblo.jp/', 'http://kakaku.com/', 'https://www.goo.ne.jp/', 'https://www.pixiv.net/', # websites which were found to be leaking in the past 'https://www.prezi.com', # TODO(keishi): Memory dump fails flakily crbug.com/963273 #'http://www.time.com', 'http://www.cheapoair.com', 'http://www.onlinedown.net', 'http://www.dailypost.ng', 'http://www.aljazeera.net', 'http://www.googleapps.com', 'http://www.airbnb.ch', 'http://www.livedoor.jp', 'http://www.blu-ray.com', # TODO(953195): Test times out. 
# 'http://www.block.io', 'http://www.hockeybuzz.com', 'http://www.silverpop.com', 'http://www.ansa.it', 'http://www.gulfair.com', 'http://www.nusatrip.com', 'http://www.samsung-fun.ru', 'http://www.opentable.com', 'http://www.magnetmail.net', 'http://zzz.com.ua', 'http://a-rakumo.appspot.com', 'http://www.sakurafile.com', 'http://www.psiexams.com', 'http://www.contentful.com', 'http://www.estibot.com', 'http://www.mbs.de', 'http://www.zhengjie.com', 'http://www.sjp.pl', 'http://www.mastodon.social', 'http://www.horairetrain.net', 'http://www.torrentzeu.to', 'http://www.inbank.it', 'http://www.gradpoint.com', 'http://www.mail.bg', 'http://www.aaannunci.it', 'http://www.leandomainsearch.com', 'http://www.wpjam.com', 'http://www.nigma.ru', 'http://www.do-search.com', 'http://www.omniboxes.com', 'http://whu.edu.cn', 'http://support.wordpress.com', 'http://www.webwebweb.com', 'http://www.sick.com', 'http://www.iowacconline.com', 'http://hdu.edu.cn', 'http://www.register.com', 'http://www.careesma.in', 'http://www.bestdic.ir', 'http://www.privacyassistant.net', 'http://www.sklavenzentrale.com', 'http://www.podbay.fm', 'http://www.coco.fr', 'http://www.skipaas.com', 'http://www.chatword.org', 'http://www.ezcardinfo.com', 'http://www.daydao.com', 'http://www.expediapartnercentral.com', 'http://www.22find.com', 'http://www.e-shop.gr', 'http://www.indeed.com', 'http://www.highwaybus.com', 'http://www.pingpang.info', 'http://www.besgold.com', 'http://www.arabam.com', 'http://makfax.com.mk', 'http://game.co.za', 'http://www.savaari.com', 'http://www.railsguides.jp', ] resource_loading_urls_list = [ 'https://www.hotels.com/', 'https://www.livejournal.com/', # TODO(keishi): Memory dump fails flakily crbug.com/963273 #'https://www.yahoo.com', 'http://www.quora.com', 'https://www.macys.com', 'http://infomoney.com.br', 'http://www.listindiario.com', 'https://www.engadget.com/', 'https://www.sohu.com/', 'http://www.qq.com', 'http://www.benzworld.org', 'http://www.520mojing.com', ] 
for url in urls_list: self.AddStory(LeakDetectionPage(url, self, url)) for url in resource_loading_urls_list: self.AddStory(ResourceLoadingLeakDetectionPage(url, self, url))
chromium/chromium
tools/perf/contrib/leak_detection/page_sets.py
Python
bsd-3-clause
10,066
// libjingle // Copyright 2004 Google Inc. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // 3. The name of the author may not be used to endorse or promote products // derived from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED // WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF // MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO // EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; // OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR // OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF // ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Implementation of VideoRecorder and FileVideoCapturer. 
#include "talk/media/devices/filevideocapturer.h" #include "talk/base/bytebuffer.h" #include "talk/base/logging.h" #include "talk/base/thread.h" namespace cricket { ///////////////////////////////////////////////////////////////////// // Implementation of class VideoRecorder ///////////////////////////////////////////////////////////////////// bool VideoRecorder::Start(const std::string& filename, bool write_header) { Stop(); write_header_ = write_header; int err; if (!video_file_.Open(filename, "wb", &err)) { LOG(LS_ERROR) << "Unable to open file " << filename << " err=" << err; return false; } return true; } void VideoRecorder::Stop() { video_file_.Close(); } bool VideoRecorder::RecordFrame(const CapturedFrame& frame) { if (talk_base::SS_CLOSED == video_file_.GetState()) { LOG(LS_ERROR) << "File not opened yet"; return false; } uint32 size = 0; if (!frame.GetDataSize(&size)) { LOG(LS_ERROR) << "Unable to calculate the data size of the frame"; return false; } if (write_header_) { // Convert the frame header to bytebuffer. talk_base::ByteBuffer buffer; buffer.WriteUInt32(frame.width); buffer.WriteUInt32(frame.height); buffer.WriteUInt32(frame.fourcc); buffer.WriteUInt32(frame.pixel_width); buffer.WriteUInt32(frame.pixel_height); buffer.WriteUInt64(frame.elapsed_time); buffer.WriteUInt64(frame.time_stamp); buffer.WriteUInt32(size); // Write the bytebuffer to file. if (talk_base::SR_SUCCESS != video_file_.Write(buffer.Data(), buffer.Length(), NULL, NULL)) { LOG(LS_ERROR) << "Failed to write frame header"; return false; } } // Write the frame data to file. if (talk_base::SR_SUCCESS != video_file_.Write(frame.data, size, NULL, NULL)) { LOG(LS_ERROR) << "Failed to write frame data"; return false; } return true; } /////////////////////////////////////////////////////////////////////// // Definition of private class FileReadThread that periodically reads // frames from a file. 
/////////////////////////////////////////////////////////////////////// class FileVideoCapturer::FileReadThread : public talk_base::Thread, public talk_base::MessageHandler { public: explicit FileReadThread(FileVideoCapturer* capturer) : capturer_(capturer), finished_(false) { } // Override virtual method of parent Thread. Context: Worker Thread. virtual void Run() { // Read the first frame and start the message pump. The pump runs until // Stop() is called externally or Quit() is called by OnMessage(). int waiting_time_ms = 0; if (capturer_ && capturer_->ReadFrame(true, &waiting_time_ms)) { PostDelayed(waiting_time_ms, this); Thread::Run(); } finished_ = true; } // Override virtual method of parent MessageHandler. Context: Worker Thread. virtual void OnMessage(talk_base::Message* /*pmsg*/) { int waiting_time_ms = 0; if (capturer_ && capturer_->ReadFrame(false, &waiting_time_ms)) { PostDelayed(waiting_time_ms, this); } else { Quit(); } } // Check if Run() is finished. bool Finished() const { return finished_; } private: FileVideoCapturer* capturer_; bool finished_; DISALLOW_COPY_AND_ASSIGN(FileReadThread); }; ///////////////////////////////////////////////////////////////////// // Implementation of class FileVideoCapturer ///////////////////////////////////////////////////////////////////// static const int64 kNumNanoSecsPerMilliSec = 1000000; const char* FileVideoCapturer::kVideoFileDevicePrefix = "video-file:"; FileVideoCapturer::FileVideoCapturer() : frame_buffer_size_(0), file_read_thread_(NULL), repeat_(0), start_time_ns_(0), last_frame_timestamp_ns_(0), ignore_framerate_(false) { } FileVideoCapturer::~FileVideoCapturer() { Stop(); delete[] static_cast<char*>(captured_frame_.data); } bool FileVideoCapturer::Init(const Device& device) { if (!FileVideoCapturer::IsFileVideoCapturerDevice(device)) { return false; } std::string filename(device.name); if (IsRunning()) { LOG(LS_ERROR) << "The file video capturer is already running"; return false; } // Open the file. 
int err; if (!video_file_.Open(filename, "rb", &err)) { LOG(LS_ERROR) << "Unable to open the file " << filename << " err=" << err; return false; } // Read the first frame's header to determine the supported format. CapturedFrame frame; if (talk_base::SR_SUCCESS != ReadFrameHeader(&frame)) { LOG(LS_ERROR) << "Failed to read the first frame header"; video_file_.Close(); return false; } // Seek back to the start of the file. if (!video_file_.SetPosition(0)) { LOG(LS_ERROR) << "Failed to seek back to beginning of the file"; video_file_.Close(); return false; } // Enumerate the supported formats. We have only one supported format. We set // the frame interval to kMinimumInterval here. In Start(), if the capture // format's interval is greater than kMinimumInterval, we use the interval; // otherwise, we use the timestamp in the file to control the interval. VideoFormat format(frame.width, frame.height, VideoFormat::kMinimumInterval, frame.fourcc); std::vector<VideoFormat> supported; supported.push_back(format); SetId(device.id); SetSupportedFormats(supported); return true; } bool FileVideoCapturer::Init(const std::string& filename) { return Init(FileVideoCapturer::CreateFileVideoCapturerDevice(filename)); } CaptureState FileVideoCapturer::Start(const VideoFormat& capture_format) { if (IsRunning()) { LOG(LS_ERROR) << "The file video capturer is already running"; return CS_FAILED; } if (talk_base::SS_CLOSED == video_file_.GetState()) { LOG(LS_ERROR) << "File not opened yet"; return CS_NO_DEVICE; } else if (!video_file_.SetPosition(0)) { LOG(LS_ERROR) << "Failed to seek back to beginning of the file"; return CS_FAILED; } SetCaptureFormat(&capture_format); // Create a thread to read the file. 
file_read_thread_ = new FileReadThread(this); bool ret = file_read_thread_->Start(); start_time_ns_ = kNumNanoSecsPerMilliSec * static_cast<int64>(talk_base::Time()); if (ret) { LOG(LS_INFO) << "File video capturer '" << GetId() << "' started"; return CS_RUNNING; } else { LOG(LS_ERROR) << "File video capturer '" << GetId() << "' failed to start"; return CS_FAILED; } } bool FileVideoCapturer::IsRunning() { return file_read_thread_ && !file_read_thread_->Finished(); } void FileVideoCapturer::Stop() { if (file_read_thread_) { file_read_thread_->Stop(); file_read_thread_ = NULL; LOG(LS_INFO) << "File video capturer '" << GetId() << "' stopped"; } SetCaptureFormat(NULL); } bool FileVideoCapturer::GetPreferredFourccs(std::vector<uint32>* fourccs) { if (!fourccs) { return false; } fourccs->push_back(GetSupportedFormats()->at(0).fourcc); return true; } talk_base::StreamResult FileVideoCapturer::ReadFrameHeader( CapturedFrame* frame) { // We first read kFrameHeaderSize bytes from the file stream to a memory // buffer, then construct a bytebuffer from the memory buffer, and finally // read the frame header from the bytebuffer. 
char header[CapturedFrame::kFrameHeaderSize]; talk_base::StreamResult sr; size_t bytes_read; int error; sr = video_file_.Read(header, CapturedFrame::kFrameHeaderSize, &bytes_read, &error); LOG(LS_VERBOSE) << "Read frame header: stream_result = " << sr << ", bytes read = " << bytes_read << ", error = " << error; if (talk_base::SR_SUCCESS == sr) { if (CapturedFrame::kFrameHeaderSize != bytes_read) { return talk_base::SR_EOS; } talk_base::ByteBuffer buffer(header, CapturedFrame::kFrameHeaderSize); buffer.ReadUInt32(reinterpret_cast<uint32*>(&frame->width)); buffer.ReadUInt32(reinterpret_cast<uint32*>(&frame->height)); buffer.ReadUInt32(&frame->fourcc); buffer.ReadUInt32(&frame->pixel_width); buffer.ReadUInt32(&frame->pixel_height); buffer.ReadUInt64(reinterpret_cast<uint64*>(&frame->elapsed_time)); buffer.ReadUInt64(reinterpret_cast<uint64*>(&frame->time_stamp)); buffer.ReadUInt32(&frame->data_size); } return sr; } // Executed in the context of FileReadThread. bool FileVideoCapturer::ReadFrame(bool first_frame, int* wait_time_ms) { uint32 start_read_time_ms = talk_base::Time(); // 1. Signal the previously read frame to downstream. if (!first_frame) { captured_frame_.time_stamp = kNumNanoSecsPerMilliSec * static_cast<int64>(start_read_time_ms); captured_frame_.elapsed_time = captured_frame_.time_stamp - start_time_ns_; SignalFrameCaptured(this, &captured_frame_); } // 2. Read the next frame. if (talk_base::SS_CLOSED == video_file_.GetState()) { LOG(LS_ERROR) << "File not opened yet"; return false; } // 2.1 Read the frame header. talk_base::StreamResult result = ReadFrameHeader(&captured_frame_); if (talk_base::SR_EOS == result) { // Loop back if repeat. 
if (repeat_ != talk_base::kForever) { if (repeat_ > 0) { --repeat_; } else { return false; } } if (video_file_.SetPosition(0)) { result = ReadFrameHeader(&captured_frame_); } } if (talk_base::SR_SUCCESS != result) { LOG(LS_ERROR) << "Failed to read the frame header"; return false; } // 2.2 Reallocate memory for the frame data if necessary. if (frame_buffer_size_ < captured_frame_.data_size) { frame_buffer_size_ = captured_frame_.data_size; delete[] static_cast<char*>(captured_frame_.data); captured_frame_.data = new char[frame_buffer_size_]; } // 2.3 Read the frame adata. if (talk_base::SR_SUCCESS != video_file_.Read(captured_frame_.data, captured_frame_.data_size, NULL, NULL)) { LOG(LS_ERROR) << "Failed to read frame data"; return false; } // 3. Decide how long to wait for the next frame. *wait_time_ms = 0; // If the capture format's interval is not kMinimumInterval, we use it to // control the rate; otherwise, we use the timestamp in the file to control // the rate. if (!first_frame && !ignore_framerate_) { int64 interval_ns = GetCaptureFormat()->interval > VideoFormat::kMinimumInterval ? GetCaptureFormat()->interval : captured_frame_.time_stamp - last_frame_timestamp_ns_; int interval_ms = static_cast<int>(interval_ns / kNumNanoSecsPerMilliSec); interval_ms -= talk_base::Time() - start_read_time_ms; if (interval_ms > 0) { *wait_time_ms = interval_ms; } } // Keep the original timestamp read from the file. last_frame_timestamp_ns_ = captured_frame_.time_stamp; return true; } } // namespace cricket
wangscript/libjingle-1
trunk/talk/media/devices/filevideocapturer.cc
C++
bsd-3-clause
12,792
package collectors import ( "encoding/json" "fmt" "net/http" "net/url" "strings" "bosun.org/metadata" "bosun.org/opentsdb" ) var ( rmqQueueStatusMap = map[string]int{ "running": 0, "syncing": 1, "flow": 2, "idle": 3, "down": 4, } ) const ( defaultRabbitmqURL string = "http://guest:guest@localhost:15672" rmqPrefix = "rabbitmq." ) func init() { collectors = append(collectors, &IntervalCollector{F: c_rabbitmq_overview, Enable: enableRabbitmq}) collectors = append(collectors, &IntervalCollector{F: c_rabbitmq_queues, Enable: enableRabbitmq}) collectors = append(collectors, &IntervalCollector{F: c_rabbitmq_nodes, Enable: enableRabbitmq}) } // RabbitMQ registers a RabbitMQ collector. func RabbitMQ(url string) error { safeURL, err := urlUserHost(url) if err != nil { return err } collectors = append(collectors, &IntervalCollector{ F: func() (opentsdb.MultiDataPoint, error) { return rabbitmqOverview(url) }, name: fmt.Sprintf("rabbitmq-overview-%s", safeURL), }, &IntervalCollector{ F: func() (opentsdb.MultiDataPoint, error) { return rabbitmqNodes(url) }, name: fmt.Sprintf("rabbitmq-nodes-%s", safeURL), }, &IntervalCollector{ F: func() (opentsdb.MultiDataPoint, error) { return rabbitmqQueues(url) }, name: fmt.Sprintf("rabbitmq-queues-%s", safeURL), }) return nil } func enableRabbitmq() bool { return enableURL(defaultRabbitmqURL)() } func c_rabbitmq_overview() (opentsdb.MultiDataPoint, error) { return rabbitmqOverview(defaultRabbitmqURL) } func c_rabbitmq_nodes() (opentsdb.MultiDataPoint, error) { return rabbitmqNodes(defaultRabbitmqURL) } func c_rabbitmq_queues() (opentsdb.MultiDataPoint, error) { return rabbitmqQueues(defaultRabbitmqURL) } func rabbitmqOverview(s string) (opentsdb.MultiDataPoint, error) { p := rmqPrefix + "overview." 
res, err := http.Get(s + "/api/overview") if err != nil { return nil, err } defer res.Body.Close() var o rmqOverview if err := json.NewDecoder(res.Body).Decode(&o); err != nil { return nil, err } var md opentsdb.MultiDataPoint splitNode := strings.Split(o.Node, "@") if len(splitNode) < 2 { return nil, fmt.Errorf("Error: invalid RabbitMQ node name, can not split '%s'", o.Node) } host := splitNode[1] ts := opentsdb.TagSet{"host": host} Add(&md, p+"channels", o.ObjectTotals.Channels, ts, metadata.Gauge, metadata.Channel, DescRmqObjecttotalsChannels) Add(&md, p+"connections", o.ObjectTotals.Connections, ts, metadata.Gauge, metadata.Connection, DescRmqObjectTotalsConnections) Add(&md, p+"consumers", o.ObjectTotals.Consumers, ts, metadata.Gauge, metadata.Consumer, DescRmqObjectTotalsConsumers) Add(&md, p+"exchanges", o.ObjectTotals.Exchanges, ts, metadata.Gauge, metadata.Exchange, DescRmqObjectTotalsExchanges) Add(&md, p+"queues", o.ObjectTotals.Queues, ts, metadata.Gauge, metadata.Queue, DescRmqObjectTotalsQueues) msgStats := rabbitmqMessageStats(p, ts, o.MessageStats) md = append(md, msgStats...) return md, nil } func rabbitmqQueues(s string) (opentsdb.MultiDataPoint, error) { p := rmqPrefix + "queue." 
res, err := http.Get(s + "/api/queues") if err != nil { return nil, err } defer res.Body.Close() var qs []rmqQueue if err := json.NewDecoder(res.Body).Decode(&qs); err != nil { return nil, err } var md opentsdb.MultiDataPoint for _, q := range qs { if strings.HasPrefix(q.Name, "amq.gen-") { continue // skip auto-generated queues } splitNode := strings.Split(q.Node, "@") if len(splitNode) < 2 { return nil, fmt.Errorf("Error: invalid RabbitMQ node name, can not split '%s'", q.Node) } host := splitNode[1] ts := opentsdb.TagSet{"host": host, "queue": q.Name, "vhost": q.Vhost} Add(&md, p+"consumers", q.Consumers, ts, metadata.Gauge, metadata.Consumer, DescRmqConsumers) Add(&md, p+"memory", q.Memory, ts, metadata.Gauge, metadata.Bytes, DescRmqMemory) Add(&md, p+"message_bytes_total", q.MessageBytes, ts, metadata.Gauge, metadata.Bytes, DescRmqMessageBytes) Add(&md, p+"message_bytes_persistent", q.MessageBytesPersistent, ts, metadata.Gauge, metadata.Bytes, DescRmqMessageBytesPersistent) Add(&md, p+"message_bytes_transient", q.MessageBytesRAM, ts, metadata.Gauge, metadata.Bytes, DescRmqMessageBytesRAM) Add(&md, p+"message_bytes_ready", q.MessageBytesReady, ts, metadata.Gauge, metadata.Bytes, DescRmqMessageBytesReady) Add(&md, p+"message_bytes_unack", q.MessageBytesUnacknowledged, ts, metadata.Gauge, metadata.Bytes, DescRmqMessageBytesUnacknowledged) Add(&md, p+"messages_queue_depth", q.Messages, ts, metadata.Gauge, metadata.Message, DescRmqMessages) Add(&md, p+"messages_persistent", q.MessagesPersistent, ts, metadata.Gauge, metadata.Message, DescRmqMessagesPersistent) Add(&md, p+"messages_transient", q.MessagesRAM, ts, metadata.Gauge, metadata.Message, DescRmqMessagesRAM) Add(&md, p+"messages_ready_total", q.MessagesReady, ts, metadata.Gauge, metadata.Message, DescRmqMessagesReady) Add(&md, p+"messages_ready_transient", q.MessagesReadyRAM, ts, metadata.Gauge, metadata.Message, DescRmqMessagesReadyRAM) Add(&md, p+"messages_unack_total", q.MessagesUnacknowledged, ts, 
metadata.Gauge, metadata.Message, DescRmqMessagesUnacknowledged) Add(&md, p+"messages_unack_transient", q.MessagesUnacknowledgedRAM, ts, metadata.Gauge, metadata.Message, DescRmqMessagesUnacknowledgedRAM) if sn, ok := q.SlaveNodes.([]interface{}); ok { Add(&md, p+"slave_nodes", len(sn), ts, metadata.Gauge, metadata.Node, DescRmqSlaveNodes) } if dsn, ok := q.DownSlaveNodes.([]interface{}); ok { Add(&md, p+"down_slave_nodes", len(dsn), ts, metadata.Gauge, metadata.Node, DescRmqDownSlaveNodes) } if ssn, ok := q.SynchronisedSlaveNodes.([]interface{}); ok { Add(&md, p+"sync_slave_nodes", len(ssn), ts, metadata.Gauge, metadata.Node, DescRmqSynchronisedSlaveNodes) } if cu, ok := q.ConsumerUtilisation.(float64); ok { Add(&md, p+"consumer_utilisation", cu, ts, metadata.Gauge, metadata.Fraction, DescRmqConsumerUtilisation) } msgStats := rabbitmqMessageStats(p, ts, q.MessageStats) md = append(md, msgStats...) backingQueueStatus := rabbitmqBackingQueueStatus(p+"backing_queue.", ts, q.BackingQueueStatus) md = append(md, backingQueueStatus...) if state, ok := rmqQueueStatusMap[q.State]; ok { Add(&md, p+"state", state, ts, metadata.Gauge, metadata.StatusCode, DescRmqState) } else { Add(&md, p+"state", -1, ts, metadata.Gauge, metadata.StatusCode, DescRmqState) } } return md, nil } func rabbitmqNodes(s string) (opentsdb.MultiDataPoint, error) { p := rmqPrefix + "node." 
res, err := http.Get(s + "/api/nodes") if err != nil { return nil, err } defer res.Body.Close() var ns []rmqNode if err := json.NewDecoder(res.Body).Decode(&ns); err != nil { return nil, err } var md opentsdb.MultiDataPoint for _, n := range ns { splitName := strings.Split(n.Name, "@") if len(splitName) < 2 { return nil, fmt.Errorf("Error: invalid RabbitMQ node name, can not split '%s'", n.Name) } host := splitName[1] ts := opentsdb.TagSet{"host": host} Add(&md, p+"disk_free", n.DiskFree, ts, metadata.Gauge, metadata.Consumer, DescRmqDiskFree) Add(&md, p+"disk_free_alarm", n.DiskFreeAlarm, ts, metadata.Gauge, metadata.Bool, DescRmqDiskFreeAlarm) Add(&md, p+"disk_free_limit", n.DiskFreeLimit, ts, metadata.Gauge, metadata.Consumer, DescRmqDiskFreeLimit) Add(&md, p+"fd_total", n.FDTotal, ts, metadata.Gauge, metadata.Files, DescRmqFDTotal) Add(&md, p+"fd_used", n.FDUsed, ts, metadata.Gauge, metadata.Files, DescRmqFDUsed) Add(&md, p+"mem_used", n.MemUsed, ts, metadata.Gauge, metadata.Bytes, DescRmqMemUsed) Add(&md, p+"mem_alarm", n.MemAlarm, ts, metadata.Gauge, metadata.Bool, DescRmqMemAlarm) Add(&md, p+"mem_limit", n.MemLimit, ts, metadata.Gauge, metadata.Bytes, DescRmqMemLimit) Add(&md, p+"proc_used", n.ProcUsed, ts, metadata.Gauge, metadata.Process, DescRmqProcUsed) Add(&md, p+"proc_total", n.ProcTotal, ts, metadata.Gauge, metadata.Process, DescRmqProcTotal) Add(&md, p+"sockets_used", n.SocketsUsed, ts, metadata.Gauge, metadata.Socket, DescRmqSocketsUsed) Add(&md, p+"sockets_total", n.SocketsTotal, ts, metadata.Gauge, metadata.Socket, DescRmqSocketsTotal) Add(&md, p+"uptime", n.Uptime, ts, metadata.Gauge, metadata.Second, DescRmqUptime) Add(&md, p+"running", n.Running, ts, metadata.Gauge, metadata.StatusCode, DescRmqRunning) if partitions, ok := n.Partitions.([]interface{}); ok { Add(&md, p+"partitions", len(partitions), ts, metadata.Gauge, metadata.Node, DescRmqPartitions) } } return md, nil } func rabbitmqMessageStats(p string, ts opentsdb.TagSet, ms 
rmqMessageStats) opentsdb.MultiDataPoint { var md opentsdb.MultiDataPoint Add(&md, p+"message_stats", ms.Ack, ts.Copy().Merge(opentsdb.TagSet{"method": "ack"}), metadata.Counter, metadata.Message, DescRmqMessageStatsAck) Add(&md, p+"message_stats", ms.Confirm, ts.Copy().Merge(opentsdb.TagSet{"method": "confirm"}), metadata.Counter, metadata.Message, DescRmqMessageStatsConfirm) Add(&md, p+"message_stats", ms.Deliver, ts.Copy().Merge(opentsdb.TagSet{"method": "deliver"}), metadata.Counter, metadata.Message, DescRmqMessageStatsDeliver) Add(&md, p+"message_stats", ms.DeliverGet, ts.Copy().Merge(opentsdb.TagSet{"method": "deliver_get"}), metadata.Counter, metadata.Message, DescRmqMessageStatsDeliverGet) Add(&md, p+"message_stats", ms.DeliverNoAck, ts.Copy().Merge(opentsdb.TagSet{"method": "deliver_noack"}), metadata.Counter, metadata.Message, DescRmqMessageStatsDeliverNoAck) Add(&md, p+"message_stats", ms.Get, ts.Copy().Merge(opentsdb.TagSet{"method": "get"}), metadata.Counter, metadata.Message, DescRmqMessageStatsGet) Add(&md, p+"message_stats", ms.GetNoAck, ts.Copy().Merge(opentsdb.TagSet{"method": "get_noack"}), metadata.Counter, metadata.Message, DescRmqMessageStatsGetNoack) Add(&md, p+"message_stats", ms.Publish, ts.Copy().Merge(opentsdb.TagSet{"method": "publish"}), metadata.Counter, metadata.Message, DescRmqMessageStatsPublish) Add(&md, p+"message_stats", ms.PublishIn, ts.Copy().Merge(opentsdb.TagSet{"method": "publish_in"}), metadata.Counter, metadata.Message, DescRmqMessageStatsPublishIn) Add(&md, p+"message_stats", ms.PublishOut, ts.Copy().Merge(opentsdb.TagSet{"method": "publish_out"}), metadata.Counter, metadata.Message, DescRmqMessageStatsPublishOut) Add(&md, p+"message_stats", ms.Redeliver, ts.Copy().Merge(opentsdb.TagSet{"method": "redeliver"}), metadata.Counter, metadata.Message, DescRmqMessageStatsRedeliver) Add(&md, p+"message_stats", ms.Return, ts.Copy().Merge(opentsdb.TagSet{"method": "return"}), metadata.Counter, metadata.Message, 
DescRmqMessageStatsReturn) return md } func rabbitmqBackingQueueStatus(p string, ts opentsdb.TagSet, bqs rmqBackingQueueStatus) opentsdb.MultiDataPoint { var md opentsdb.MultiDataPoint Add(&md, p+"avg_rate", bqs.AvgAckEgressRate, ts.Copy().Merge(opentsdb.TagSet{"method": "ack", "direction": "out"}), metadata.Rate, metadata.Message, DescRmqBackingQueueStatusAvgAckEgressRate) Add(&md, p+"avg_rate", bqs.AvgAckIngressRate, ts.Copy().Merge(opentsdb.TagSet{"method": "ack", "direction": "in"}), metadata.Rate, metadata.Message, DescRmqBackingQueueStatusAvgAckIngressRate) Add(&md, p+"avg_rate", bqs.AvgEgressRate, ts.Copy().Merge(opentsdb.TagSet{"method": "noack", "direction": "out"}), metadata.Rate, metadata.Message, DescRmqBackingQueueStatusAvgEgressRate) Add(&md, p+"avg_rate", bqs.AvgIngressRate, ts.Copy().Merge(opentsdb.TagSet{"method": "noack", "direction": "in"}), metadata.Rate, metadata.Message, DescRmqBackingQueueStatusAvgIngressRate) Add(&md, p+"len", bqs.Len, ts, metadata.Gauge, metadata.Message, DescRmqBackingQueueStatusLen) return md } func urlUserHost(s string) (string, error) { u, err := url.Parse(s) if err != nil { return "", err } if u.User != nil { res := fmt.Sprintf("%s@%s", u.User.Username(), u.Host) return res, nil } res := fmt.Sprintf("%s", u.Host) return res, nil } type rmqOverview struct { ClusterName string `json:"cluster_name"` MessageStats rmqMessageStats `json:"message_stats"` QueueTotals struct { Messages int `json:"messages"` MessagesReady int `json:"messages_ready"` MessagesUnacknowledged int `json:"messages_unacknowledged"` } `json:"queue_totals"` ObjectTotals struct { Consumers int `json:"consumers"` Queues int `json:"queues"` Exchanges int `json:"exchanges"` Connections int `json:"connections"` Channels int `json:"channels"` } `json:"object_totals"` Node string `json:"node"` } type rmqNode struct { DiskFree int64 `json:"disk_free"` DiskFreeAlarm bool `json:"disk_free_alarm"` DiskFreeLimit int `json:"disk_free_limit"` FDTotal int 
`json:"fd_total"` FDUsed int `json:"fd_used"` MemAlarm bool `json:"mem_alarm"` MemLimit int64 `json:"mem_limit"` MemUsed int `json:"mem_used"` Name string `json:"name"` Partitions interface{} `json:"partitions"` ProcTotal int `json:"proc_total"` ProcUsed int `json:"proc_used"` Processors int `json:"processors"` RunQueue int `json:"run_queue"` Running bool `json:"running"` SocketsTotal int `json:"sockets_total"` SocketsUsed int `json:"sockets_used"` Uptime int `json:"uptime"` } type rmqQueue struct { Messages int `json:"messages"` MessagesReady int `json:"messages_ready"` MessagesUnacknowledged int `json:"messages_unacknowledged"` Consumers int `json:"consumers"` ConsumerUtilisation interface{} `json:"consumer_utilisation"` Memory int `json:"memory"` SlaveNodes interface{} `json:"slave_nodes"` SynchronisedSlaveNodes interface{} `json:"synchronised_slave_nodes"` DownSlaveNodes interface{} `json:"down_slave_nodes"` BackingQueueStatus rmqBackingQueueStatus `json:"backing_queue_status"` State string `json:"state"` MessagesRAM int `json:"messages_ram"` MessagesReadyRAM int `json:"messages_ready_ram"` MessagesUnacknowledgedRAM int `json:"messages_unacknowledged_ram"` MessagesPersistent int `json:"messages_persistent"` MessageBytes int `json:"message_bytes"` MessageBytesReady int `json:"message_bytes_ready"` MessageBytesUnacknowledged int `json:"message_bytes_unacknowledged"` MessageBytesRAM int `json:"message_bytes_ram"` MessageBytesPersistent int `json:"message_bytes_persistent"` Name string `json:"name"` Vhost string `json:"vhost"` Durable bool `json:"durable"` Node string `json:"node"` MessageStats rmqMessageStats `json:"message_stats"` } type rmqMessageStats struct { Ack int `json:"ack"` Confirm int `json:"confirm"` Deliver int `json:"deliver"` DeliverGet int `json:"deliver_get"` DeliverNoAck int `json:"deliver_no_ack"` Get int `json:"get"` GetAck int `json:"get_ack"` GetNoAck int `json:"get_noack"` Publish int `json:"publish"` PublishIn int `json:"publish_in"` 
PublishOut int `json:"publish_out"` Redeliver int `json:"redeliver"` Return int `json:"return"` } type rmqBackingQueueStatus struct { Len int `json:"len"` AvgIngressRate float64 `json:"avg_ingress_rate"` AvgEgressRate float64 `json:"avg_egress_rate"` AvgAckIngressRate float64 `json:"avg_ack_ingress_rate"` AvgAckEgressRate float64 `json:"avg_ack_egress_rate"` MirrorSeen int `json:"mirror_seen"` MirrorSenders int `json:"mirror_senders"` } const ( DescRmqBackingQueueStatusAvgAckEgressRate = "Rate at which unacknowledged message records leave RAM, e.g. because acks arrive or unacked messages are paged out" DescRmqBackingQueueStatusAvgAckIngressRate = "Rate at which unacknowledged message records enter RAM, e.g. because messages are delivered requiring acknowledgement" DescRmqBackingQueueStatusAvgEgressRate = "Average egress (outbound) rate, not including messages that straight through to auto-acking consumers." DescRmqBackingQueueStatusAvgIngressRate = "Average ingress (inbound) rate, not including messages that straight through to auto-acking consumers." DescRmqBackingQueueStatusLen = "Total backing queue length." DescRmqConsumers = "Number of consumers." DescRmqConsumerUtilisation = "Fraction of the time (between 0.0 and 1.0) that the queue is able to immediately deliver messages to consumers. This can be less than 1.0 if consumers are limited by network congestion or prefetch count." DescRmqDiskFreeAlarm = "Whether the disk alarm has gone off." DescRmqDiskFree = "Disk free space in bytes." DescRmqDiskFreeLimit = "Point at which the disk alarm will go off." DescRmqDownSlaveNodes = "Count of down nodes having a copy of the queue." DescRmqFDTotal = "File descriptors available." DescRmqFDUsed = "Used file descriptors." DescRmqIOReadAvgTime = "Average wall time (milliseconds) for each disk read operation in the last statistics interval." DescRmqIOReadBytes = "Total number of bytes read from disk by the persister." 
DescRmqIOReadCount = "Total number of read operations by the persister." DescRmqIOReopenCount = "Total number of times the persister has needed to recycle file handles between queues. In an ideal world this number will be zero; if the number is large, performance might be improved by increasing the number of file handles available to RabbitMQ." DescRmqIOSeekAvgTime = "Average wall time (milliseconds) for each seek operation in the last statistics interval." DescRmqIOSeekCount = "Total number of seek operations by the persister." DescRmqIOSyncAvgTime = "Average wall time (milliseconds) for each sync operation in the last statistics interval." DescRmqIOSyncCount = "Total number of fsync() operations by the persister." DescRmqIOWriteAvgTime = "Average wall time (milliseconds) for each write operation in the last statistics interval." DescRmqIOWriteBytes = "Total number of bytes written to disk by the persister." DescRmqIOWriteCount = "Total number of write operations by the persister." DescRmqMemAlarm = "" DescRmqMemLimit = "Point at which the memory alarm will go off." DescRmqMemory = "Bytes of memory consumed by the Erlang process associated with the queue, including stack, heap and internal structures." DescRmqMemUsed = "Memory used in bytes." DescRmqMessageBytesPersistent = "Like messageBytes but counting only those messages which are persistent." DescRmqMessageBytesRAM = "Like messageBytes but counting only those messages which are in RAM." DescRmqMessageBytesReady = "Like messageBytes but counting only those messages ready to be delivered to clients." DescRmqMessageBytes = "Sum of the size of all message bodies in the queue. This does not include the message properties (including headers) or any overhead." DescRmqMessageBytesUnacknowledged = "Like messageBytes but counting only those messages delivered to clients but not yet acknowledged." DescRmqMessagesPersistent = "Total number of persistent messages in the queue (will always be 0 for transient queues)." 
DescRmqMessagesRAM = "Total number of messages which are resident in ram." DescRmqMessagesReady = "Number of messages ready to be delivered to clients." DescRmqMessagesReadyRAM = "Number of messages from messagesReady which are resident in ram." DescRmqMessages = "Sum of ready and unacknowledged messages (queue depth)." DescRmqMessageStatsAck = "Count of acknowledged messages." DescRmqMessageStatsConfirm = "Count of messages confirmed." DescRmqMessageStatsDeliver = "Count of messages delivered in acknowledgement mode to consumers." DescRmqMessageStatsDeliverGet = "Sum of deliver, deliverNoack, get, getNoack." DescRmqMessageStatsDeliverNoAck = "Count of messages delivered in no-acknowledgement mode to consumers." DescRmqMessageStatsGet = "Count of messages delivered in acknowledgement mode in response to basic.get." DescRmqMessageStatsGetNoack = "Count of messages delivered in no-acknowledgement mode in response to basic.get." DescRmqMessageStatsPublish = "Count of messages published." DescRmqMessageStatsPublishIn = "Count of messages published \"in\" to an exchange, i.e. not taking account of routing." DescRmqMessageStatsPublishOut = "Count of messages published \"out\" of an exchange, i.e. taking account of routing." DescRmqMessageStatsRedeliver = "Count of subset of messages in deliverGet which had the redelivered flag set." DescRmqMessageStatsReturn = "Count of messages returned to publisher as unroutable." DescRmqMessagesUnacknowledged = "Number of messages delivered to clients but not yet acknowledged." DescRmqMessagesUnacknowledgedRAM = "Number of messages from messagesUnacknowledged which are resident in ram." DescRmqMnesiaDiskTxCount = "Number of Mnesia transactions which have been performed that required writes to disk. (e.g. creating a durable queue). Only transactions which originated on this node are included." DescRmqMnesiaRAMTxCount = "Number of Mnesia transactions which have been performed that did not require writes to disk. (e.g. 
creating a transient queue). Only transactions which originated on this node are included." DescRmqMsgStoreReadCount = "Number of messages which have been read from the message store." DescRmqMsgStoreWriteCount = "Number of messages which have been written to the message store." DescRmqObjecttotalsChannels = "Overall number of channels." DescRmqObjectTotalsConnections = "Overall number of connections." DescRmqObjectTotalsConsumers = "Overall number of consumers." DescRmqObjectTotalsExchanges = "Overall number of exchanges." DescRmqObjectTotalsQueues = "Overall number of queues." DescRmqPartitions = "Count of network partitions this node is seeing." DescRmqProcessors = "Number of cores detected and usable by Erlang." DescRmqProcTotal = "Maximum number of Erlang processes." DescRmqProcUsed = "Number of Erlang processes in use." DescRmqQueueIndexJournalWriteCount = "Number of records written to the queue index journal. Each record represents a message being published to a queue, being delivered from a queue, and being acknowledged in a queue." DescRmqQueueIndexReadCount = "Number of records read from the queue index." DescRmqQueueIndexWriteCount = "Number of records written to the queue index." DescRmqQueueTotalsMessages = "Overall sum of ready and unacknowledged messages (queue depth)." DescRmqQueueTotalsMessagesReady = "Overall number of messages ready to be delivered to clients." DescRmqQueueTotalsMessagesUnacknowledged = "Overall number of messages delivered to clients but not yet acknowledged." DescRmqRunning = "Boolean for whether this node is up. Obviously if this is false, most other stats will be missing." DescRmqRunQueue = "Average number of Erlang processes waiting to run." DescRmqSlaveNodes = "Count of nodes having a copy of the queue." DescRmqSocketsTotal = "File descriptors available for use as sockets." DescRmqSocketsUsed = "File descriptors used as sockets." DescRmqState = "The state of the queue. 
Unknown=> -1, Running=> 0, Syncing=> 1, Flow=> 2, Down=> 3" DescRmqSynchronisedSlaveNodes = "Count of nodes having synchronised copy of the queue." DescRmqSyncMessages = "Count of already synchronised messages on a slave node." DescRmqUptime = "Node uptime in seconds." )
augporto/bosun
cmd/scollector/collectors/rabbitmq.go
GO
mit
25,540
require "spec_helper" describe ProjectWiki, models: true do let(:project) { create(:empty_project) } let(:repository) { project.repository } let(:user) { project.owner } let(:gitlab_shell) { Gitlab::Shell.new } let(:project_wiki) { ProjectWiki.new(project, user) } subject { project_wiki } before { project_wiki.wiki } describe "#path_with_namespace" do it "returns the project path with namespace with the .wiki extension" do expect(subject.path_with_namespace).to eq(project.path_with_namespace + ".wiki") end end describe '#web_url' do it 'returns the full web URL to the wiki' do expect(subject.web_url).to eq("#{Gitlab.config.gitlab.url}/#{project.path_with_namespace}/wikis/home") end end describe "#url_to_repo" do it "returns the correct ssh url to the repo" do expect(subject.url_to_repo).to eq(gitlab_shell.url_to_repo(subject.path_with_namespace)) end end describe "#ssh_url_to_repo" do it "equals #url_to_repo" do expect(subject.ssh_url_to_repo).to eq(subject.url_to_repo) end end describe "#http_url_to_repo" do it "provides the full http url to the repo" do gitlab_url = Gitlab.config.gitlab.url repo_http_url = "#{gitlab_url}/#{subject.path_with_namespace}.git" expect(subject.http_url_to_repo).to eq(repo_http_url) end end describe "#wiki_base_path" do it "returns the wiki base path" do wiki_base_path = "#{Gitlab.config.gitlab.relative_url_root}/#{project.path_with_namespace}/wikis" expect(subject.wiki_base_path).to eq(wiki_base_path) end end describe "#wiki" do it "contains a Gollum::Wiki instance" do expect(subject.wiki).to be_a Gollum::Wiki end it "creates a new wiki repo if one does not yet exist" do expect(project_wiki.create_page("index", "test content")).to be_truthy end it "raises CouldNotCreateWikiError if it can't create the wiki repository" do allow(project_wiki).to receive(:init_repo).and_return(false) expect { project_wiki.send(:create_repo!) }.to raise_exception(ProjectWiki::CouldNotCreateWikiError) end end describe "#empty?" 
do context "when the wiki repository is empty" do before do allow_any_instance_of(Gitlab::Shell).to receive(:add_repository) do create_temp_repo("#{Rails.root}/tmp/test-git-base-path/non-existant.wiki.git") end allow(project).to receive(:path_with_namespace).and_return("non-existant") end describe '#empty?' do subject { super().empty? } it { is_expected.to be_truthy } end end context "when the wiki has pages" do before do project_wiki.create_page("index", "This is an awesome new Gollum Wiki") end describe '#empty?' do subject { super().empty? } it { is_expected.to be_falsey } end end end describe "#pages" do before do create_page("index", "This is an awesome new Gollum Wiki") @pages = subject.pages end after do destroy_page(@pages.first.page) end it "returns an array of WikiPage instances" do expect(@pages.first).to be_a WikiPage end it "returns the correct number of pages" do expect(@pages.count).to eq(1) end end describe "#find_page" do before do create_page("index page", "This is an awesome Gollum Wiki") end after do destroy_page(subject.pages.first.page) end it "returns the latest version of the page if it exists" do page = subject.find_page("index page") expect(page.title).to eq("index page") end it "returns nil if the page does not exist" do expect(subject.find_page("non-existant")).to eq(nil) end it "can find a page by slug" do page = subject.find_page("index-page") expect(page.title).to eq("index page") end it "returns a WikiPage instance" do page = subject.find_page("index page") expect(page).to be_a WikiPage end end describe '#find_file' do before do file = Gollum::File.new(subject.wiki) allow_any_instance_of(Gollum::Wiki). to receive(:file).with('image.jpg', 'master', true). and_return(file) allow_any_instance_of(Gollum::File). to receive(:mime_type). and_return('image/jpeg') allow_any_instance_of(Gollum::Wiki). to receive(:file).with('non-existant', 'master', true). 
and_return(nil) end after do allow_any_instance_of(Gollum::Wiki).to receive(:file).and_call_original allow_any_instance_of(Gollum::File).to receive(:mime_type).and_call_original end it 'returns the latest version of the file if it exists' do file = subject.find_file('image.jpg') expect(file.mime_type).to eq('image/jpeg') end it 'returns nil if the page does not exist' do expect(subject.find_file('non-existant')).to eq(nil) end it 'returns a Gollum::File instance' do file = subject.find_file('image.jpg') expect(file).to be_a Gollum::File end end describe "#create_page" do after do destroy_page(subject.pages.first.page) end it "creates a new wiki page" do expect(subject.create_page("test page", "this is content")).not_to eq(false) expect(subject.pages.count).to eq(1) end it "returns false when a duplicate page exists" do subject.create_page("test page", "content") expect(subject.create_page("test page", "content")).to eq(false) end it "stores an error message when a duplicate page exists" do 2.times { subject.create_page("test page", "content") } expect(subject.error_message).to match(/Duplicate page:/) end it "sets the correct commit message" do subject.create_page("test page", "some content", :markdown, "commit message") expect(subject.pages.first.page.version.message).to eq("commit message") end it 'updates project activity' do expect(subject).to receive(:update_project_activity) subject.create_page('Test Page', 'This is content') end end describe "#update_page" do before do create_page("update-page", "some content") @gollum_page = subject.wiki.paged("update-page") subject.update_page(@gollum_page, "some other content", :markdown, "updated page") @page = subject.pages.first.page end after do destroy_page(@page) end it "updates the content of the page" do expect(@page.raw_data).to eq("some other content") end it "sets the correct commit message" do expect(@page.version.message).to eq("updated page") end it 'updates project activity' do expect(subject).to 
receive(:update_project_activity) subject.update_page(@gollum_page, 'Yet more content', :markdown, 'Updated page again') end end describe "#delete_page" do before do create_page("index", "some content") @page = subject.wiki.paged("index") end it "deletes the page" do subject.delete_page(@page) expect(subject.pages.count).to eq(0) end it 'updates project activity' do expect(subject).to receive(:update_project_activity) subject.delete_page(@page) end end describe '#create_repo!' do it 'creates a repository' do expect(subject).to receive(:init_repo). with(subject.path_with_namespace). and_return(true) expect(subject.repository).to receive(:after_create) expect(subject.create_repo!).to be_an_instance_of(Gollum::Wiki) end end describe '#hook_attrs' do it 'returns a hash with values' do expect(subject.hook_attrs).to be_a Hash expect(subject.hook_attrs.keys).to contain_exactly(:web_url, :git_ssh_url, :git_http_url, :path_with_namespace, :default_branch) end end private def create_temp_repo(path) FileUtils.mkdir_p path system(*%W(#{Gitlab.config.git.bin_path} init --quiet --bare -- #{path})) end def remove_temp_repo(path) FileUtils.rm_rf path end def commit_details { name: user.name, email: user.email, message: "test commit" } end def create_page(name, content) subject.wiki.write_page(name, :markdown, content, commit_details) end def destroy_page(page) subject.wiki.delete_page(page, commit_details) end end
mr-dxdy/gitlabhq
spec/models/project_wiki_spec.rb
Ruby
mit
8,476
class StaticController < ApplicationController def guidelines @title = t('static.guidelines.title') end def guidelines_tips @title = t('static.guidelines_tips.title') end def faq @title = t('static.faq.title') end def thank_you backer = Backer.find session[:thank_you_backer_id] redirect_to [backer.project, backer] end def sitemap # TODO: update this sitemap to use new homepage logic @home_page ||= Project.includes(:user, :category).visible.limit(6).all @expiring ||= Project.includes(:user, :category).visible.expiring.not_expired.order("(projects.expires_at), created_at DESC").limit(3).all @recent ||= Project.includes(:user, :category).visible.not_expiring.not_expired.where("projects.user_id <> 7329").order('created_at DESC').limit(3).all @successful ||= Project.includes(:user, :category).visible.successful.order("(projects.expires_at) DESC").limit(3).all return render 'sitemap' end end
alexandred/catarse
app/controllers/static_controller.rb
Ruby
mit
983
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Diagnostics; using System.Runtime.ExceptionServices; using System.Threading; using System.Threading.Tasks; namespace System.Net.Sockets { // This object is used to wrap a bunch of ConnectAsync operations // on behalf of a single user call to ConnectAsync with a DnsEndPoint internal abstract class MultipleConnectAsync { protected SocketAsyncEventArgs _userArgs; protected SocketAsyncEventArgs _internalArgs; protected DnsEndPoint _endPoint; protected IPAddress[] _addressList; protected int _nextAddress; private enum State { NotStarted, DnsQuery, ConnectAttempt, Completed, Canceled, } private State _state; private object _lockObject = new object(); // Called by Socket to kick off the ConnectAsync process. We'll complete the user's SAEA // when it's done. Returns true if the operation will be asynchronous, false if it has failed synchronously public bool StartConnectAsync(SocketAsyncEventArgs args, DnsEndPoint endPoint) { lock (_lockObject) { if (endPoint.AddressFamily != AddressFamily.Unspecified && endPoint.AddressFamily != AddressFamily.InterNetwork && endPoint.AddressFamily != AddressFamily.InterNetworkV6) { NetEventSource.Fail(this, $"Unexpected endpoint address family: {endPoint.AddressFamily}"); } _userArgs = args; _endPoint = endPoint; // If Cancel() was called before we got the lock, it only set the state to Canceled: we need to // fail synchronously from here. Once State.DnsQuery is set, the Cancel() call will handle calling AsyncFail. 
if (_state == State.Canceled) { SyncFail(new SocketException((int)SocketError.OperationAborted)); return false; } if (_state != State.NotStarted) { NetEventSource.Fail(this, "MultipleConnectAsync.StartConnectAsync(): Unexpected object state"); } _state = State.DnsQuery; IAsyncResult result = Dns.BeginGetHostAddresses(endPoint.Host, new AsyncCallback(DnsCallback), null); if (result.CompletedSynchronously) { return DoDnsCallback(result, true); } else { return true; } } } // Callback which fires when the Dns Resolve is complete private void DnsCallback(IAsyncResult result) { if (!result.CompletedSynchronously) { DoDnsCallback(result, false); } } // Called when the DNS query completes (either synchronously or asynchronously). Checks for failure and // starts the first connection attempt if it succeeded. Returns true if the operation will be asynchronous, // false if it has failed synchronously. private bool DoDnsCallback(IAsyncResult result, bool sync) { Exception exception = null; lock (_lockObject) { // If the connection attempt was canceled during the dns query, the user's callback has already been // called asynchronously and we simply need to return. 
if (_state == State.Canceled) { return true; } if (_state != State.DnsQuery) { NetEventSource.Fail(this, "MultipleConnectAsync.DoDnsCallback(): Unexpected object state"); } try { _addressList = Dns.EndGetHostAddresses(result); if (_addressList == null) { NetEventSource.Fail(this, "MultipleConnectAsync.DoDnsCallback(): EndGetHostAddresses returned null!"); } } catch (Exception e) { _state = State.Completed; exception = e; } // If the dns query succeeded, try to connect to the first address if (exception == null) { _state = State.ConnectAttempt; _internalArgs = new SocketAsyncEventArgs(); _internalArgs.Completed += InternalConnectCallback; _internalArgs.CopyBufferFrom(_userArgs); exception = AttemptConnection(); if (exception != null) { // There was a synchronous error while connecting _state = State.Completed; } } } // Call this outside of the lock because it might call the user's callback. if (exception != null) { return Fail(sync, exception); } else { return true; } } // Callback which fires when an internal connection attempt completes. // If it failed and there are more addresses to try, do it. private void InternalConnectCallback(object sender, SocketAsyncEventArgs args) { Exception exception = null; lock (_lockObject) { if (_state == State.Canceled) { // If Cancel was called before we got the lock, the Socket will be closed soon. We need to report // OperationAborted (even though the connection actually completed), or the user will try to use a // closed Socket. exception = new SocketException((int)SocketError.OperationAborted); } else { Debug.Assert(_state == State.ConnectAttempt); if (args.SocketError == SocketError.Success) { // The connection attempt succeeded; go to the completed state. // The callback will be called outside the lock. _state = State.Completed; } else if (args.SocketError == SocketError.OperationAborted) { // The socket was closed while the connect was in progress. 
This can happen if the user // closes the socket, and is equivalent to a call to CancelConnectAsync exception = new SocketException((int)SocketError.OperationAborted); _state = State.Canceled; } else { // Keep track of this because it will be overwritten by AttemptConnection SocketError currentFailure = args.SocketError; Exception connectException = AttemptConnection(); if (connectException == null) { // don't call the callback, another connection attempt is successfully started return; } else { SocketException socketException = connectException as SocketException; if (socketException != null && socketException.SocketErrorCode == SocketError.NoData) { // If the error is NoData, that means there are no more IPAddresses to attempt // a connection to. Return the last error from an actual connection instead. exception = new SocketException((int)currentFailure); } else { exception = connectException; } _state = State.Completed; } } } } if (exception == null) { Succeed(); } else { AsyncFail(exception); } } // Called to initiate a connection attempt to the next address in the list. Returns an exception // if the attempt failed synchronously, or null if it was successfully initiated. 
private Exception AttemptConnection() { try { Socket attemptSocket; IPAddress attemptAddress = GetNextAddress(out attemptSocket); if (attemptAddress == null) { return new SocketException((int)SocketError.NoData); } _internalArgs.RemoteEndPoint = new IPEndPoint(attemptAddress, _endPoint.Port); return AttemptConnection(attemptSocket, _internalArgs); } catch (Exception e) { if (e is ObjectDisposedException) { NetEventSource.Fail(this, "unexpected ObjectDisposedException"); } return e; } } private Exception AttemptConnection(Socket attemptSocket, SocketAsyncEventArgs args) { try { if (attemptSocket == null) { NetEventSource.Fail(null, "attemptSocket is null!"); } bool pending = attemptSocket.ConnectAsync(args); if (!pending) { InternalConnectCallback(null, args); } } catch (ObjectDisposedException) { // This can happen if the user closes the socket, and is equivalent to a call // to CancelConnectAsync return new SocketException((int)SocketError.OperationAborted); } catch (Exception e) { return e; } return null; } protected abstract void OnSucceed(); private void Succeed() { OnSucceed(); _userArgs.FinishWrapperConnectSuccess(_internalArgs.ConnectSocket, _internalArgs.BytesTransferred, _internalArgs.SocketFlags); _internalArgs.Dispose(); } protected abstract void OnFail(bool abortive); private bool Fail(bool sync, Exception e) { if (sync) { SyncFail(e); return false; } else { AsyncFail(e); return true; } } private void SyncFail(Exception e) { OnFail(false); if (_internalArgs != null) { _internalArgs.Dispose(); } SocketException socketException = e as SocketException; if (socketException != null) { _userArgs.FinishConnectByNameSyncFailure(socketException, 0, SocketFlags.None); } else { ExceptionDispatchInfo.Throw(e); } } private void AsyncFail(Exception e) { OnFail(false); if (_internalArgs != null) { _internalArgs.Dispose(); } _userArgs.FinishOperationAsyncFailure(e, 0, SocketFlags.None); } public void Cancel() { bool callOnFail = false; lock (_lockObject) { switch 
(_state) { case State.NotStarted: // Cancel was called before the Dns query was started. The dns query won't be started // and the connection attempt will fail synchronously after the state change to DnsQuery. // All we need to do here is close all the sockets. callOnFail = true; break; case State.DnsQuery: // Cancel was called after the Dns query was started, but before it finished. We can't // actually cancel the Dns query, but we'll fake it by failing the connect attempt asynchronously // from here, and silently dropping the connection attempt when the Dns query finishes. Task.Factory.StartNew( s => CallAsyncFail(s), null, CancellationToken.None, TaskCreationOptions.DenyChildAttach, TaskScheduler.Default); callOnFail = true; break; case State.ConnectAttempt: // Cancel was called after the Dns query completed, but before we had a connection result to give // to the user. Closing the sockets will cause any in-progress ConnectAsync call to fail immediately // with OperationAborted, and will cause ObjectDisposedException from any new calls to ConnectAsync // (which will be translated to OperationAborted by AttemptConnection). callOnFail = true; break; case State.Completed: // Cancel was called after we locked in a result to give to the user. Ignore it and give the user // the real completion. break; default: NetEventSource.Fail(this, "Unexpected object state"); break; } _state = State.Canceled; } // Call this outside the lock because Socket.Close may block if (callOnFail) { OnFail(true); } } // Call AsyncFail on a threadpool thread so it's asynchronous with respect to Cancel(). private void CallAsyncFail(object ignored) { AsyncFail(new SocketException((int)SocketError.OperationAborted)); } protected abstract IPAddress GetNextAddress(out Socket attemptSocket); } // Used when the instance ConnectAsync method is called, or when the DnsEndPoint specified // an AddressFamily. 
There's only one Socket, and we only try addresses that match its // AddressFamily internal sealed class SingleSocketMultipleConnectAsync : MultipleConnectAsync { private Socket _socket; private bool _userSocket; public SingleSocketMultipleConnectAsync(Socket socket, bool userSocket) { _socket = socket; _userSocket = userSocket; } protected override IPAddress GetNextAddress(out Socket attemptSocket) { _socket.ReplaceHandleIfNecessaryAfterFailedConnect(); IPAddress rval = null; do { if (_nextAddress >= _addressList.Length) { attemptSocket = null; return null; } rval = _addressList[_nextAddress]; ++_nextAddress; } while (!_socket.CanTryAddressFamily(rval.AddressFamily)); attemptSocket = _socket; return rval; } protected override void OnFail(bool abortive) { // Close the socket if this is an abortive failure (CancelConnectAsync) // or if we created it internally if (abortive || !_userSocket) { _socket.Dispose(); } } // nothing to do on success protected override void OnSucceed() { } } // This is used when the static ConnectAsync method is called. We don't know the address family // ahead of time, so we create both IPv4 and IPv6 sockets. 
internal sealed class DualSocketMultipleConnectAsync : MultipleConnectAsync { private Socket _socket4; private Socket _socket6; public DualSocketMultipleConnectAsync(SocketType socketType, ProtocolType protocolType) { if (Socket.OSSupportsIPv4) { _socket4 = new Socket(AddressFamily.InterNetwork, socketType, protocolType); } if (Socket.OSSupportsIPv6) { _socket6 = new Socket(AddressFamily.InterNetworkV6, socketType, protocolType); } } protected override IPAddress GetNextAddress(out Socket attemptSocket) { IPAddress rval = null; attemptSocket = null; while (attemptSocket == null) { if (_nextAddress >= _addressList.Length) { return null; } rval = _addressList[_nextAddress]; ++_nextAddress; if (rval.AddressFamily == AddressFamily.InterNetworkV6) { attemptSocket = _socket6; } else if (rval.AddressFamily == AddressFamily.InterNetwork) { attemptSocket = _socket4; } } attemptSocket?.ReplaceHandleIfNecessaryAfterFailedConnect(); return rval; } // on success, close the socket that wasn't used protected override void OnSucceed() { if (_socket4 != null && !_socket4.Connected) { _socket4.Dispose(); } if (_socket6 != null && !_socket6.Connected) { _socket6.Dispose(); } } // close both sockets whether its abortive or not - we always create them internally protected override void OnFail(bool abortive) { _socket4?.Dispose(); _socket6?.Dispose(); } } }
Ermiar/corefx
src/System.Net.Sockets/src/System/Net/Sockets/MultipleConnectAsync.cs
C#
mit
18,925
package org.knowm.xchange.ccex.dto.account; import com.fasterxml.jackson.annotation.JsonProperty; import java.math.BigDecimal; public class CCEXBalance { private String Currency; private BigDecimal Balance; private BigDecimal Available; private BigDecimal Pending; private String CryptoAddress; public CCEXBalance( @JsonProperty("Currency") String currency, @JsonProperty("Balance") BigDecimal balance, @JsonProperty("Available") BigDecimal available, @JsonProperty("Pending") BigDecimal pending, @JsonProperty("CryptoAddress") String cryptoAddress) { super(); Currency = currency; Balance = balance; Available = available; Pending = pending; CryptoAddress = cryptoAddress; } public String getCurrency() { return Currency; } public void setCurrency(String currency) { Currency = currency; } public BigDecimal getBalance() { return Balance; } public void setBalance(BigDecimal balance) { Balance = balance; } public BigDecimal getAvailable() { return Available; } public void setAvailable(BigDecimal available) { Available = available; } public BigDecimal getPending() { return Pending; } public void setPending(BigDecimal pending) { Pending = pending; } public String getCryptoAddress() { return CryptoAddress; } public void setCryptoAddress(String cryptoAddress) { CryptoAddress = cryptoAddress; } @Override public String toString() { return "CCEXBalance [Currency=" + Currency + ", Balance=" + Balance + ", Available=" + Available + ", Pending=" + Pending + ", CryptoAddress=" + CryptoAddress + "]"; } }
timmolter/XChange
xchange-ccex/src/main/java/org/knowm/xchange/ccex/dto/account/CCEXBalance.java
Java
mit
1,759
angular.module('ordercloud-address', []) .directive('ordercloudAddressForm', AddressFormDirective) .directive('ordercloudAddressInfo', AddressInfoDirective) .filter('address', AddressFilter) ; function AddressFormDirective(OCGeography) { return { restrict: 'E', scope: { address: '=', isbilling: '=' }, templateUrl: 'common/address/templates/address.form.tpl.html', link: function(scope) { scope.countries = OCGeography.Countries; scope.states = OCGeography.States; } }; } function AddressInfoDirective() { return { restrict: 'E', scope: { addressid: '@' }, templateUrl: 'common/address/templates/address.info.tpl.html', controller: 'AddressInfoCtrl', controllerAs: 'addressInfo' }; } function AddressFilter() { return function(address, option) { if (!address) return null; if (option === 'full') { var result = []; if (address.AddressName) { result.push(address.AddressName); } result.push((address.FirstName ? address.FirstName + ' ' : '') + address.LastName); result.push(address.Street1); if (address.Street2) { result.push(address.Street2); } result.push(address.City + ', ' + address.State + ' ' + address.Zip); return result.join('\n'); } else { return address.Street1 + (address.Street2 ? ', ' + address.Street2 : ''); } } }
Four51/OrderCloud-Seed-AngularJS
src/app/common/address/address.js
JavaScript
mit
1,620
// RUN: %clang_cc1 -verify -fopenmp=libiomp5 %s void foo() { } bool foobool(int argc) { return argc; } struct S1; // expected-note {{declared here}} template <class T, typename S, int N, int ST> // expected-note {{declared here}} T tmain(T argc, S **argv) { //expected-note 2 {{declared here}} #pragma omp parallel for collapse // expected-error {{expected '(' after 'collapse'}} for (int i = ST; i < N; i++) argv[0][i] = argv[0][i] - argv[0][i-ST]; #pragma omp parallel for collapse ( // expected-error {{expected expression}} expected-error {{expected ')'}} expected-note {{to match this '('}} for (int i = ST; i < N; i++) argv[0][i] = argv[0][i] - argv[0][i-ST]; #pragma omp parallel for collapse () // expected-error {{expected expression}} for (int i = ST; i < N; i++) argv[0][i] = argv[0][i] - argv[0][i-ST]; // expected-error@+3 {{expected ')'}} expected-note@+3 {{to match this '('}} // expected-error@+2 2 {{expression is not an integral constant expression}} // expected-note@+1 2 {{read of non-const variable 'argc' is not allowed in a constant expression}} #pragma omp parallel for collapse (argc for (int i = ST; i < N; i++) argv[0][i] = argv[0][i] - argv[0][i-ST]; // expected-error@+1 2 {{argument to 'collapse' clause must be a positive integer value}} #pragma omp parallel for collapse (ST // expected-error {{expected ')'}} expected-note {{to match this '('}} for (int i = ST; i < N; i++) argv[0][i] = argv[0][i] - argv[0][i-ST]; #pragma omp parallel for collapse (1)) // expected-warning {{extra tokens at the end of '#pragma omp parallel for' are ignored}} for (int i = ST; i < N; i++) argv[0][i] = argv[0][i] - argv[0][i-ST]; #pragma omp parallel for collapse ((ST > 0) ? 
1 + ST : 2) // expected-note 2 {{as specified in 'collapse' clause}} for (int i = ST; i < N; i++) argv[0][i] = argv[0][i] - argv[0][i-ST]; // expected-error 2 {{expected 2 for loops after '#pragma omp parallel for', but found only 1}} // expected-error@+3 2 {{directive '#pragma omp parallel for' cannot contain more than one 'collapse' clause}} // expected-error@+2 2 {{argument to 'collapse' clause must be a positive integer value}} // expected-error@+1 2 {{expression is not an integral constant expression}} #pragma omp parallel for collapse (foobool(argc)), collapse (true), collapse (-5) for (int i = ST; i < N; i++) argv[0][i] = argv[0][i] - argv[0][i-ST]; #pragma omp parallel for collapse (S) // expected-error {{'S' does not refer to a value}} for (int i = ST; i < N; i++) argv[0][i] = argv[0][i] - argv[0][i-ST]; // expected-error@+1 2 {{expression is not an integral constant expression}} #pragma omp parallel for collapse (argv[1]=2) // expected-error {{expected ')'}} expected-note {{to match this '('}} for (int i = ST; i < N; i++) argv[0][i] = argv[0][i] - argv[0][i-ST]; #pragma omp parallel for collapse (1) for (int i = ST; i < N; i++) argv[0][i] = argv[0][i] - argv[0][i-ST]; #pragma omp parallel for collapse (N) // expected-error {{argument to 'collapse' clause must be a positive integer value}} for (T i = ST; i < N; i++) argv[0][i] = argv[0][i] - argv[0][i-ST]; #pragma omp parallel for collapse (2) // expected-note {{as specified in 'collapse' clause}} foo(); // expected-error {{expected 2 for loops after '#pragma omp parallel for'}} return argc; } int main(int argc, char **argv) { #pragma omp parallel for collapse // expected-error {{expected '(' after 'collapse'}} for (int i = 4; i < 12; i++) argv[0][i] = argv[0][i] - argv[0][i-4]; #pragma omp parallel for collapse ( // expected-error {{expected expression}} expected-error {{expected ')'}} expected-note {{to match this '('}} for (int i = 4; i < 12; i++) argv[0][i] = argv[0][i] - argv[0][i-4]; #pragma omp 
parallel for collapse () // expected-error {{expected expression}} for (int i = 4; i < 12; i++) argv[0][i] = argv[0][i] - argv[0][i-4]; #pragma omp parallel for collapse (4 // expected-error {{expected ')'}} expected-note {{to match this '('}} expected-note {{as specified in 'collapse' clause}} for (int i = 4; i < 12; i++) argv[0][i] = argv[0][i] - argv[0][i-4]; // expected-error {{expected 4 for loops after '#pragma omp parallel for', but found only 1}} #pragma omp parallel for collapse (2+2)) // expected-warning {{extra tokens at the end of '#pragma omp parallel for' are ignored}} expected-note {{as specified in 'collapse' clause}} for (int i = 4; i < 12; i++) argv[0][i] = argv[0][i] - argv[0][i-4]; // expected-error {{expected 4 for loops after '#pragma omp parallel for', but found only 1}} #pragma omp parallel for collapse (foobool(1) > 0 ? 1 : 2) // expected-error {{expression is not an integral constant expression}} for (int i = 4; i < 12; i++) argv[0][i] = argv[0][i] - argv[0][i-4]; // expected-error@+3 {{expression is not an integral constant expression}} // expected-error@+2 2 {{directive '#pragma omp parallel for' cannot contain more than one 'collapse' clause}} // expected-error@+1 2 {{argument to 'collapse' clause must be a positive integer value}} #pragma omp parallel for collapse (foobool(argc)), collapse (true), collapse (-5) for (int i = 4; i < 12; i++) argv[0][i] = argv[0][i] - argv[0][i-4]; #pragma omp parallel for collapse (S1) // expected-error {{'S1' does not refer to a value}} for (int i = 4; i < 12; i++) argv[0][i] = argv[0][i] - argv[0][i-4]; // expected-error@+1 {{expression is not an integral constant expression}} #pragma omp parallel for collapse (argv[1]=2) // expected-error {{expected ')'}} expected-note {{to match this '('}} for (int i = 4; i < 12; i++) argv[0][i] = argv[0][i] - argv[0][i-4]; // expected-error@+3 {{statement after '#pragma omp parallel for' must be a for loop}} // expected-note@+1 {{in instantiation of function 
template specialization 'tmain<int, char, -1, -2>' requested here}} #pragma omp parallel for collapse(collapse(tmain<int, char, -1, -2>(argc, argv) // expected-error 2 {{expected ')'}} expected-note 2 {{to match this '('}} foo(); #pragma omp parallel for collapse (2) // expected-note {{as specified in 'collapse' clause}} foo(); // expected-error {{expected 2 for loops after '#pragma omp parallel for'}} // expected-note@+1 {{in instantiation of function template specialization 'tmain<int, char, 1, 0>' requested here}} return tmain<int, char, 1, 0>(argc, argv); }
Rapier-Foundation/rapier-script
src/rapierlang/test/OpenMP/parallel_for_collapse_messages.cpp
C++
mit
6,389