code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
/* * ProGuard -- shrinking, optimization, obfuscation, and preverification * of Java bytecode. * * Copyright (c) 2002-2017 Eric Lafortune @ GuardSquare * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 2 of the License, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package proguard.classfile.attribute.annotation.target.visitor; import proguard.classfile.*; import proguard.classfile.attribute.CodeAttribute; import proguard.classfile.attribute.annotation.*; import proguard.classfile.attribute.annotation.target.*; /** * This interface specifies the methods for a visitor of <code>TargetInfo</code> * objects. 
* * @author Eric Lafortune */ public interface TargetInfoVisitor { public void visitTypeParameterTargetInfo( Clazz clazz, TypeAnnotation typeAnnotation, TypeParameterTargetInfo typeParameterTargetInfo); public void visitTypeParameterTargetInfo( Clazz clazz, Method method, TypeAnnotation typeAnnotation, TypeParameterTargetInfo typeParameterTargetInfo); public void visitSuperTypeTargetInfo( Clazz clazz, TypeAnnotation typeAnnotation, SuperTypeTargetInfo superTypeTargetInfo); public void visitTypeParameterBoundTargetInfo(Clazz clazz, TypeAnnotation typeAnnotation, TypeParameterBoundTargetInfo typeParameterBoundTargetInfo); public void visitTypeParameterBoundTargetInfo(Clazz clazz, Field field, TypeAnnotation typeAnnotation, TypeParameterBoundTargetInfo typeParameterBoundTargetInfo); public void visitTypeParameterBoundTargetInfo(Clazz clazz, Method method, TypeAnnotation typeAnnotation, TypeParameterBoundTargetInfo typeParameterBoundTargetInfo); public void visitEmptyTargetInfo( Clazz clazz, Field field, TypeAnnotation typeAnnotation, EmptyTargetInfo emptyTargetInfo); public void visitEmptyTargetInfo( Clazz clazz, Method method, TypeAnnotation typeAnnotation, EmptyTargetInfo emptyTargetInfo); public void visitFormalParameterTargetInfo( Clazz clazz, Method method, TypeAnnotation typeAnnotation, FormalParameterTargetInfo formalParameterTargetInfo); public void visitThrowsTargetInfo( Clazz clazz, Method method, TypeAnnotation typeAnnotation, ThrowsTargetInfo throwsTargetInfo); public void visitLocalVariableTargetInfo( Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, LocalVariableTargetInfo localVariableTargetInfo); public void visitCatchTargetInfo( Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, CatchTargetInfo catchTargetInfo); public void visitOffsetTargetInfo( Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, OffsetTargetInfo offsetTargetInfo); public 
void visitTypeArgumentTargetInfo( Clazz clazz, Method method, CodeAttribute codeAttribute, TypeAnnotation typeAnnotation, TypeArgumentTargetInfo typeArgumentTargetInfo); }
dslomov/bazel
third_party/java/proguard/proguard5.3.3/src/proguard/classfile/attribute/annotation/target/visitor/TargetInfoVisitor.java
Java
apache-2.0
3,983
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.NoSuchElementException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.mapred.IFile.Reader; import org.apache.hadoop.mapred.IFile.Writer; import org.apache.hadoop.mapred.Merger.Segment; import org.apache.hadoop.mapreduce.MRConfig; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.CryptoUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * <code>BackupStore</code> is an utility class that is used to support * the mark-reset functionality of values 
iterator * * <p>It has two caches - a memory cache and a file cache where values are * stored as they are iterated, after a mark. On reset, values are retrieved * from these caches. Framework moves from the memory cache to the * file cache when the memory cache becomes full. * */ @InterfaceAudience.Private @InterfaceStability.Unstable public class BackupStore<K,V> { private static final Logger LOG = LoggerFactory.getLogger(BackupStore.class.getName()); private static final int MAX_VINT_SIZE = 9; private static final int EOF_MARKER_SIZE = 2 * MAX_VINT_SIZE; private final TaskAttemptID tid; private MemoryCache memCache; private FileCache fileCache; List<Segment<K,V>> segmentList = new LinkedList<Segment<K,V>>(); private int readSegmentIndex = 0; private int firstSegmentOffset = 0; private int currentKVOffset = 0; private int nextKVOffset = -1; private DataInputBuffer currentKey = null; private DataInputBuffer currentValue = new DataInputBuffer(); private DataInputBuffer currentDiskValue = new DataInputBuffer(); private boolean hasMore = false; private boolean inReset = false; private boolean clearMarkFlag = false; private boolean lastSegmentEOF = false; private Configuration conf; public BackupStore(Configuration conf, TaskAttemptID taskid) throws IOException { final float bufferPercent = conf.getFloat(JobContext.REDUCE_MARKRESET_BUFFER_PERCENT, 0f); if (bufferPercent > 1.0 || bufferPercent < 0.0) { throw new IOException(JobContext.REDUCE_MARKRESET_BUFFER_PERCENT + bufferPercent); } int maxSize = (int)Math.min( Runtime.getRuntime().maxMemory() * bufferPercent, Integer.MAX_VALUE); // Support an absolute size also. int tmp = conf.getInt(JobContext.REDUCE_MARKRESET_BUFFER_SIZE, 0); if (tmp > 0) { maxSize = tmp; } memCache = new MemoryCache(maxSize); fileCache = new FileCache(conf); tid = taskid; this.conf = conf; LOG.info("Created a new BackupStore with a memory of " + maxSize); } /** * Write the given K,V to the cache. 
* Write to memcache if space is available, else write to the filecache * @param key * @param value * @throws IOException */ public void write(DataInputBuffer key, DataInputBuffer value) throws IOException { assert (key != null && value != null); if (fileCache.isActive()) { fileCache.write(key, value); return; } if (memCache.reserveSpace(key, value)) { memCache.write(key, value); } else { fileCache.activate(); fileCache.write(key, value); } } public void mark() throws IOException { // We read one KV pair in advance in hasNext. // If hasNext has read the next KV pair from a new segment, but the // user has not called next() for that KV, then reset the readSegmentIndex // to the previous segment if (nextKVOffset == 0) { assert (readSegmentIndex != 0); assert (currentKVOffset != 0); readSegmentIndex --; } // just drop segments before the current active segment int i = 0; Iterator<Segment<K,V>> itr = segmentList.iterator(); while (itr.hasNext()) { Segment<K,V> s = itr.next(); if (i == readSegmentIndex) { break; } s.close(); itr.remove(); i++; LOG.debug("Dropping a segment"); } // FirstSegmentOffset is the offset in the current segment from where we // need to start reading on the next reset firstSegmentOffset = currentKVOffset; readSegmentIndex = 0; LOG.debug("Setting the FirsSegmentOffset to " + currentKVOffset); } public void reset() throws IOException { // Create a new segment for the previously written records only if we // are not already in the reset mode if (!inReset) { if (fileCache.isActive) { fileCache.createInDiskSegment(); } else { memCache.createInMemorySegment(); } } inReset = true; // Reset the segments to the correct position from where the next read // should begin. for (int i = 0; i < segmentList.size(); i++) { Segment<K,V> s = segmentList.get(i); if (s.inMemory()) { int offset = (i == 0) ? 
firstSegmentOffset : 0; s.getReader().reset(offset); } else { s.closeReader(); if (i == 0) { s.reinitReader(firstSegmentOffset); s.getReader().disableChecksumValidation(); } } } currentKVOffset = firstSegmentOffset; nextKVOffset = -1; readSegmentIndex = 0; hasMore = false; lastSegmentEOF = false; LOG.debug("Reset - First segment offset is " + firstSegmentOffset + " Segment List Size is " + segmentList.size()); } public boolean hasNext() throws IOException { if (lastSegmentEOF) { return false; } // We read the next KV from the cache to decide if there is any left. // Since hasNext can be called several times before the actual call to // next(), we use hasMore to avoid extra reads. hasMore is set to false // when the user actually consumes this record in next() if (hasMore) { return true; } Segment<K,V> seg = segmentList.get(readSegmentIndex); // Mark the current position. This would be set to currentKVOffset // when the user consumes this record in next(). nextKVOffset = (int) seg.getActualPosition(); if (seg.nextRawKey()) { currentKey = seg.getKey(); seg.getValue(currentValue); hasMore = true; return true; } else { if (!seg.inMemory()) { seg.closeReader(); } } // If this is the last segment, mark the lastSegmentEOF flag and return if (readSegmentIndex == segmentList.size() - 1) { nextKVOffset = -1; lastSegmentEOF = true; return false; } nextKVOffset = 0; readSegmentIndex ++; Segment<K,V> nextSegment = segmentList.get(readSegmentIndex); // We possibly are moving from a memory segment to a disk segment. // Reset so that we do not corrupt the in-memory segment buffer. 
// See HADOOP-5494 if (!nextSegment.inMemory()) { currentValue.reset(currentDiskValue.getData(), currentDiskValue.getLength()); nextSegment.init(null); } if (nextSegment.nextRawKey()) { currentKey = nextSegment.getKey(); nextSegment.getValue(currentValue); hasMore = true; return true; } else { throw new IOException("New segment did not have even one K/V"); } } public void next() throws IOException { if (!hasNext()) { throw new NoSuchElementException("iterate past last value"); } // Reset hasMore. See comment in hasNext() hasMore = false; currentKVOffset = nextKVOffset; nextKVOffset = -1; } public DataInputBuffer nextValue() { return currentValue; } public DataInputBuffer nextKey() { return currentKey; } public void reinitialize() throws IOException { if (segmentList.size() != 0) { clearSegmentList(); } memCache.reinitialize(true); fileCache.reinitialize(); readSegmentIndex = firstSegmentOffset = 0; currentKVOffset = 0; nextKVOffset = -1; hasMore = inReset = clearMarkFlag = false; } /** * This function is called the ValuesIterator when a mark is called * outside of a reset zone. */ public void exitResetMode() throws IOException { inReset = false; if (clearMarkFlag ) { // If a flag was set to clear mark, do the reinit now. 
// See clearMark() reinitialize(); return; } if (!fileCache.isActive) { memCache.reinitialize(false); } } /** For writing the first key and value bytes directly from the * value iterators, pass the current underlying output stream * @param length The length of the impending write */ public DataOutputStream getOutputStream(int length) throws IOException { if (memCache.reserveSpace(length)) { return memCache.dataOut; } else { fileCache.activate(); return fileCache.writer.getOutputStream(); } } /** This method is called by the valueIterators after writing the first * key and value bytes to the BackupStore * @param length */ public void updateCounters(int length) { if (fileCache.isActive) { fileCache.writer.updateCountersForExternalAppend(length); } else { memCache.usedSize += length; } } public void clearMark() throws IOException { if (inReset) { // If we are in the reset mode, we just mark a flag and come out // The actual re initialization would be done when we exit the reset // mode clearMarkFlag = true; } else { reinitialize(); } } private void clearSegmentList() throws IOException { for (Segment<K,V> segment: segmentList) { long len = segment.getLength(); segment.close(); if (segment.inMemory()) { memCache.unreserve(len); } } segmentList.clear(); } class MemoryCache { private DataOutputBuffer dataOut; private int blockSize; private int usedSize; private final BackupRamManager ramManager; // Memory cache is made up of blocks. private int defaultBlockSize = 1024 * 1024; public MemoryCache(int maxSize) { ramManager = new BackupRamManager(maxSize); if (maxSize < defaultBlockSize) { defaultBlockSize = maxSize; } } public void unreserve(long len) { ramManager.unreserve((int)len); } /** * Re-initialize the memory cache. * * @param clearAll If true, re-initialize the ramManager also. 
*/ void reinitialize(boolean clearAll) { if (clearAll) { ramManager.reinitialize(); } int allocatedSize = createNewMemoryBlock(defaultBlockSize, defaultBlockSize); assert(allocatedSize == defaultBlockSize || allocatedSize == 0); LOG.debug("Created a new mem block of " + allocatedSize); } private int createNewMemoryBlock(int requestedSize, int minSize) { int allocatedSize = ramManager.reserve(requestedSize, minSize); usedSize = 0; if (allocatedSize == 0) { dataOut = null; blockSize = 0; } else { dataOut = new DataOutputBuffer(allocatedSize); blockSize = allocatedSize; } return allocatedSize; } /** * This method determines if there is enough space left in the * memory cache to write to the requested length + space for * subsequent EOF makers. * @param length * @return true if enough space is available */ boolean reserveSpace(int length) throws IOException { int availableSize = blockSize - usedSize; if (availableSize >= length + EOF_MARKER_SIZE) { return true; } // Not enough available. Close this block assert (!inReset); createInMemorySegment(); // Create a new block int tmp = Math.max(length + EOF_MARKER_SIZE, defaultBlockSize); availableSize = createNewMemoryBlock(tmp, (length + EOF_MARKER_SIZE)); return (availableSize == 0) ? 
false : true; } boolean reserveSpace(DataInputBuffer key, DataInputBuffer value) throws IOException { int keyLength = key.getLength() - key.getPosition(); int valueLength = value.getLength() - value.getPosition(); int requestedSize = keyLength + valueLength + WritableUtils.getVIntSize(keyLength) + WritableUtils.getVIntSize(valueLength); return reserveSpace(requestedSize); } /** * Write the key and value to the cache in the IFile format * @param key * @param value * @throws IOException */ public void write(DataInputBuffer key, DataInputBuffer value) throws IOException { int keyLength = key.getLength() - key.getPosition(); int valueLength = value.getLength() - value.getPosition(); WritableUtils.writeVInt(dataOut, keyLength); WritableUtils.writeVInt(dataOut, valueLength); dataOut.write(key.getData(), key.getPosition(), keyLength); dataOut.write(value.getData(), value.getPosition(), valueLength); usedSize += keyLength + valueLength + WritableUtils.getVIntSize(keyLength) + WritableUtils.getVIntSize(valueLength); LOG.debug("ID: " + segmentList.size() + " WRITE TO MEM"); } /** * This method creates a memory segment from the existing buffer * @throws IOException */ void createInMemorySegment () throws IOException { // If nothing was written in this block because the record size // was greater than the allocated block size, just return. if (usedSize == 0) { ramManager.unreserve(blockSize); return; } // spaceAvailable would have ensured that there is enough space // left for the EOF markers. 
assert ((blockSize - usedSize) >= EOF_MARKER_SIZE); WritableUtils.writeVInt(dataOut, IFile.EOF_MARKER); WritableUtils.writeVInt(dataOut, IFile.EOF_MARKER); usedSize += EOF_MARKER_SIZE; ramManager.unreserve(blockSize - usedSize); Reader<K, V> reader = new org.apache.hadoop.mapreduce.task.reduce.InMemoryReader<K, V>(null, (org.apache.hadoop.mapred.TaskAttemptID) tid, dataOut.getData(), 0, usedSize, conf); Segment<K, V> segment = new Segment<K, V>(reader, false); segmentList.add(segment); LOG.debug("Added Memory Segment to List. List Size is " + segmentList.size()); } } class FileCache { private LocalDirAllocator lDirAlloc; private final Configuration conf; private final FileSystem fs; private boolean isActive = false; private Path file = null; private IFile.Writer<K,V> writer = null; private int spillNumber = 0; public FileCache(Configuration conf) throws IOException { this.conf = conf; this.fs = FileSystem.getLocal(conf); this.lDirAlloc = new LocalDirAllocator(MRConfig.LOCAL_DIR); } void write(DataInputBuffer key, DataInputBuffer value) throws IOException { if (writer == null) { // If spillNumber is 0, we should have called activate and not // come here at all assert (spillNumber != 0); writer = createSpillFile(); } writer.append(key, value); LOG.debug("ID: " + segmentList.size() + " WRITE TO DISK"); } void reinitialize() { spillNumber = 0; writer = null; isActive = false; } void activate() throws IOException { isActive = true; writer = createSpillFile(); } void createInDiskSegment() throws IOException { assert (writer != null); writer.close(); Segment<K,V> s = new Segment<K, V>(conf, fs, file, null, true); writer = null; segmentList.add(s); LOG.debug("Disk Segment added to List. 
Size is " + segmentList.size()); } boolean isActive() { return isActive; } private Writer<K,V> createSpillFile() throws IOException { Path tmp = new Path(MRJobConfig.OUTPUT + "/backup_" + tid.getId() + "_" + (spillNumber++) + ".out"); LOG.info("Created file: " + tmp); file = lDirAlloc.getLocalPathForWrite(tmp.toUri().getPath(), -1, conf); FSDataOutputStream out = fs.create(file); out = CryptoUtils.wrapIfNecessary(conf, out); return new Writer<K, V>(conf, out, null, null, null, null, true); } } static class BackupRamManager implements RamManager { private int availableSize = 0; private final int maxSize; public BackupRamManager(int size) { availableSize = maxSize = size; } public boolean reserve(int requestedSize, InputStream in) { // Not used LOG.warn("Reserve(int, InputStream) not supported by BackupRamManager"); return false; } int reserve(int requestedSize) { if (availableSize == 0) { return 0; } int reservedSize = Math.min(requestedSize, availableSize); availableSize -= reservedSize; LOG.debug("Reserving: " + reservedSize + " Requested: " + requestedSize); return reservedSize; } int reserve(int requestedSize, int minSize) { if (availableSize < minSize) { LOG.debug("No space available. Available: " + availableSize + " MinSize: " + minSize); return 0; } else { return reserve(requestedSize); } } public void unreserve(int requestedSize) { availableSize += requestedSize; LOG.debug("Unreserving: " + requestedSize + ". Available: " + availableSize); } void reinitialize() { availableSize = maxSize; } } }
dennishuo/hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/BackupStore.java
Java
apache-2.0
18,966
//// [es6ClassTest8.ts] function f1(x:any) {return x;} class C { constructor() { var bar:any = (function() { return bar; // 'bar' should be resolvable }); var b = f1(f1(bar)); } } class Vector { static norm(v:Vector):Vector {return null;} static minus(v1:Vector, v2:Vector):Vector {return null;} static times(v1:Vector, v2:Vector):Vector {return null;} static cross(v1:Vector, v2:Vector):Vector {return null;} constructor(public x: number, public y: number, public z: number) { } static dot(v1:Vector, v2:Vector):Vector {return null;} } class Camera { public forward: Vector; public right: Vector; public up: Vector; constructor(public pos: Vector, lookAt: Vector) { var down = new Vector(0.0, -1.0, 0.0); this.forward = Vector.norm(Vector.minus(lookAt,this.pos)); this.right = Vector.times(down, Vector.norm(Vector.cross(this.forward, down))); this.up = Vector.times(down, Vector.norm(Vector.cross(this.forward, this.right))); } } //// [es6ClassTest8.js] function f1(x) { return x; } var C = /** @class */ (function () { function C() { var bar = (function () { return bar; // 'bar' should be resolvable }); var b = f1(f1(bar)); } return C; }()); var Vector = /** @class */ (function () { function Vector(x, y, z) { this.x = x; this.y = y; this.z = z; } Vector.norm = function (v) { return null; }; Vector.minus = function (v1, v2) { return null; }; Vector.times = function (v1, v2) { return null; }; Vector.cross = function (v1, v2) { return null; }; Vector.dot = function (v1, v2) { return null; }; return Vector; }()); var Camera = /** @class */ (function () { function Camera(pos, lookAt) { this.pos = pos; var down = new Vector(0.0, -1.0, 0.0); this.forward = Vector.norm(Vector.minus(lookAt, this.pos)); this.right = Vector.times(down, Vector.norm(Vector.cross(this.forward, down))); this.up = Vector.times(down, Vector.norm(Vector.cross(this.forward, this.right))); } return Camera; }());
donaldpipowitch/TypeScript
tests/baselines/reference/es6ClassTest8.js
JavaScript
apache-2.0
2,281
//----------------------------------------------------------------------- // <copyright file="SimpleJson.cs" company="The Outercurve Foundation"> // Copyright (c) 2011, The Outercurve Foundation. // // Licensed under the MIT License (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.opensource.org/licenses/mit-license.php // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // </copyright> // <author>Nathan Totten (ntotten.com), Jim Zimmerman (jimzimmerman.com) and Prabir Shrestha (prabir.me)</author> // <website>https://github.com/facebook-csharp-sdk/simple-json</website> //----------------------------------------------------------------------- // VERSION: // NOTE: uncomment the following line to make SimpleJson class internal. //#define SIMPLE_JSON_INTERNAL // NOTE: uncomment the following line to make JsonArray and JsonObject class internal. //#define SIMPLE_JSON_OBJARRAYINTERNAL // NOTE: uncomment the following line to enable dynamic support. //#define SIMPLE_JSON_DYNAMIC // NOTE: uncomment the following line to enable DataContract support. //#define SIMPLE_JSON_DATACONTRACT // NOTE: uncomment the following line to enable IReadOnlyCollection<T> and IReadOnlyList<T> support. //#define SIMPLE_JSON_READONLY_COLLECTIONS // NOTE: uncomment the following line if you are compiling under Window Metro style application/library. 
// usually already defined in properties #if UNITY_WSA && UNITY_WP8 #define NETFX_CORE #endif // If you are targetting WinStore, WP8 and NET4.5+ PCL make sure to #if UNITY_WP8 || UNITY_WP8_1 || UNITY_WSA // #define SIMPLE_JSON_TYPEINFO #endif // original json parsing code from http://techblog.procurios.nl/k/618/news/view/14605/14863/How-do-I-write-my-own-parser-for-JSON.html #if NETFX_CORE #define SIMPLE_JSON_TYPEINFO #endif using System; using System.CodeDom.Compiler; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.Diagnostics.CodeAnalysis; #if SIMPLE_JSON_DYNAMIC using System.Dynamic; #endif using System.Globalization; using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.Serialization; using System.Text; // ReSharper disable LoopCanBeConvertedToQuery // ReSharper disable RedundantExplicitArrayCreation // ReSharper disable SuggestUseVarKeywordEvident namespace PlayFab.Json { public enum NullValueHandling { Include, // Include null values when serializing and deserializing objects Ignore // Ignore null values when serializing and deserializing objects } /// <summary> /// Customize the json output of a field or property /// </summary> [AttributeUsage(AttributeTargets.Property | AttributeTargets.Field)] public class JsonProperty : Attribute { public string PropertyName = null; public NullValueHandling NullValueHandling = NullValueHandling.Include; } /// <summary> /// Represents the json array. /// </summary> [GeneratedCode("simple-json", "1.0.0")] [EditorBrowsable(EditorBrowsableState.Never)] [SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")] #if SIMPLE_JSON_OBJARRAYINTERNAL internal #else public #endif class JsonArray : List<object> { /// <summary> /// Initializes a new instance of the <see cref="JsonArray"/> class. /// </summary> public JsonArray() { } /// <summary> /// Initializes a new instance of the <see cref="JsonArray"/> class. 
/// </summary> /// <param name="capacity">The capacity of the json array.</param> public JsonArray(int capacity) : base(capacity) { } /// <summary> /// The json representation of the array. /// </summary> /// <returns>The json representation of the array.</returns> public override string ToString() { return PlayFabSimpleJson.SerializeObject(this) ?? string.Empty; } } /// <summary> /// Represents the json object. /// </summary> [GeneratedCode("simple-json", "1.0.0")] [EditorBrowsable(EditorBrowsableState.Never)] [SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")] #if SIMPLE_JSON_OBJARRAYINTERNAL internal #else public #endif class JsonObject : #if SIMPLE_JSON_DYNAMIC DynamicObject, #endif IDictionary<string, object> { private const int DICTIONARY_DEFAULT_SIZE = 16; /// <summary> /// The internal member dictionary. /// </summary> private readonly Dictionary<string, object> _members; /// <summary> /// Initializes a new instance of <see cref="JsonObject"/>. /// </summary> public JsonObject() { _members = new Dictionary<string, object>(DICTIONARY_DEFAULT_SIZE); } /// <summary> /// Initializes a new instance of <see cref="JsonObject"/>. /// </summary> /// <param name="comparer">The <see cref="T:System.Collections.Generic.IEqualityComparer`1"/> implementation to use when comparing keys, or null to use the default <see cref="T:System.Collections.Generic.EqualityComparer`1"/> for the type of the key.</param> public JsonObject(IEqualityComparer<string> comparer) { _members = new Dictionary<string, object>(comparer); } /// <summary> /// Gets the <see cref="System.Object"/> at the specified index. 
/// </summary> /// <value></value> public object this[int index] { get { return GetAtIndex(_members, index); } } internal static object GetAtIndex(IDictionary<string, object> obj, int index) { if (obj == null) throw new ArgumentNullException("obj"); if (index >= obj.Count) throw new ArgumentOutOfRangeException("index"); int i = 0; foreach (KeyValuePair<string, object> o in obj) if (i++ == index) return o.Value; return null; } /// <summary> /// Adds the specified key. /// </summary> /// <param name="key">The key.</param> /// <param name="value">The value.</param> public void Add(string key, object value) { _members.Add(key, value); } /// <summary> /// Determines whether the specified key contains key. /// </summary> /// <param name="key">The key.</param> /// <returns> /// <c>true</c> if the specified key contains key; otherwise, <c>false</c>. /// </returns> public bool ContainsKey(string key) { return _members.ContainsKey(key); } /// <summary> /// Gets the keys. /// </summary> /// <value>The keys.</value> public ICollection<string> Keys { get { return _members.Keys; } } /// <summary> /// Removes the specified key. /// </summary> /// <param name="key">The key.</param> /// <returns></returns> public bool Remove(string key) { return _members.Remove(key); } /// <summary> /// Tries the get value. /// </summary> /// <param name="key">The key.</param> /// <param name="value">The value.</param> /// <returns></returns> public bool TryGetValue(string key, out object value) { return _members.TryGetValue(key, out value); } /// <summary> /// Gets the values. /// </summary> /// <value>The values.</value> public ICollection<object> Values { get { return _members.Values; } } /// <summary> /// Gets or sets the <see cref="System.Object"/> with the specified key. /// </summary> /// <value></value> public object this[string key] { get { return _members[key]; } set { _members[key] = value; } } /// <summary> /// Adds the specified item. 
/// </summary> /// <param name="item">The item.</param> public void Add(KeyValuePair<string, object> item) { _members.Add(item.Key, item.Value); } /// <summary> /// Clears this instance. /// </summary> public void Clear() { _members.Clear(); } /// <summary> /// Determines whether [contains] [the specified item]. /// </summary> /// <param name="item">The item.</param> /// <returns> /// <c>true</c> if [contains] [the specified item]; otherwise, <c>false</c>. /// </returns> public bool Contains(KeyValuePair<string, object> item) { return _members.ContainsKey(item.Key) && _members[item.Key] == item.Value; } /// <summary> /// Copies to. /// </summary> /// <param name="array">The array.</param> /// <param name="arrayIndex">Index of the array.</param> public void CopyTo(KeyValuePair<string, object>[] array, int arrayIndex) { if (array == null) throw new ArgumentNullException("array"); int num = Count; foreach (KeyValuePair<string, object> kvp in _members) { array[arrayIndex++] = kvp; if (--num <= 0) return; } } /// <summary> /// Gets the count. /// </summary> /// <value>The count.</value> public int Count { get { return _members.Count; } } /// <summary> /// Gets a value indicating whether this instance is read only. /// </summary> /// <value> /// <c>true</c> if this instance is read only; otherwise, <c>false</c>. /// </value> public bool IsReadOnly { get { return false; } } /// <summary> /// Removes the specified item. /// </summary> /// <param name="item">The item.</param> /// <returns></returns> public bool Remove(KeyValuePair<string, object> item) { return _members.Remove(item.Key); } /// <summary> /// Gets the enumerator. /// </summary> /// <returns></returns> public IEnumerator<KeyValuePair<string, object>> GetEnumerator() { return _members.GetEnumerator(); } /// <summary> /// Returns an enumerator that iterates through a collection. 
/// </summary>
/// <returns>
/// An <see cref="T:System.Collections.IEnumerator"/> object that can be used to iterate through the collection.
/// </returns>
IEnumerator IEnumerable.GetEnumerator()
{
    // Non-generic enumeration delegates to the backing dictionary.
    return _members.GetEnumerator();
}

/// <summary>
/// Returns a json <see cref="T:System.String"/> that represents the current <see cref="T:System.Object"/>.
/// </summary>
/// <returns>
/// A json <see cref="T:System.String"/> that represents the current <see cref="T:System.Object"/>.
/// </returns>
public override string ToString()
{
    return PlayFabSimpleJson.SerializeObject(_members);
}

#if SIMPLE_JSON_DYNAMIC
/// <summary>
/// Provides implementation for type conversion operations. Converting this object to any of the
/// enumerable/dictionary interfaces it implements simply returns the object itself; any other
/// target type is delegated to the <see cref="T:System.Dynamic.DynamicObject"/> base implementation.
/// </summary>
/// <param name="binder">Provides information about the conversion operation (target type, explicit/implicit).</param>
/// <param name="result">The result of the type conversion operation.</param>
/// <returns>
/// Always returns true for the supported interface targets; otherwise the base implementation's result.
/// </returns>
public override bool TryConvert(ConvertBinder binder, out object result)
{
    // <pex>
    if (binder == null)
        throw new ArgumentNullException("binder");
    // </pex>
    Type targetType = binder.Type;

    if ((targetType == typeof(IEnumerable)) || (targetType == typeof(IEnumerable<KeyValuePair<string, object>>)) ||
        (targetType == typeof(IDictionary<string, object>)) || (targetType == typeof(IDictionary)))
    {
        result = this;
        return true;
    }

    return base.TryConvert(binder, out result);
}

/// <summary>
/// Provides the implementation for operations that delete an object member. This method is not intended for use in C# or Visual Basic.
/// </summary>
/// <param name="binder">Provides information about the deletion.</param>
/// <returns>
/// true if the member existed and was removed; otherwise, false.
/// (The original doc claimed "always returns true"; Remove can return false for a missing key.)
/// </returns>
public override bool TryDeleteMember(DeleteMemberBinder binder)
{
    // <pex>
    if (binder == null)
        throw new ArgumentNullException("binder");
    // </pex>
    return _members.Remove(binder.Name);
}

/// <summary>
/// Provides the implementation for operations that get a value by index (e.g. obj["key"]).
/// Only a single string index is supported; any other index arity yields a null result
/// but still reports success.
/// </summary>
/// <param name="binder">Provides information about the operation.</param>
/// <param name="indexes">The indexes that are used in the operation; expected to be a single string key.</param>
/// <param name="result">The result of the index operation.</param>
/// <returns>
/// Always returns true.
/// </returns>
public override bool TryGetIndex(GetIndexBinder binder, object[] indexes, out object result)
{
    if (indexes == null) throw new ArgumentNullException("indexes");
    if (indexes.Length == 1)
    {
        result = ((IDictionary<string, object>)this)[(string)indexes[0]];
        return true;
    }
    result = null;
    return true;
}

/// <summary>
/// Provides the implementation for operations that get member values (e.g. obj.SomeProperty).
/// A missing member yields a null result rather than a runtime binding error.
/// </summary>
/// <param name="binder">Provides information about the member access; binder.Name is the member name.</param>
/// <param name="result">Receives the member's value, or null when the member is absent.</param>
/// <returns>
/// Always returns true.
/// </returns>
public override bool TryGetMember(GetMemberBinder binder, out object result)
{
    object value;
    if (_members.TryGetValue(binder.Name, out value))
    {
        result = value;
        return true;
    }
    result = null;
    return true;
}

/// <summary>
/// Provides the implementation for operations that set a value by index (e.g. obj["key"] = v).
/// Only a single string index is supported; other arities fall through to the base implementation.
/// </summary>
/// <param name="binder">Provides information about the operation.</param>
/// <param name="indexes">The indexes that are used in the operation; expected to be a single string key.</param>
/// <param name="value">The value to set for the specified index.</param>
/// <returns>
/// true if the operation is successful; otherwise, false.
/// </returns>
public override bool TrySetIndex(SetIndexBinder binder, object[] indexes, object value)
{
    if (indexes == null) throw new ArgumentNullException("indexes");
    if (indexes.Length == 1)
    {
        ((IDictionary<string, object>)this)[(string)indexes[0]] = value;
        return true;
    }
    return base.TrySetIndex(binder, indexes, value);
}

/// <summary>
/// Provides the implementation for operations that set member values (e.g. obj.SomeProperty = v).
/// Adds or overwrites the member unconditionally.
/// </summary>
/// <param name="binder">Provides information about the member assignment; binder.Name is the member name.</param>
/// <param name="value">The value to assign to the member.</param>
/// <returns>
/// Always returns true.
/// </returns>
public override bool TrySetMember(SetMemberBinder binder, object value)
{
    // <pex>
    if (binder == null)
        throw new ArgumentNullException("binder");
    // </pex>
    _members[binder.Name] = value;
    return true;
}

/// <summary>
/// Returns the enumeration of all dynamic member names.
/// </summary>
/// <returns>
/// A sequence that contains dynamic member names.
/// </returns>
public override IEnumerable<string> GetDynamicMemberNames()
{
    foreach (var key in Keys)
        yield return key;
}
#endif
}

/// <summary>
/// This class encodes and decodes JSON strings.
/// Spec. details, see http://www.json.org/
///
/// JSON uses Arrays and Objects. These correspond here to the datatypes JsonArray(IList&lt;object>) and JsonObject(IDictionary&lt;string,object>).
/// All numbers are parsed to doubles.
/// </summary>
[GeneratedCode("simple-json", "1.0.0")]
#if SIMPLE_JSON_INTERNAL
internal
#else
public
#endif
static class PlayFabSimpleJson
{
    // Token kinds produced by the hand-rolled tokenizer (NextToken/LookAhead).
    private enum TokenType : byte
    {
        NONE = 0,
        CURLY_OPEN = 1,
        CURLY_CLOSE = 2,
        SQUARED_OPEN = 3,
        SQUARED_CLOSE = 4,
        COLON = 5,
        COMMA = 6,
        STRING = 7,
        NUMBER = 8,
        TRUE = 9,
        FALSE = 10,
        NULL = 11,
    }

    // Initial capacity for the thread-static StringBuilders below.
    private const int BUILDER_INIT = 2000;

    // Maps a character needing escaping to its JSON escape letter; built in the
    // static constructor. Size 93 covers up to '\\' (92), the highest escaped char.
    private static readonly char[] EscapeTable;
    // Characters that require escaping when serializing a string (used for the
    // IndexOfAny fast path in SerializeString).
    private static readonly char[] EscapeCharacters = new char[] { '"', '\\', '\b', '\f', '\n', '\r', '\t' };
    // private static readonly string EscapeCharactersString = new string(EscapeCharacters);

    // Types treated as JSON numbers/booleans by SerializeNumber.
    internal static readonly List<Type> NumberTypes = new List<Type>
    {
        typeof(bool), typeof(byte), typeof(ushort), typeof(uint), typeof(ulong),
        typeof(sbyte), typeof(short), typeof(int), typeof(long),
        typeof(double), typeof(float), typeof(decimal)
    };

    // Performance stuff: per-thread reusable builders to avoid allocations.
    [ThreadStatic]
    private static StringBuilder _serializeObjectBuilder;
    [ThreadStatic]
    private static StringBuilder _parseStringBuilder;

    static PlayFabSimpleJson()
    {
        EscapeTable = new char[93];
        EscapeTable['"'] = '"';
        EscapeTable['\\'] = '\\';
        EscapeTable['\b'] = 'b';
        EscapeTable['\f'] = 'f';
        EscapeTable['\n'] = 'n';
        EscapeTable['\r'] = 'r';
        EscapeTable['\t'] = 't';
    }

    /// <summary>
    /// Parses the string json into a value
    /// </summary>
    /// <param name="json">A JSON string.</param>
    /// <returns>An IList&lt;object>, a IDictionary&lt;string,object>, a double, a string, null, true, or false</returns>
    public static object DeserializeObject(string json)
    {
        object obj;
        if (TryDeserializeObject(json, out obj))
            return obj;
        throw new SerializationException("Invalid JSON string");
    }

    /// <summary>
    /// Try parsing the json string into a value.
    /// </summary>
    /// <param name="json">
    /// A JSON string.
    /// </param>
    /// <param name="obj">
    /// The parsed value, or null on failure.
    /// </param>
    /// <returns>
    /// Returns true if successful otherwise false.
    /// NOTE(review): a null input yields obj == null and returns true (treated
    /// as a successful parse of "nothing") — confirm callers rely on this.
    /// </returns>
    [SuppressMessage("Microsoft.Design", "CA1007:UseGenericsWhereAppropriate", Justification = "Need to support .NET 2")]
    public static bool TryDeserializeObject(string json, out object obj)
    {
        bool success = true;
        if (json != null)
        {
            int index = 0;
            obj = ParseValue(json, ref index, ref success);
        }
        else
            obj = null;

        return success;
    }

    // Deserializes into a specific CLR type, delegating to the serializer
    // strategy when the raw parse result is not directly assignable.
    public static object DeserializeObject(string json, Type type, IJsonSerializerStrategy jsonSerializerStrategy = null)
    {
        object jsonObject = DeserializeObject(json);
        if (type == null || jsonObject != null && ReflectionUtils.IsAssignableFrom(jsonObject.GetType(), type))
            return jsonObject;
        return (jsonSerializerStrategy ?? CurrentJsonSerializerStrategy).DeserializeObject(jsonObject, type);
    }

    // Generic convenience wrapper over the Type-based overload.
    public static T DeserializeObject<T>(string json, IJsonSerializerStrategy jsonSerializerStrategy = null)
    {
        return (T)DeserializeObject(json, typeof(T), jsonSerializerStrategy);
    }

    /// <summary>
    /// Converts a IDictionary&lt;string,object> / IList&lt;object> object into a JSON string
    /// </summary>
    /// <param name="json">A IDictionary&lt;string,object> / IList&lt;object></param>
    /// <param name="jsonSerializerStrategy">Serializer strategy to use</param>
    /// <returns>A JSON encoded string, or null if object 'json' is not serializable</returns>
    public static string SerializeObject(object json, IJsonSerializerStrategy jsonSerializerStrategy = null)
    {
        // Reuse the per-thread builder; reset it rather than reallocating.
        if (_serializeObjectBuilder == null)
            _serializeObjectBuilder = new StringBuilder(BUILDER_INIT);
        _serializeObjectBuilder.Length = 0;

        if (jsonSerializerStrategy == null)
            jsonSerializerStrategy = CurrentJsonSerializerStrategy;
        bool success = SerializeValue(jsonSerializerStrategy, json, _serializeObjectBuilder);
        return (success ?
_serializeObjectBuilder.ToString() : null);
    }

    /// <summary>
    /// Resolves JSON/JavaScript backslash escape sequences in <paramref name="jsonString"/>
    /// to their literal characters (despite the name, this UN-escapes).
    /// Handles \\ \" \t \b \n \r and (fixed) \f; an unrecognized escape drops
    /// the backslash and leaves the following character as-is (so \/ becomes /,
    /// but \uXXXX becomes uXXXX — assumed acceptable for existing callers).
    /// </summary>
    /// <param name="jsonString">The escaped string; null/empty is returned unchanged.</param>
    /// <returns>The unescaped string.</returns>
    public static string EscapeToJavascriptString(string jsonString)
    {
        if (string.IsNullOrEmpty(jsonString))
            return jsonString;

        StringBuilder sb = new StringBuilder();
        char c;

        for (int i = 0; i < jsonString.Length;)
        {
            c = jsonString[i++];

            if (c == '\\')
            {
                int remainingLength = jsonString.Length - i;
                if (remainingLength >= 2)
                {
                    char lookahead = jsonString[i];
                    if (lookahead == '\\')
                    {
                        sb.Append('\\');
                        ++i;
                    }
                    else if (lookahead == '"')
                    {
                        sb.Append("\"");
                        ++i;
                    }
                    else if (lookahead == 't')
                    {
                        sb.Append('\t');
                        ++i;
                    }
                    else if (lookahead == 'b')
                    {
                        sb.Append('\b');
                        ++i;
                    }
                    else if (lookahead == 'f')
                    {
                        // FIX: '\f' (form feed) was the one EscapeTable escape not
                        // handled here; it previously decoded to a literal 'f'.
                        sb.Append('\f');
                        ++i;
                    }
                    else if (lookahead == 'n')
                    {
                        sb.Append('\n');
                        ++i;
                    }
                    else if (lookahead == 'r')
                    {
                        sb.Append('\r');
                        ++i;
                    }
                }
            }
            else
            {
                sb.Append(c);
            }
        }

        return sb.ToString();
    }

    /// <summary>
    /// Parses a JSON object ("{...}") starting at <paramref name="index"/> into a JsonObject.
    /// On any malformed input sets <paramref name="success"/> to false and returns null.
    /// </summary>
    static IDictionary<string, object> ParseObject(string json, ref int index, ref bool success)
    {
        IDictionary<string, object> table = new JsonObject();
        TokenType token;

        // {
        NextToken(json, ref index);

        bool done = false;
        while (!done)
        {
            token = LookAhead(json, index);
            if (token == TokenType.NONE)
            {
                success = false;
                return null;
            }
            else if (token == TokenType.COMMA)
                NextToken(json, ref index);
            else if (token == TokenType.CURLY_CLOSE)
            {
                NextToken(json, ref index);
                return table;
            }
            else
            {
                // name
                string name = ParseString(json, ref index, ref success);
                if (!success)
                {
                    success = false;
                    return null;
                }

                // :
                token = NextToken(json, ref index);
                if (token != TokenType.COLON)
                {
                    success = false;
                    return null;
                }

                // value (duplicate keys overwrite earlier ones)
                object value = ParseValue(json, ref index, ref success);
                if (!success)
                {
                    success = false;
                    return null;
                }

                table[name] = value;
            }
        }

        return table;
    }

    /// <summary>
    /// Parses a JSON array ("[...]") starting at <paramref name="index"/> into a JsonArray.
    /// </summary>
    static JsonArray ParseArray(string json, ref int index, ref bool success)
    {
        JsonArray array = new JsonArray();

        // [
        NextToken(json, ref index);

        bool done = false;
        while (!done)
        {
            TokenType token = LookAhead(json, index);
            if (token == TokenType.NONE)
            {
                success = false;
                return null;
            }
            else if (token == TokenType.COMMA)
                NextToken(json, ref index);
            else if (token ==
TokenType.SQUARED_CLOSE)
            {
                NextToken(json, ref index);
                break;
            }
            else
            {
                object value = ParseValue(json, ref index, ref success);
                if (!success)
                    return null;
                array.Add(value);
            }
        }

        return array;
    }

    /// <summary>
    /// Parses any JSON value at <paramref name="index"/> by dispatching on the
    /// next token: string, number, object, array, true, false, or null.
    /// Sets <paramref name="success"/> to false on unrecognized input.
    /// </summary>
    static object ParseValue(string json, ref int index, ref bool success)
    {
        switch (LookAhead(json, index))
        {
            case TokenType.STRING:
                return ParseString(json, ref index, ref success);
            case TokenType.NUMBER:
                return ParseNumber(json, ref index, ref success);
            case TokenType.CURLY_OPEN:
                return ParseObject(json, ref index, ref success);
            case TokenType.SQUARED_OPEN:
                return ParseArray(json, ref index, ref success);
            case TokenType.TRUE:
                NextToken(json, ref index);
                return true;
            case TokenType.FALSE:
                NextToken(json, ref index);
                return false;
            case TokenType.NULL:
                NextToken(json, ref index);
                return null;
            case TokenType.NONE:
                break;
        }
        success = false;
        return null;
    }

    /// <summary>
    /// Parses a quoted JSON string at <paramref name="index"/>, resolving escape
    /// sequences including \uXXXX (with UTF-16 surrogate-pair support).
    /// Returns null (success=false) on an unterminated string, and "" (success=false)
    /// on a malformed \u escape or invalid surrogate pair.
    /// </summary>
    static string ParseString(string json, ref int index, ref bool success)
    {
        // Reuse the per-thread builder; reset it rather than reallocating.
        if (_parseStringBuilder == null)
            _parseStringBuilder = new StringBuilder(BUILDER_INIT);
        _parseStringBuilder.Length = 0;

        EatWhitespace(json, ref index);

        // "
        char c = json[index++];

        bool complete = false;
        while (!complete)
        {
            if (index == json.Length)
                break;

            c = json[index++];
            if (c == '"')
            {
                complete = true;
                break;
            }
            else if (c == '\\')
            {
                if (index == json.Length)
                    break;
                c = json[index++];
                if (c == '"')
                    _parseStringBuilder.Append('"');
                else if (c == '\\')
                    _parseStringBuilder.Append('\\');
                else if (c == '/')
                    _parseStringBuilder.Append('/');
                else if (c == 'b')
                    _parseStringBuilder.Append('\b');
                else if (c == 'f')
                    _parseStringBuilder.Append('\f');
                else if (c == 'n')
                    _parseStringBuilder.Append('\n');
                else if (c == 'r')
                    _parseStringBuilder.Append('\r');
                else if (c == 't')
                    _parseStringBuilder.Append('\t');
                else if (c == 'u')
                {
                    int remainingLength = json.Length - index;
                    if (remainingLength >= 4)
                    {
                        // parse the 32 bit hex into an integer codepoint
                        uint codePoint;
                        if (!(success = UInt32.TryParse(json.Substring(index, 4), NumberStyles.HexNumber,
CultureInfo.InvariantCulture, out codePoint)))
                            return "";

                        // convert the integer codepoint to a unicode char and add to string
                        if (0xD800 <= codePoint && codePoint <= 0xDBFF) // if high surrogate
                        {
                            // A high surrogate must be followed by a \uXXXX low surrogate.
                            index += 4; // skip 4 chars
                            remainingLength = json.Length - index;
                            if (remainingLength >= 6)
                            {
                                uint lowCodePoint;
                                if (json.Substring(index, 2) == "\\u" && UInt32.TryParse(json.Substring(index + 2, 4), NumberStyles.HexNumber, CultureInfo.InvariantCulture, out lowCodePoint))
                                {
                                    if (0xDC00 <= lowCodePoint && lowCodePoint <= 0xDFFF) // if low surrogate
                                    {
                                        _parseStringBuilder.Append((char)codePoint);
                                        _parseStringBuilder.Append((char)lowCodePoint);
                                        index += 6; // skip 6 chars
                                        continue;
                                    }
                                }
                            }
                            success = false; // invalid surrogate pair
                            return "";
                        }
                        _parseStringBuilder.Append(ConvertFromUtf32((int)codePoint));
                        // skip 4 chars
                        index += 4;
                    }
                    else
                        break;
                }
                // NOTE(review): an unrecognized escape letter falls through silently,
                // appending nothing — confirm this lenient behavior is intended.
            }
            else
                _parseStringBuilder.Append(c);
        }

        if (!complete)
        {
            success = false;
            return null;
        }

        return _parseStringBuilder.ToString();
    }

    /// <summary>
    /// Converts a Unicode code point (0..0x10FFFF, excluding the surrogate range)
    /// to its UTF-16 string representation (1 or 2 chars).
    /// </summary>
    private static string ConvertFromUtf32(int utf32)
    {
        // http://www.java2s.com/Open-Source/CSharp/2.6.4-mono-.net-core/System/System/Char.cs.htm
        if (utf32 < 0 || utf32 > 0x10FFFF)
            throw new ArgumentOutOfRangeException("utf32", "The argument must be from 0 to 0x10FFFF.");
        if (0xD800 <= utf32 && utf32 <= 0xDFFF)
            throw new ArgumentOutOfRangeException("utf32", "The argument must not be in surrogate pair range.");
        if (utf32 < 0x10000)
            return new string((char)utf32, 1);
        utf32 -= 0x10000;
        return new string(new char[] { (char)((utf32 >> 10) + 0xD800), (char)(utf32 % 0x0400 + 0xDC00) });
    }

    /// <summary>
    /// Parses a JSON number at <paramref name="index"/>. Numbers containing '.'
    /// or an exponent parse as double; otherwise non-negative values parse as
    /// ulong and negative values as long.
    /// </summary>
    static object ParseNumber(string json, ref int index, ref bool success)
    {
        EatWhitespace(json, ref index);

        int lastIndex = GetLastIndexOfNumber(json, index);
        int charLength = (lastIndex - index) + 1;

        object returnNumber;
        string str = json.Substring(index, charLength);
        if (str.IndexOf(".", StringComparison.OrdinalIgnoreCase) != -1 || str.IndexOf("e", StringComparison.OrdinalIgnoreCase) != -1)
        {
            double number;
            success =
double.TryParse(json.Substring(index, charLength), NumberStyles.Any, CultureInfo.InvariantCulture, out number);
            returnNumber = number;
        }
        else if (str.IndexOf("-", StringComparison.OrdinalIgnoreCase) == -1)
        {
            ulong number;
            success = ulong.TryParse(json.Substring(index, charLength), NumberStyles.Any, CultureInfo.InvariantCulture, out number);
            returnNumber = number;
        }
        else
        {
            long number;
            success = long.TryParse(json.Substring(index, charLength), NumberStyles.Any, CultureInfo.InvariantCulture, out number);
            returnNumber = number;
        }

        index = lastIndex + 1;
        return returnNumber;
    }

    // Returns the index of the last character belonging to the number that
    // starts at 'index' (scans over digits, sign, '.', and exponent chars).
    static int GetLastIndexOfNumber(string json, int index)
    {
        int lastIndex;
        for (lastIndex = index; lastIndex < json.Length; lastIndex++)
            if ("0123456789+-.eE".IndexOf(json[lastIndex]) == -1)
                break;
        return lastIndex - 1;
    }

    // Advances 'index' past any whitespace characters.
    static void EatWhitespace(string json, ref int index)
    {
        for (; index < json.Length; index++)
            if (" \t\n\r\b\f".IndexOf(json[index]) == -1)
                break;
    }

    // Peeks at the next token without consuming it (copies the index).
    static TokenType LookAhead(string json, int index)
    {
        int saveIndex = index;
        return NextToken(json, ref saveIndex);
    }

    /// <summary>
    /// Consumes and classifies the next token, advancing <paramref name="index"/>.
    /// Returns TokenType.NONE at end of input or on unrecognized characters.
    /// </summary>
    [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
    static TokenType NextToken(string json, ref int index)
    {
        EatWhitespace(json, ref index);

        if (index == json.Length)
            return TokenType.NONE;

        char c = json[index];
        index++;
        switch (c)
        {
            case '{':
                return TokenType.CURLY_OPEN;
            case '}':
                return TokenType.CURLY_CLOSE;
            case '[':
                return TokenType.SQUARED_OPEN;
            case ']':
                return TokenType.SQUARED_CLOSE;
            case ',':
                return TokenType.COMMA;
            case '"':
                return TokenType.STRING;
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
            case '8':
            case '9':
            case '-':
                return TokenType.NUMBER;
            case ':':
                return TokenType.COLON;
        }
        index--;

        int remainingLength = json.Length - index;

        // false
        if (remainingLength >= 5)
        {
            if (json[index] == 'f' && json[index + 1] == 'a' && json[index + 2] == 'l' && json[index + 3] == 's' && json[index + 4] == 'e')
            {
                index += 5;
                return TokenType.FALSE;
            }
        }

        // true
        if
(remainingLength >= 4)
        {
            if (json[index] == 't' && json[index + 1] == 'r' && json[index + 2] == 'u' && json[index + 3] == 'e')
            {
                index += 4;
                return TokenType.TRUE;
            }
        }

        // null
        if (remainingLength >= 4)
        {
            if (json[index] == 'n' && json[index + 1] == 'u' && json[index + 2] == 'l' && json[index + 3] == 'l')
            {
                index += 4;
                return TokenType.NULL;
            }
        }

        return TokenType.NONE;
    }

    /// <summary>
    /// Serializes any supported value into <paramref name="builder"/>:
    /// null, string, dictionaries (several flavors), enumerables, numbers,
    /// booleans, or — via the strategy — arbitrary non-primitive objects.
    /// </summary>
    static bool SerializeValue(IJsonSerializerStrategy jsonSerializerStrategy, object value, StringBuilder builder)
    {
        bool success = true;
        string stringValue = value as string;
        if (value == null)
            builder.Append("null");
        else if (stringValue != null)
            success = SerializeString(stringValue, builder);
        else
        {
            IDictionary<string, object> dict = value as IDictionary<string, object>;
            Type type = value.GetType();
            Type[] genArgs = ReflectionUtils.GetGenericTypeArguments(type);
            // Dictionary<string, T> for any T is serialized as a JSON object.
            var isStringKeyDictionary = type.GetTypeInfo().IsGenericType && type.GetGenericTypeDefinition() == typeof(Dictionary<,>) && genArgs[0] == typeof(string);
            if (isStringKeyDictionary)
            {
                var strDictValue = value as IDictionary;
                success = SerializeObject(jsonSerializerStrategy, strDictValue.Keys, strDictValue.Values, builder);
            }
            else if (dict != null)
            {
                success = SerializeObject(jsonSerializerStrategy, dict.Keys, dict.Values, builder);
            }
            else
            {
                IDictionary<string, string> stringDictionary = value as IDictionary<string, string>;
                if (stringDictionary != null)
                {
                    success = SerializeObject(jsonSerializerStrategy, stringDictionary.Keys, stringDictionary.Values, builder);
                }
                else
                {
                    IEnumerable enumerableValue = value as IEnumerable;
                    if (enumerableValue != null)
                        success = SerializeArray(jsonSerializerStrategy, enumerableValue, builder);
                    else if (IsNumeric(value))
                        success = SerializeNumber(value, builder);
                    else if (value is bool)
                        builder.Append((bool)value ?
"true" : "false"); else { object serializedObject; success = jsonSerializerStrategy.TrySerializeNonPrimitiveObject(value, out serializedObject); if (success) SerializeValue(jsonSerializerStrategy, serializedObject, builder); } } } } return success; } static bool SerializeObject(IJsonSerializerStrategy jsonSerializerStrategy, IEnumerable keys, IEnumerable values, StringBuilder builder) { builder.Append("{"); IEnumerator ke = keys.GetEnumerator(); IEnumerator ve = values.GetEnumerator(); bool first = true; while (ke.MoveNext() && ve.MoveNext()) { object key = ke.Current; object value = ve.Current; if (!first) builder.Append(","); string stringKey = key as string; if (stringKey != null) SerializeString(stringKey, builder); else if (!SerializeValue(jsonSerializerStrategy, value, builder)) return false; builder.Append(":"); if (!SerializeValue(jsonSerializerStrategy, value, builder)) return false; first = false; } builder.Append("}"); return true; } static bool SerializeArray(IJsonSerializerStrategy jsonSerializerStrategy, IEnumerable anArray, StringBuilder builder) { builder.Append("["); bool first = true; foreach (object value in anArray) { if (!first) builder.Append(","); if (!SerializeValue(jsonSerializerStrategy, value, builder)) return false; first = false; } builder.Append("]"); return true; } static bool SerializeString(string aString, StringBuilder builder) { // Happy path if there's nothing to be escaped. IndexOfAny is highly optimized (and unmanaged) if (aString.IndexOfAny(EscapeCharacters) == -1) { builder.Append('"'); builder.Append(aString); builder.Append('"'); return true; } builder.Append('"'); int safeCharacterCount = 0; char[] charArray = aString.ToCharArray(); for (int i = 0; i < charArray.Length; i++) { char c = charArray[i]; // Non ascii characters are fine, buffer them up and send them to the builder // in larger chunks if possible. The escape table is a 1:1 translation table // with \0 [default(char)] denoting a safe character. 
if (c >= EscapeTable.Length || EscapeTable[c] == default(char))
            {
                safeCharacterCount++;
            }
            else
            {
                // Flush the buffered run of safe characters, then emit the escape.
                if (safeCharacterCount > 0)
                {
                    builder.Append(charArray, i - safeCharacterCount, safeCharacterCount);
                    safeCharacterCount = 0;
                }

                builder.Append('\\');
                builder.Append(EscapeTable[c]);
            }
        }

        // Flush any trailing run of safe characters.
        if (safeCharacterCount > 0)
        {
            builder.Append(charArray, charArray.Length - safeCharacterCount, safeCharacterCount);
        }

        builder.Append('"');
        return true;
    }

    /// <summary>
    /// Appends a numeric (or boolean) value to <paramref name="builder"/> using
    /// invariant-culture formatting. Unrecognized types append nothing, but the
    /// method still returns true.
    /// NOTE(review): the "R" round-trip format specifier is documented for
    /// Single/Double/BigInteger, not decimal — verify the decimal branch cannot
    /// throw FormatException on the target runtimes.
    /// </summary>
    static bool SerializeNumber(object number, StringBuilder builder)
    {
        if (number is decimal)
            builder.Append(((decimal)number).ToString("R", CultureInfo.InvariantCulture));
        else if (number is double)
            builder.Append(((double)number).ToString("R", CultureInfo.InvariantCulture));
        else if (number is float)
            builder.Append(((float)number).ToString("R", CultureInfo.InvariantCulture));
        else if (NumberTypes.IndexOf(number.GetType()) != -1)
            builder.Append(number);
        return true;
    }

    /// <summary>
    /// Determines if a given object is numeric in any way
    /// (can be integer, double, decimal, etc).
    /// </summary>
    static bool IsNumeric(object value)
    {
        if (value is sbyte) return true;
        if (value is byte) return true;
        if (value is short) return true;
        if (value is ushort) return true;
        if (value is int) return true;
        if (value is uint) return true;
        if (value is long) return true;
        if (value is ulong) return true;
        if (value is float) return true;
        if (value is double) return true;
        if (value is decimal) return true;
        return false;
    }

    private static IJsonSerializerStrategy _currentJsonSerializerStrategy;

    // Lazily-created default strategy; selected at compile time by
    // SIMPLE_JSON_DATACONTRACT. Settable so hosts can swap in their own.
    public static IJsonSerializerStrategy CurrentJsonSerializerStrategy
    {
        get
        {
            return _currentJsonSerializerStrategy ??
(_currentJsonSerializerStrategy =
#if SIMPLE_JSON_DATACONTRACT
                DataContractJsonSerializerStrategy
#else
                PocoJsonSerializerStrategy
#endif
            );
        }
        set
        {
            _currentJsonSerializerStrategy = value;
        }
    }

    private static PocoJsonSerializerStrategy _pocoJsonSerializerStrategy;

    // Lazily-created singleton POCO (reflection-based) strategy.
    [EditorBrowsable(EditorBrowsableState.Advanced)]
    public static PocoJsonSerializerStrategy PocoJsonSerializerStrategy
    {
        get
        {
            return _pocoJsonSerializerStrategy ?? (_pocoJsonSerializerStrategy = new PocoJsonSerializerStrategy());
        }
    }

#if SIMPLE_JSON_DATACONTRACT
    private static DataContractJsonSerializerStrategy _dataContractJsonSerializerStrategy;

    // Lazily-created singleton DataContract-attribute-aware strategy.
    [System.ComponentModel.EditorBrowsable(EditorBrowsableState.Advanced)]
    public static DataContractJsonSerializerStrategy DataContractJsonSerializerStrategy
    {
        get
        {
            return _dataContractJsonSerializerStrategy ?? (_dataContractJsonSerializerStrategy = new DataContractJsonSerializerStrategy());
        }
    }
#endif
}

/// <summary>
/// Strategy interface for mapping between parsed JSON values and CLR objects.
/// </summary>
[GeneratedCode("simple-json", "1.0.0")]
#if SIMPLE_JSON_INTERNAL
internal
#else
public
#endif
interface IJsonSerializerStrategy
{
    // Attempts to reduce an arbitrary object to a JSON-serializable primitive shape.
    [SuppressMessage("Microsoft.Design", "CA1007:UseGenericsWhereAppropriate", Justification = "Need to support .NET 2")]
    bool TrySerializeNonPrimitiveObject(object input, out object output);

    // Converts a parsed JSON value into an instance of the requested CLR type.
    object DeserializeObject(object value, Type type);
}

/// <summary>
/// Reflection-based serializer strategy for plain CLR objects, with per-type
/// caches for constructors, getters, and setters.
/// </summary>
[GeneratedCode("simple-json", "1.0.0")]
#if SIMPLE_JSON_INTERNAL
internal
#else
public
#endif
class PocoJsonSerializerStrategy : IJsonSerializerStrategy
{
    // Per-type caches; populated lazily by the factory methods below.
    internal IDictionary<Type, ReflectionUtils.ConstructorDelegate> ConstructorCache;
    internal IDictionary<Type, IDictionary<MemberInfo, ReflectionUtils.GetDelegate>> GetCache;
    internal IDictionary<Type, IDictionary<string, KeyValuePair<Type, ReflectionUtils.SetDelegate>>> SetCache;

    internal static readonly Type[] EmptyTypes = new Type[0];
    internal static readonly Type[] ArrayConstructorParameterTypes = new Type[] { typeof(int) };

    // Accepted ISO-8601 date/time formats, most specific first.
    private static readonly string[] Iso8601Format = new string[]
        {
            @"yyyy-MM-dd\THH:mm:ss.FFFFFFF\Z",
            @"yyyy-MM-dd\THH:mm:ss\Z",
@"yyyy-MM-dd\THH:mm:ssK"
        };

    public PocoJsonSerializerStrategy()
    {
        ConstructorCache = new ReflectionUtils.ThreadSafeDictionary<Type, ReflectionUtils.ConstructorDelegate>(ContructorDelegateFactory);
        GetCache = new ReflectionUtils.ThreadSafeDictionary<Type, IDictionary<MemberInfo, ReflectionUtils.GetDelegate>>(GetterValueFactory);
        SetCache = new ReflectionUtils.ThreadSafeDictionary<Type, IDictionary<string, KeyValuePair<Type, ReflectionUtils.SetDelegate>>>(SetterValueFactory);
    }

    // Resolves the JSON field name for a member: the first non-empty
    // [JsonProperty] PropertyName wins, otherwise the CLR member name.
    protected virtual string MapClrMemberNameToJsonFieldName(MemberInfo memberInfo)
    {
        // TODO: Optimize and/or cache
        foreach (JsonProperty eachAttr in memberInfo.GetCustomAttributes(typeof(JsonProperty), true))
            if (!string.IsNullOrEmpty(eachAttr.PropertyName))
                return eachAttr.PropertyName;
        return memberInfo.Name;
    }

    // Overload that also surfaces the JsonProperty attribute itself (last
    // attribute wins for jsonProp; last non-empty PropertyName wins for jsonName).
    protected virtual void MapClrMemberNameToJsonFieldName(MemberInfo memberInfo, out string jsonName, out JsonProperty jsonProp)
    {
        jsonName = memberInfo.Name;
        jsonProp = null;
        // TODO: Optimize and/or cache
        foreach (JsonProperty eachAttr in memberInfo.GetCustomAttributes(typeof(JsonProperty), true))
        {
            jsonProp = eachAttr;
            if (!string.IsNullOrEmpty(eachAttr.PropertyName))
                jsonName = eachAttr.PropertyName;
        }
    }

    // Builds a constructor delegate for the cache. Arrays use the (int length)
    // constructor; other types the parameterless one.
    // NOTE(review): "Contructor" is a typo in the original identifier; it cannot
    // be renamed here without breaking external overriders of this virtual.
    internal virtual ReflectionUtils.ConstructorDelegate ContructorDelegateFactory(Type key)
    {
        return ReflectionUtils.GetContructor(key, key.IsArray ?
ArrayConstructorParameterTypes : EmptyTypes);
    }

    // Builds the getter map for a type: public instance readable properties
    // and public instance fields.
    internal virtual IDictionary<MemberInfo, ReflectionUtils.GetDelegate> GetterValueFactory(Type type)
    {
        IDictionary<MemberInfo, ReflectionUtils.GetDelegate> result = new Dictionary<MemberInfo, ReflectionUtils.GetDelegate>();
        foreach (PropertyInfo propertyInfo in ReflectionUtils.GetProperties(type))
        {
            if (propertyInfo.CanRead)
            {
                MethodInfo getMethod = ReflectionUtils.GetGetterMethodInfo(propertyInfo);
                if (getMethod.IsStatic || !getMethod.IsPublic)
                    continue;
                result[propertyInfo] = ReflectionUtils.GetGetMethod(propertyInfo);
            }
        }
        foreach (FieldInfo fieldInfo in ReflectionUtils.GetFields(type))
        {
            if (fieldInfo.IsStatic || !fieldInfo.IsPublic)
                continue;
            result[fieldInfo] = ReflectionUtils.GetGetMethod(fieldInfo);
        }
        return result;
    }

    // Builds the setter map for a type, keyed by JSON field name: public
    // instance writable properties and public, non-readonly instance fields.
    internal virtual IDictionary<string, KeyValuePair<Type, ReflectionUtils.SetDelegate>> SetterValueFactory(Type type)
    {
        IDictionary<string, KeyValuePair<Type, ReflectionUtils.SetDelegate>> result = new Dictionary<string, KeyValuePair<Type, ReflectionUtils.SetDelegate>>();
        foreach (PropertyInfo propertyInfo in ReflectionUtils.GetProperties(type))
        {
            if (propertyInfo.CanWrite)
            {
                MethodInfo setMethod = ReflectionUtils.GetSetterMethodInfo(propertyInfo);
                if (setMethod.IsStatic || !setMethod.IsPublic)
                    continue;
                result[MapClrMemberNameToJsonFieldName(propertyInfo)] = new KeyValuePair<Type, ReflectionUtils.SetDelegate>(propertyInfo.PropertyType, ReflectionUtils.GetSetMethod(propertyInfo));
            }
        }
        foreach (FieldInfo fieldInfo in ReflectionUtils.GetFields(type))
        {
            if (fieldInfo.IsInitOnly || fieldInfo.IsStatic || !fieldInfo.IsPublic)
                continue;
            result[MapClrMemberNameToJsonFieldName(fieldInfo)] = new KeyValuePair<Type, ReflectionUtils.SetDelegate>(fieldInfo.FieldType, ReflectionUtils.GetSetMethod(fieldInfo));
        }
        return result;
    }

    // Tries known types first (defined later in this file), then falls back to
    // reflection-based serialization of unknown types.
    public virtual bool TrySerializeNonPrimitiveObject(object input, out object output)
    {
        return TrySerializeKnownTypes(input, out output) || TrySerializeUnknownTypes(input, out output);
    }
[SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")] public virtual object DeserializeObject(object value, Type type) { if (type == null) throw new ArgumentNullException("type"); if (value != null && type.IsInstanceOfType(value)) return value; string str = value as string; if (type == typeof(Guid) && string.IsNullOrEmpty(str)) return default(Guid); if (value == null) return null; object obj = null; if (str != null) { if (str.Length != 0) // We know it can't be null now. { if (type == typeof(DateTime) || (ReflectionUtils.IsNullableType(type) && Nullable.GetUnderlyingType(type) == typeof(DateTime))) return DateTime.ParseExact(str, Iso8601Format, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal); if (type == typeof(DateTimeOffset) || (ReflectionUtils.IsNullableType(type) && Nullable.GetUnderlyingType(type) == typeof(DateTimeOffset))) return DateTimeOffset.ParseExact(str, Iso8601Format, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal); if (type == typeof(Guid) || (ReflectionUtils.IsNullableType(type) && Nullable.GetUnderlyingType(type) == typeof(Guid))) return new Guid(str); if (type == typeof(Uri)) { bool isValid = Uri.IsWellFormedUriString(str, UriKind.RelativeOrAbsolute); Uri result; if (isValid && Uri.TryCreate(str, UriKind.RelativeOrAbsolute, out result)) return result; return null; } if (type == typeof(string)) return str; return Convert.ChangeType(str, type, CultureInfo.InvariantCulture); } else { if (type == typeof(Guid)) obj = default(Guid); else if (ReflectionUtils.IsNullableType(type) && Nullable.GetUnderlyingType(type) == typeof(Guid)) obj = null; else obj = str; } // Empty string case if (!ReflectionUtils.IsNullableType(type) && Nullable.GetUnderlyingType(type) == typeof(Guid)) return str; } else if (value is bool) return value; bool valueIsLong = value is long; bool valueIsUlong = value is ulong; bool valueIsDouble = value is double; 
Type nullableType = Nullable.GetUnderlyingType(type); if (nullableType != null && PlayFabSimpleJson.NumberTypes.IndexOf(nullableType) != -1) type = nullableType; // Just use the regular type for the conversion bool isNumberType = PlayFabSimpleJson.NumberTypes.IndexOf(type) != -1; bool isEnumType = type.GetTypeInfo().IsEnum; if ((valueIsLong && type == typeof(long)) || (valueIsUlong && type == typeof(ulong)) || (valueIsDouble && type == typeof(double))) return value; if ((valueIsLong || valueIsUlong || valueIsDouble) && isEnumType) return Enum.ToObject(type, Convert.ChangeType(value, Enum.GetUnderlyingType(type), CultureInfo.InvariantCulture)); if ((valueIsLong || valueIsUlong || valueIsDouble) && isNumberType) return Convert.ChangeType(value, type, CultureInfo.InvariantCulture); IDictionary<string, object> objects = value as IDictionary<string, object>; if (objects != null) { IDictionary<string, object> jsonObject = objects; if (ReflectionUtils.IsTypeDictionary(type)) { // if dictionary then Type[] types = ReflectionUtils.GetGenericTypeArguments(type); Type keyType = types[0]; Type valueType = types[1]; Type genericType = typeof(Dictionary<,>).MakeGenericType(keyType, valueType); IDictionary dict = (IDictionary)ConstructorCache[genericType](); foreach (KeyValuePair<string, object> kvp in jsonObject) dict.Add(kvp.Key, DeserializeObject(kvp.Value, valueType)); obj = dict; } else { if (type == typeof(object)) obj = value; else { obj = ConstructorCache[type](); foreach (KeyValuePair<string, KeyValuePair<Type, ReflectionUtils.SetDelegate>> setter in SetCache[type]) { object jsonValue; if (jsonObject.TryGetValue(setter.Key, out jsonValue)) { jsonValue = DeserializeObject(jsonValue, setter.Value.Key); setter.Value.Value(obj, jsonValue); } } } } } else { IList<object> valueAsList = value as IList<object>; if (valueAsList != null) { IList<object> jsonObject = valueAsList; IList list = null; if (type.IsArray) { list = (IList)ConstructorCache[type](jsonObject.Count); int i = 
0; foreach (object o in jsonObject) list[i++] = DeserializeObject(o, type.GetElementType()); } else if (ReflectionUtils.IsTypeGenericeCollectionInterface(type) || ReflectionUtils.IsAssignableFrom(typeof(IList), type) || type == typeof(object)) { Type innerType = ReflectionUtils.GetGenericListElementType(type); ReflectionUtils.ConstructorDelegate ctrDelegate = null; if (type != typeof(object)) ctrDelegate = ConstructorCache[type]; if (ctrDelegate == null) ctrDelegate = ConstructorCache[typeof(List<>).MakeGenericType(innerType)]; list = (IList)ctrDelegate(); foreach (object o in jsonObject) list.Add(DeserializeObject(o, innerType)); } obj = list; } return obj; } if (ReflectionUtils.IsNullableType(type)) return ReflectionUtils.ToNullableType(obj, type); return obj; } protected virtual object SerializeEnum(Enum p) { return Convert.ToDouble(p, CultureInfo.InvariantCulture); } [SuppressMessage("Microsoft.Design", "CA1007:UseGenericsWhereAppropriate", Justification = "Need to support .NET 2")] protected virtual bool TrySerializeKnownTypes(object input, out object output) { bool returnValue = true; if (input is DateTime) output = ((DateTime)input).ToUniversalTime().ToString(Iso8601Format[0], CultureInfo.InvariantCulture); else if (input is DateTimeOffset) output = ((DateTimeOffset)input).ToUniversalTime().ToString(Iso8601Format[0], CultureInfo.InvariantCulture); else if (input is Guid) output = ((Guid)input).ToString("D"); else if (input is Uri) output = input.ToString(); else { Enum inputEnum = input as Enum; if (inputEnum != null) output = SerializeEnum(inputEnum); else { returnValue = false; output = null; } } return returnValue; } [SuppressMessage("Microsoft.Design", "CA1007:UseGenericsWhereAppropriate", Justification = "Need to support .NET 2")] protected virtual bool TrySerializeUnknownTypes(object input, out object output) { if (input == null) throw new ArgumentNullException("input"); output = null; Type type = input.GetType(); if (type.FullName == null) return 
false; IDictionary<string, object> obj = new JsonObject(); IDictionary<MemberInfo, ReflectionUtils.GetDelegate> getters = GetCache[type]; foreach (KeyValuePair<MemberInfo, ReflectionUtils.GetDelegate> getter in getters) { if (getter.Value == null) continue; string jsonKey; JsonProperty jsonProp; MapClrMemberNameToJsonFieldName(getter.Key, out jsonKey, out jsonProp); if (obj.ContainsKey(jsonKey)) throw new Exception("The given key is defined multiple times in the same type: " + input.GetType().Name + "." + jsonKey); object value = getter.Value(input); if (jsonProp == null || jsonProp.NullValueHandling == NullValueHandling.Include || value != null) obj.Add(jsonKey, value); } output = obj; return true; } } #if SIMPLE_JSON_DATACONTRACT [GeneratedCode("simple-json", "1.0.0")] #if SIMPLE_JSON_INTERNAL internal #else public #endif class DataContractJsonSerializerStrategy : PocoJsonSerializerStrategy { public DataContractJsonSerializerStrategy() { GetCache = new ReflectionUtils.ThreadSafeDictionary<Type, IDictionary<string, ReflectionUtils.GetDelegate>>(GetterValueFactory); SetCache = new ReflectionUtils.ThreadSafeDictionary<Type, IDictionary<string, KeyValuePair<Type, ReflectionUtils.SetDelegate>>>(SetterValueFactory); } internal override IDictionary<string, ReflectionUtils.GetDelegate> GetterValueFactory(Type type) { bool hasDataContract = ReflectionUtils.GetAttribute(type, typeof(DataContractAttribute)) != null; if (!hasDataContract) return base.GetterValueFactory(type); string jsonKey; IDictionary<string, ReflectionUtils.GetDelegate> result = new Dictionary<string, ReflectionUtils.GetDelegate>(); foreach (PropertyInfo propertyInfo in ReflectionUtils.GetProperties(type)) { if (propertyInfo.CanRead) { MethodInfo getMethod = ReflectionUtils.GetGetterMethodInfo(propertyInfo); if (!getMethod.IsStatic && CanAdd(propertyInfo, out jsonKey)) result[jsonKey] = ReflectionUtils.GetGetMethod(propertyInfo); } } foreach (FieldInfo fieldInfo in ReflectionUtils.GetFields(type)) { if 
(!fieldInfo.IsStatic && CanAdd(fieldInfo, out jsonKey)) result[jsonKey] = ReflectionUtils.GetGetMethod(fieldInfo); } return result; } internal override IDictionary<string, KeyValuePair<Type, ReflectionUtils.SetDelegate>> SetterValueFactory(Type type) { bool hasDataContract = ReflectionUtils.GetAttribute(type, typeof(DataContractAttribute)) != null; if (!hasDataContract) return base.SetterValueFactory(type); string jsonKey; IDictionary<string, KeyValuePair<Type, ReflectionUtils.SetDelegate>> result = new Dictionary<string, KeyValuePair<Type, ReflectionUtils.SetDelegate>>(); foreach (PropertyInfo propertyInfo in ReflectionUtils.GetProperties(type)) { if (propertyInfo.CanWrite) { MethodInfo setMethod = ReflectionUtils.GetSetterMethodInfo(propertyInfo); if (!setMethod.IsStatic && CanAdd(propertyInfo, out jsonKey)) result[jsonKey] = new KeyValuePair<Type, ReflectionUtils.SetDelegate>(propertyInfo.PropertyType, ReflectionUtils.GetSetMethod(propertyInfo)); } } foreach (FieldInfo fieldInfo in ReflectionUtils.GetFields(type)) { if (!fieldInfo.IsInitOnly && !fieldInfo.IsStatic && CanAdd(fieldInfo, out jsonKey)) result[jsonKey] = new KeyValuePair<Type, ReflectionUtils.SetDelegate>(fieldInfo.FieldType, ReflectionUtils.GetSetMethod(fieldInfo)); } // todo implement sorting for DATACONTRACT. return result; } private static bool CanAdd(MemberInfo info, out string jsonKey) { jsonKey = null; if (ReflectionUtils.GetAttribute(info, typeof(IgnoreDataMemberAttribute)) != null) return false; DataMemberAttribute dataMemberAttribute = (DataMemberAttribute)ReflectionUtils.GetAttribute(info, typeof(DataMemberAttribute)); if (dataMemberAttribute == null) return false; jsonKey = string.IsNullOrEmpty(dataMemberAttribute.Name) ? info.Name : dataMemberAttribute.Name; return true; } } #endif // This class is meant to be copied into other libraries. So we want to exclude it from Code Analysis rules // that might be in place in the target project. 
[GeneratedCode("reflection-utils", "1.0.0")]
#if SIMPLE_JSON_REFLECTION_UTILS_PUBLIC
public
#else
internal
#endif
class ReflectionUtils
{
    // Shared zero-length array for parameterless getter invocations.
    private static readonly object[] EmptyObjects = new object[0];

    public delegate object GetDelegate(object source);
    public delegate void SetDelegate(object source, object value);
    public delegate object ConstructorDelegate(params object[] args);

    public delegate TValue ThreadSafeDictionaryValueFactory<TKey, TValue>(TKey key);

    // Per-thread scratch array reused for single-argument setter invocations,
    // avoiding a new object[1] allocation on every property set.
    [ThreadStatic]
    private static object[] _1ObjArray;

#if SIMPLE_JSON_TYPEINFO
    public static TypeInfo GetTypeInfo(Type type)
    {
        return type.GetTypeInfo();
    }
#else
    public static Type GetTypeInfo(Type type)
    {
        return type;
    }
#endif

    // Returns the attribute of the given type applied to the member, or null when absent.
    public static Attribute GetAttribute(MemberInfo info, Type type)
    {
#if SIMPLE_JSON_TYPEINFO
        if (info == null || type == null || !info.IsDefined(type))
            return null;
        return info.GetCustomAttribute(type);
#else
        if (info == null || type == null || !Attribute.IsDefined(info, type))
            return null;
        return Attribute.GetCustomAttribute(info, type);
#endif
    }

    // Element type of a type implementing IList<T>; falls back to the type's own
    // first generic argument when no IList<> interface is found.
    public static Type GetGenericListElementType(Type type)
    {
        if (type == typeof(object)) return type;
        IEnumerable<Type> interfaces;
#if SIMPLE_JSON_TYPEINFO
        interfaces = type.GetTypeInfo().ImplementedInterfaces;
#else
        interfaces = type.GetInterfaces();
#endif
        foreach (Type implementedInterface in interfaces)
        {
            if (IsTypeGeneric(implementedInterface) && implementedInterface.GetGenericTypeDefinition() == typeof(IList<>))
            {
                return GetGenericTypeArguments(implementedInterface)[0];
            }
        }
        return GetGenericTypeArguments(type)[0];
    }

    // Returns the attribute of the given type applied to the type itself, or null when absent.
    public static Attribute GetAttribute(Type objectType, Type attributeType)
    {
#if SIMPLE_JSON_TYPEINFO
        if (objectType == null || attributeType == null || !objectType.GetTypeInfo().IsDefined(attributeType))
            return null;
        return objectType.GetTypeInfo().GetCustomAttribute(attributeType);
#else
        if (objectType == null || attributeType == null || !Attribute.IsDefined(objectType, attributeType))
            return null;
        return Attribute.GetCustomAttribute(objectType, attributeType);
#endif
    }

    public static Type[] GetGenericTypeArguments(Type type)
    {
#if SIMPLE_JSON_TYPEINFO
        return type.GetTypeInfo().GenericTypeArguments;
#else
        return type.GetGenericArguments();
#endif
    }

    public static bool IsTypeGeneric(Type type)
    {
        return GetTypeInfo(type).IsGenericType;
    }

    // True for the generic collection interfaces the deserializer knows how to materialize.
    public static bool IsTypeGenericeCollectionInterface(Type type)
    {
        if (!IsTypeGeneric(type))
            return false;

        Type genericDefinition = type.GetGenericTypeDefinition();

        return (genericDefinition == typeof(IList<>)
            || genericDefinition == typeof(ICollection<>)
            || genericDefinition == typeof(IEnumerable<>)
#if SIMPLE_JSON_READONLY_COLLECTIONS
            || genericDefinition == typeof(IReadOnlyCollection<>)
            || genericDefinition == typeof(IReadOnlyList<>)
#endif
            );
    }

    public static bool IsAssignableFrom(Type type1, Type type2)
    {
        return GetTypeInfo(type1).IsAssignableFrom(GetTypeInfo(type2));
    }

    public static bool IsTypeDictionary(Type type)
    {
#if SIMPLE_JSON_TYPEINFO
        if (typeof(IDictionary<,>).GetTypeInfo().IsAssignableFrom(type.GetTypeInfo()))
            return true;
#else
        if (typeof(System.Collections.IDictionary).IsAssignableFrom(type))
            return true;
#endif
        if (!GetTypeInfo(type).IsGenericType)
            return false;

        Type genericDefinition = type.GetGenericTypeDefinition();
        return genericDefinition == typeof(IDictionary<,>) || genericDefinition == typeof(Dictionary<,>);
    }

    public static bool IsNullableType(Type type)
    {
        return GetTypeInfo(type).IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>);
    }

    public static object ToNullableType(object obj, Type nullableType)
    {
        return obj == null ? null : Convert.ChangeType(obj, Nullable.GetUnderlyingType(nullableType), CultureInfo.InvariantCulture);
    }

    public static bool IsValueType(Type type)
    {
        return GetTypeInfo(type).IsValueType;
    }

    public static IEnumerable<ConstructorInfo> GetConstructors(Type type)
    {
#if SIMPLE_JSON_TYPEINFO
        return type.GetTypeInfo().DeclaredConstructors;
#else
        return type.GetConstructors();
#endif
    }

    // Finds a constructor whose parameter list matches argsType exactly (same
    // count, same types, same order), or null when none matches.
    public static ConstructorInfo GetConstructorInfo(Type type, params Type[] argsType)
    {
        IEnumerable<ConstructorInfo> constructorInfos = GetConstructors(type);
        int i;
        bool matches;
        foreach (ConstructorInfo constructorInfo in constructorInfos)
        {
            ParameterInfo[] parameters = constructorInfo.GetParameters();
            if (argsType.Length != parameters.Length)
                continue;

            i = 0;
            matches = true;
            foreach (ParameterInfo parameterInfo in constructorInfo.GetParameters())
            {
                // BUGFIX: the index must advance with the parameter position.
                // Previously every parameter was compared against argsType[0],
                // so multi-argument lookups with distinct types never matched.
                // (In-file callers pass 0 or 1 types, so they are unaffected.)
                if (parameterInfo.ParameterType != argsType[i++])
                {
                    matches = false;
                    break;
                }
            }

            if (matches)
                return constructorInfo;
        }
        return null;
    }

    public static IEnumerable<PropertyInfo> GetProperties(Type type)
    {
#if SIMPLE_JSON_TYPEINFO
        return type.GetRuntimeProperties();
#else
        return type.GetProperties(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static);
#endif
    }

    public static IEnumerable<FieldInfo> GetFields(Type type)
    {
#if SIMPLE_JSON_TYPEINFO
        return type.GetRuntimeFields();
#else
        return type.GetFields(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static);
#endif
    }

    public static MethodInfo GetGetterMethodInfo(PropertyInfo propertyInfo)
    {
#if SIMPLE_JSON_TYPEINFO
        return propertyInfo.GetMethod;
#else
        return propertyInfo.GetGetMethod(true);
#endif
    }

    public static MethodInfo GetSetterMethodInfo(PropertyInfo propertyInfo)
    {
#if SIMPLE_JSON_TYPEINFO
        return propertyInfo.SetMethod;
#else
        return propertyInfo.GetSetMethod(true);
#endif
    }

    public static ConstructorDelegate GetContructor(ConstructorInfo constructorInfo)
    {
        return GetConstructorByReflection(constructorInfo);
    }

    public static ConstructorDelegate GetContructor(Type type, params Type[] argsType)
    {
        return GetConstructorByReflection(type, argsType);
    }

    public static ConstructorDelegate GetConstructorByReflection(ConstructorInfo constructorInfo)
    {
        return delegate (object[] args) { var x = constructorInfo; return x.Invoke(args); };
    }

    public static ConstructorDelegate GetConstructorByReflection(Type type, params Type[] argsType)
    {
        ConstructorInfo constructorInfo = GetConstructorInfo(type, argsType);
        return constructorInfo == null ? null : GetConstructorByReflection(constructorInfo);
    }

    public static GetDelegate GetGetMethod(PropertyInfo propertyInfo)
    {
        return GetGetMethodByReflection(propertyInfo);
    }

    public static GetDelegate GetGetMethod(FieldInfo fieldInfo)
    {
        return GetGetMethodByReflection(fieldInfo);
    }

    public static GetDelegate GetGetMethodByReflection(PropertyInfo propertyInfo)
    {
        MethodInfo methodInfo = GetGetterMethodInfo(propertyInfo);
        return delegate (object source) { return methodInfo.Invoke(source, EmptyObjects); };
    }

    public static GetDelegate GetGetMethodByReflection(FieldInfo fieldInfo)
    {
        return delegate (object source) { return fieldInfo.GetValue(source); };
    }

    public static SetDelegate GetSetMethod(PropertyInfo propertyInfo)
    {
        return GetSetMethodByReflection(propertyInfo);
    }

    public static SetDelegate GetSetMethod(FieldInfo fieldInfo)
    {
        return GetSetMethodByReflection(fieldInfo);
    }

    public static SetDelegate GetSetMethodByReflection(PropertyInfo propertyInfo)
    {
        MethodInfo methodInfo = GetSetterMethodInfo(propertyInfo);
        return delegate (object source, object value)
        {
            // Reuse the [ThreadStatic] one-element array to avoid per-call allocation.
            if (_1ObjArray == null)
                _1ObjArray = new object[1];
            _1ObjArray[0] = value;
            methodInfo.Invoke(source, _1ObjArray);
        };
    }

    public static SetDelegate GetSetMethodByReflection(FieldInfo fieldInfo)
    {
        return delegate (object source, object value) { fieldInfo.SetValue(source, value); };
    }

    // Lock-free-read, copy-on-write cache keyed by TKey. A miss computes the
    // value via the factory and swaps in a copied dictionary under the lock,
    // so concurrent readers never observe a dictionary being mutated. The
    // mutating IDictionary members are intentionally unsupported.
    public sealed class ThreadSafeDictionary<TKey, TValue> : IDictionary<TKey, TValue>
    {
        private readonly object _lock = new object();
        private readonly ThreadSafeDictionaryValueFactory<TKey, TValue> _valueFactory;
        private Dictionary<TKey, TValue> _dictionary;

        public ThreadSafeDictionary(ThreadSafeDictionaryValueFactory<TKey, TValue> valueFactory)
        {
            _valueFactory = valueFactory;
        }

        private TValue Get(TKey key)
        {
            if (_dictionary == null)
                return AddValue(key);
            TValue value;
            if (!_dictionary.TryGetValue(key, out value))
                return AddValue(key);
            return value;
        }

        private TValue AddValue(TKey key)
        {
            // The factory runs outside the lock; if another thread won the race,
            // its cached value is returned and ours is discarded.
            TValue value = _valueFactory(key);
            lock (_lock)
            {
                if (_dictionary == null)
                {
                    _dictionary = new Dictionary<TKey, TValue>();
                    _dictionary[key] = value;
                }
                else
                {
                    TValue val;
                    if (_dictionary.TryGetValue(key, out val))
                        return val;
                    // Copy-on-write: mutate a private copy, then publish it.
                    Dictionary<TKey, TValue> dict = new Dictionary<TKey, TValue>(_dictionary);
                    dict[key] = value;
                    _dictionary = dict;
                }
            }
            return value;
        }

        public void Add(TKey key, TValue value)
        {
            throw new NotImplementedException();
        }

        public bool ContainsKey(TKey key)
        {
            return _dictionary.ContainsKey(key);
        }

        public ICollection<TKey> Keys
        {
            get { return _dictionary.Keys; }
        }

        public bool Remove(TKey key)
        {
            throw new NotImplementedException();
        }

        public bool TryGetValue(TKey key, out TValue value)
        {
            value = this[key];
            return true;
        }

        public ICollection<TValue> Values
        {
            get { return _dictionary.Values; }
        }

        public TValue this[TKey key]
        {
            get { return Get(key); }
            set { throw new NotImplementedException(); }
        }

        public void Add(KeyValuePair<TKey, TValue> item)
        {
            throw new NotImplementedException();
        }

        public void Clear()
        {
            throw new NotImplementedException();
        }

        public bool Contains(KeyValuePair<TKey, TValue> item)
        {
            throw new NotImplementedException();
        }

        public void CopyTo(KeyValuePair<TKey, TValue>[] array, int arrayIndex)
        {
            throw new NotImplementedException();
        }

        public int Count
        {
            get { return _dictionary.Count; }
        }

        public bool IsReadOnly
        {
            get { throw new NotImplementedException(); }
        }

        public bool Remove(KeyValuePair<TKey, TValue> item)
        {
            throw new NotImplementedException();
        }

        public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator()
        {
            return _dictionary.GetEnumerator();
        }

        System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
        {
            return _dictionary.GetEnumerator();
        }
    }
}
} // closes the enclosing namespace opened earlier in the file

// ReSharper restore LoopCanBeConvertedToQuery
// ReSharper restore RedundantExplicitArrayCreation
// ReSharper restore SuggestUseVarKeywordEvident
JoshuaStrunk/SDKGenerator
targets/unity-v2/source/Shared/Internal/SimpleJson.cs
C#
apache-2.0
83,283
package io.cattle.platform.networking.host.dao.impl; import static io.cattle.platform.core.model.tables.HostTable.*; import static io.cattle.platform.core.model.tables.HostVnetMapTable.*; import static io.cattle.platform.core.model.tables.SubnetVnetMapTable.*; import static io.cattle.platform.core.model.tables.VnetTable.*; import java.util.List; import io.cattle.platform.core.model.Host; import io.cattle.platform.core.model.HostVnetMap; import io.cattle.platform.core.model.Network; import io.cattle.platform.core.model.Subnet; import io.cattle.platform.core.model.SubnetVnetMap; import io.cattle.platform.core.model.Vnet; import io.cattle.platform.core.model.tables.records.VnetRecord; import io.cattle.platform.db.jooq.dao.impl.AbstractJooqDao; import io.cattle.platform.networking.host.contants.HostOnlyConstants; import io.cattle.platform.networking.host.dao.HostOnlyDao; import io.cattle.platform.object.ObjectManager; import javax.inject.Inject; import org.jooq.Record; public class HostOnlyDaoImpl extends AbstractJooqDao implements HostOnlyDao { ObjectManager objectManager; @Override public Vnet getVnetForHost(Network network, Host host) { Long physicalHostId = host.getPhysicalHostId(); Record record = null; if ( physicalHostId == null ) { record = create() .select(VNET.fields()) .from(VNET) .join(HOST_VNET_MAP) .on(HOST_VNET_MAP.VNET_ID.eq(VNET.ID)) .where(VNET.NETWORK_ID.eq(network.getId()) .and(HOST_VNET_MAP.HOST_ID.eq(host.getId())) .and(HOST_VNET_MAP.REMOVED.isNull())) .fetchAny(); } else { record = create() .select(VNET.fields()) .from(VNET) .join(HOST_VNET_MAP) .on(HOST_VNET_MAP.VNET_ID.eq(VNET.ID)) .join(HOST) .on(HOST_VNET_MAP.HOST_ID.eq(HOST.ID)) .where(VNET.NETWORK_ID.eq(network.getId()) .and(HOST.PHYSICAL_HOST_ID.eq(physicalHostId)) .and(HOST_VNET_MAP.REMOVED.isNull())) .fetchAny(); } return record == null ? 
null : record.into(VnetRecord.class); } @Override public Vnet createVnetForHost(Network network, Host host, Subnet subnet, String uri) { if ( uri == null ) { uri = HostOnlyConstants.DEFAULT_HOST_SUBNET_URI; } Vnet vnet = objectManager.create(Vnet.class, VNET.URI, uri, VNET.ACCOUNT_ID, network.getAccountId(), VNET.NETWORK_ID, network.getId()); objectManager.create(HostVnetMap.class, HOST_VNET_MAP.VNET_ID, vnet.getId(), HOST_VNET_MAP.HOST_ID, host.getId()); if ( subnet != null ) { objectManager.create(SubnetVnetMap.class, SUBNET_VNET_MAP.VNET_ID, vnet.getId(), SUBNET_VNET_MAP.SUBNET_ID, subnet.getId()); } return vnet; } @Override public HostVnetMap mapVnetToHost(Vnet vnet, Host host) { List<HostVnetMap> maps = objectManager.find(HostVnetMap.class, HOST_VNET_MAP.VNET_ID, vnet.getId(), HOST_VNET_MAP.HOST_ID, host.getId()); if ( maps.size() > 0 ) { return maps.get(0); } return objectManager.create(HostVnetMap.class, HOST_VNET_MAP.VNET_ID, vnet.getId(), HOST_VNET_MAP.HOST_ID, host.getId()); } public ObjectManager getObjectManager() { return objectManager; } @Inject public void setObjectManager(ObjectManager objectManager) { this.objectManager = objectManager; } }
stresler/cattle
code/implementation/host-only-network/src/main/java/io/cattle/platform/networking/host/dao/impl/HostOnlyDaoImpl.java
Java
apache-2.0
3,937
// // Copyright 2013 Uncodin, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #include "document.h" namespace Bypass { Document::Document() : elements() { } Document::~Document() { } void Document::append(const Element& element) { elements.push_back(Element(element)); } size_t Document::size() { return elements.size(); } Element Document::operator[](size_t i) { return elements[i]; } }
trello/bypass
src/document.cpp
C++
apache-2.0
940
/*
 * Copyright (C) 2015 Bilibili
 * Copyright (C) 2015 Zhang Rui <bbcallen@gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package tv.danmaku.ijk.media.player.misc;

import android.annotation.TargetApi;
import android.os.Build;
import android.text.TextUtils;

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

import tv.danmaku.ijk.media.player.IjkMediaMeta;

/**
 * Adapts an {@link IjkMediaMeta.IjkStreamMeta} to the {@link IMediaFormat}
 * interface and adds human-readable "*-ui" pseudo-keys (codec name, bit rate,
 * resolution, ...) that are derived from the raw stream metadata.
 */
public class IjkMediaFormat implements IMediaFormat {
    // Common
    public static final String KEY_IJK_CODEC_LONG_NAME_UI = "ijk-codec-long-name-ui";
    public static final String KEY_IJK_CODEC_NAME_UI = "ijk-codec-name-ui";
    public static final String KEY_IJK_BIT_RATE_UI = "ijk-bit-rate-ui";

    // Video
    public static final String KEY_IJK_CODEC_PROFILE_LEVEL_UI = "ijk-profile-level-ui";
    public static final String KEY_IJK_CODEC_PIXEL_FORMAT_UI = "ijk-pixel-format-ui";
    public static final String KEY_IJK_RESOLUTION_UI = "ijk-resolution-ui";
    public static final String KEY_IJK_FRAME_RATE_UI = "ijk-frame-rate-ui";

    // Audio
    public static final String KEY_IJK_SAMPLE_RATE_UI = "ijk-sample-rate-ui";
    public static final String KEY_IJK_CHANNEL_UI = "ijk-channel-ui";

    // Codec
    public static final String CODEC_NAME_H264 = "h264";

    /** Backing stream metadata; may be null, in which case getters return defaults. */
    public final IjkMediaMeta.IjkStreamMeta mMediaFormat;

    public IjkMediaFormat(IjkMediaMeta.IjkStreamMeta streamMeta) {
        mMediaFormat = streamMeta;
    }

    /**
     * @return the integer value for {@code name}, or 0 when no metadata is attached.
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    @Override
    public int getInteger(String name) {
        if (mMediaFormat == null)
            return 0;
        return mMediaFormat.getInt(name);
    }

    /**
     * @return the formatted value for a "*-ui" pseudo-key, or the raw metadata
     *         string otherwise; null when no metadata is attached.
     */
    @Override
    public String getString(String name) {
        if (mMediaFormat == null)
            return null;

        if (sFormatterMap.containsKey(name)) {
            Formatter formatter = sFormatterMap.get(name);
            return formatter.format(this);
        }

        return mMediaFormat.getString(name);
    }

    //-------------------------
    // Formatter
    //-------------------------

    /** Renders one "*-ui" pseudo-key; falls back to "N/A" for empty results. */
    private static abstract class Formatter {
        public String format(IjkMediaFormat mediaFormat) {
            String value = doFormat(mediaFormat);
            if (TextUtils.isEmpty(value))
                return getDefaultString();
            return value;
        }

        protected abstract String doFormat(IjkMediaFormat mediaFormat);

        @SuppressWarnings("SameReturnValue")
        protected String getDefaultString() {
            return "N/A";
        }
    }

    private static final Map<String, Formatter> sFormatterMap = new HashMap<String, Formatter>();

    // BUG FIX: this used to be an *instance* initializer, so every constructed
    // IjkMediaFormat re-populated the static map, and the first two formatters
    // captured the enclosing instance (reading its mMediaFormat field instead of
    // the mediaFormat parameter) — returning data from whichever instance was
    // constructed last. A static initializer runs exactly once and the
    // formatters now consistently use the mediaFormat argument.
    static {
        sFormatterMap.put(KEY_IJK_CODEC_LONG_NAME_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                return mediaFormat.mMediaFormat.getString(IjkMediaMeta.IJKM_KEY_CODEC_LONG_NAME);
            }
        });
        sFormatterMap.put(KEY_IJK_CODEC_NAME_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                return mediaFormat.mMediaFormat.getString(IjkMediaMeta.IJKM_KEY_CODEC_NAME);
            }
        });
        sFormatterMap.put(KEY_IJK_BIT_RATE_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                int bitRate = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_BITRATE);
                if (bitRate <= 0) {
                    return null;
                } else if (bitRate < 1000) {
                    return String.format(Locale.US, "%d bit/s", bitRate);
                } else {
                    return String.format(Locale.US, "%d kb/s", bitRate / 1000);
                }
            }
        });
        sFormatterMap.put(KEY_IJK_CODEC_PROFILE_LEVEL_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                int profileIndex = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_CODEC_PROFILE_ID);
                String profile;
                switch (profileIndex) {
                    case IjkMediaMeta.FF_PROFILE_H264_BASELINE:
                        profile = "Baseline";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_CONSTRAINED_BASELINE:
                        profile = "Constrained Baseline";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_MAIN:
                        profile = "Main";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_EXTENDED:
                        profile = "Extended";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH:
                        profile = "High";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH_10:
                        profile = "High 10";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH_10_INTRA:
                        profile = "High 10 Intra";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH_422:
                        profile = "High 4:2:2";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH_422_INTRA:
                        profile = "High 4:2:2 Intra";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH_444:
                        profile = "High 4:4:4";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH_444_PREDICTIVE:
                        profile = "High 4:4:4 Predictive";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_HIGH_444_INTRA:
                        profile = "High 4:4:4 Intra";
                        break;
                    case IjkMediaMeta.FF_PROFILE_H264_CAVLC_444:
                        profile = "CAVLC 4:4:4";
                        break;
                    default:
                        return null;
                }

                StringBuilder sb = new StringBuilder();
                sb.append(profile);

                // Append "Profile Level M.N" for h264 streams only.
                String codecName = mediaFormat.getString(IjkMediaMeta.IJKM_KEY_CODEC_NAME);
                if (!TextUtils.isEmpty(codecName) && codecName.equalsIgnoreCase(CODEC_NAME_H264)) {
                    int level = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_CODEC_LEVEL);
                    if (level < 10)
                        return sb.toString();

                    sb.append(" Profile Level ");
                    sb.append((level / 10) % 10);
                    if ((level % 10) != 0) {
                        sb.append(".");
                        sb.append(level % 10);
                    }
                }

                return sb.toString();
            }
        });
        sFormatterMap.put(KEY_IJK_CODEC_PIXEL_FORMAT_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                return mediaFormat.getString(IjkMediaMeta.IJKM_KEY_CODEC_PIXEL_FORMAT);
            }
        });
        sFormatterMap.put(KEY_IJK_RESOLUTION_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                int width = mediaFormat.getInteger(KEY_WIDTH);
                int height = mediaFormat.getInteger(KEY_HEIGHT);
                int sarNum = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_SAR_NUM);
                int sarDen = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_SAR_DEN);

                if (width <= 0 || height <= 0) {
                    return null;
                } else if (sarNum <= 0 || sarDen <= 0) {
                    return String.format(Locale.US, "%d x %d", width, height);
                } else {
                    return String.format(Locale.US, "%d x %d [SAR %d:%d]", width,
                            height, sarNum, sarDen);
                }
            }
        });
        sFormatterMap.put(KEY_IJK_FRAME_RATE_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                int fpsNum = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_FPS_NUM);
                int fpsDen = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_FPS_DEN);
                if (fpsNum <= 0 || fpsDen <= 0) {
                    return null;
                } else {
                    return String.valueOf(((float) (fpsNum)) / fpsDen);
                }
            }
        });
        sFormatterMap.put(KEY_IJK_SAMPLE_RATE_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                int sampleRate = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_SAMPLE_RATE);
                if (sampleRate <= 0) {
                    return null;
                } else {
                    return String.format(Locale.US, "%d Hz", sampleRate);
                }
            }
        });
        sFormatterMap.put(KEY_IJK_CHANNEL_UI, new Formatter() {
            @Override
            protected String doFormat(IjkMediaFormat mediaFormat) {
                int channelLayout = mediaFormat.getInteger(IjkMediaMeta.IJKM_KEY_CHANNEL_LAYOUT);
                if (channelLayout <= 0) {
                    return null;
                } else {
                    if (channelLayout == IjkMediaMeta.AV_CH_LAYOUT_MONO) {
                        return "mono";
                    } else if (channelLayout == IjkMediaMeta.AV_CH_LAYOUT_STEREO) {
                        return "stereo";
                    } else {
                        return String.format(Locale.US, "%x", channelLayout);
                    }
                }
            }
        });
    }
}
Kerr1Gan/ShareBox
ijkplayer-java/src/main/java/tv/danmaku/ijk/media/player/misc/IjkMediaFormat.java
Java
apache-2.0
10,341
/* * Copyright 2000-2014 Vaadin Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.vaadin.client; import junit.framework.TestCase; import org.junit.Assert; import com.vaadin.client.componentlocator.LocatorUtil; /* * Test LocatorUtil.isUIElement() & isNotificaitonElement methods */ public class LocatorUtilTest extends TestCase { public void testIsUI1() { boolean isUI = LocatorUtil.isUIElement("com.vaadin.ui.UI"); Assert.assertTrue(isUI); } public void testIsUI2() { boolean isUI = LocatorUtil.isUIElement("/com.vaadin.ui.UI"); Assert.assertTrue(isUI); } public void testIsUI3() { boolean isUI = LocatorUtil .isUIElement("//com.vaadin.ui.UI[RandomString"); Assert.assertTrue(isUI); } public void testIsUI4() { boolean isUI = LocatorUtil.isUIElement("//com.vaadin.ui.UI[0]"); Assert.assertTrue(isUI); } public void testIsNotification1() { boolean isUI = LocatorUtil .isNotificationElement("com.vaadin.ui.VNotification"); Assert.assertTrue(isUI); } public void testIsNotification2() { boolean isUI = LocatorUtil .isNotificationElement("com.vaadin.ui.Notification"); Assert.assertTrue(isUI); } public void testIsNotification3() { boolean isUI = LocatorUtil .isNotificationElement("/com.vaadin.ui.VNotification["); Assert.assertTrue(isUI); } public void testIsNotification4() { boolean isUI = LocatorUtil .isNotificationElement("//com.vaadin.ui.VNotification[0]"); Assert.assertTrue(isUI); } }
udayinfy/vaadin
client/tests/src/com/vaadin/client/LocatorUtilTest.java
Java
apache-2.0
2,206
# # Description: Placeholder for service request validation #
maas-ufcg/manageiq
db/fixtures/ae_datastore/ManageIQ/Service/Provisioning/StateMachines/ServiceProvisionRequestApproval.class/__methods__/validate_request.rb
Ruby
apache-2.0
62
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.

using Microsoft.Azure.Management.ResourceManager;
using Microsoft.Azure.Management.ResourceManager.Models;
using Microsoft.Azure.Management.Sql;
using Microsoft.Azure.Management.Sql.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using Xunit;

namespace Sql.Tests
{
    /// <summary>
    /// Scenario tests for SQL usage metrics at subscription, server and
    /// database scope.
    /// </summary>
    public class UsageScenarioTests
    {
        [Fact]
        public void TestGetSubscriptionUsageData()
        {
            using (SqlManagementTestContext context = new SqlManagementTestContext(this))
            {
                SqlManagementClient sqlClient = context.GetClient<SqlManagementClient>();

                // Get subscription usages for a location
                IEnumerable<SubscriptionUsage> subscriptionUsages =
                    sqlClient.SubscriptionUsages.ListByLocation(TestEnvironmentUtilities.DefaultLocation);
                Assert.True(subscriptionUsages.Any());

                // Get a single subscription usage for a location.
                // The result was previously ignored; assert it so the Get
                // call is actually verified.
                SubscriptionUsage subscriptionUsage =
                    sqlClient.SubscriptionUsages.Get(TestEnvironmentUtilities.DefaultLocation, "ServerQuota");
                Assert.NotNull(subscriptionUsage);
            }
        }

        [Fact]
        public void TestGetUsageData()
        {
            using (SqlManagementTestContext context = new SqlManagementTestContext(this))
            {
                ResourceGroup resourceGroup = context.CreateResourceGroup();
                Server server = context.CreateServer(resourceGroup);
                SqlManagementClient sqlClient = context.GetClient<SqlManagementClient>();

                // Get server usages; more than one metric row should reference
                // the server we just created.
                IEnumerable<ServerUsage> serverUsages =
                    sqlClient.ServerUsages.ListByServer(resourceGroup.Name, server.Name);
                Assert.True(serverUsages.Count(s => s.ResourceName == server.Name) > 1);

                // Create a database and get usages
                string dbName = SqlManagementTestUtilities.GenerateName();
                var dbInput = new Database()
                {
                    Location = server.Location
                };
                sqlClient.Databases.CreateOrUpdate(resourceGroup.Name, server.Name, dbName, dbInput);

                IEnumerable<DatabaseUsage> databaseUsages =
                    sqlClient.DatabaseUsages.ListByDatabase(resourceGroup.Name, server.Name, dbName);
                // Count(predicate) instead of Where(...).Count(); Assert.Equal
                // reports the actual count on failure.
                Assert.Equal(1, databaseUsages.Count(db => db.ResourceName == dbName));
            }
        }
    }
}
SiddharthChatrolaMs/azure-sdk-for-net
src/SDKs/SqlManagement/Sql.Tests/UsageScenarioTests.cs
C#
apache-2.0
2,605
module Fog
  module Compute
    class Ecloud
      class Real
        # Declares the get_backup_internet_service API request via Fog's
        # basic_request DSL helper, which generates the request method from
        # its name (presumably a simple GET against the eCloud endpoint —
        # see Fog::Ecloud's basic_request definition to confirm).
        basic_request :get_backup_internet_service
      end
    end
  end
end
jreichhold/chef-repo
vendor/ruby/2.0.0/gems/fog-1.20.0/lib/fog/ecloud/requests/compute/get_backup_internet_service.rb
Ruby
apache-2.0
143
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.suggest; import org.apache.lucene.search.spell.DirectSpellChecker; import org.apache.lucene.search.spell.StringDistance; import org.apache.lucene.search.spell.SuggestMode; import org.apache.lucene.util.automaton.LevenshteinAutomata; public class DirectSpellcheckerSettings { // NB: If this changes, make sure to change the default in TermBuilderSuggester public static SuggestMode DEFAULT_SUGGEST_MODE = SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX; public static float DEFAULT_ACCURACY = 0.5f; public static SortBy DEFAULT_SORT = SortBy.SCORE; // NB: If this changes, make sure to change the default in TermBuilderSuggester public static StringDistance DEFAULT_STRING_DISTANCE = DirectSpellChecker.INTERNAL_LEVENSHTEIN; public static int DEFAULT_MAX_EDITS = LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE; public static int DEFAULT_MAX_INSPECTIONS = 5; public static float DEFAULT_MAX_TERM_FREQ = 0.01f; public static int DEFAULT_PREFIX_LENGTH = 1; public static int DEFAULT_MIN_WORD_LENGTH = 4; public static float DEFAULT_MIN_DOC_FREQ = 0f; private SuggestMode suggestMode = DEFAULT_SUGGEST_MODE; private float accuracy = DEFAULT_ACCURACY; private SortBy sort = DEFAULT_SORT; private StringDistance 
stringDistance = DEFAULT_STRING_DISTANCE; private int maxEdits = DEFAULT_MAX_EDITS; private int maxInspections = DEFAULT_MAX_INSPECTIONS; private float maxTermFreq = DEFAULT_MAX_TERM_FREQ; private int prefixLength = DEFAULT_PREFIX_LENGTH; private int minWordLength = DEFAULT_MIN_WORD_LENGTH; private float minDocFreq = DEFAULT_MIN_DOC_FREQ; public SuggestMode suggestMode() { return suggestMode; } public void suggestMode(SuggestMode suggestMode) { this.suggestMode = suggestMode; } public float accuracy() { return accuracy; } public void accuracy(float accuracy) { this.accuracy = accuracy; } public SortBy sort() { return sort; } public void sort(SortBy sort) { this.sort = sort; } public StringDistance stringDistance() { return stringDistance; } public void stringDistance(StringDistance distance) { this.stringDistance = distance; } public int maxEdits() { return maxEdits; } public void maxEdits(int maxEdits) { this.maxEdits = maxEdits; } public int maxInspections() { return maxInspections; } public void maxInspections(int maxInspections) { this.maxInspections = maxInspections; } public float maxTermFreq() { return maxTermFreq; } public void maxTermFreq(float maxTermFreq) { this.maxTermFreq = maxTermFreq; } public int prefixLength() { return prefixLength; } public void prefixLength(int prefixLength) { this.prefixLength = prefixLength; } public int minWordLength() { return minWordLength; } public void minWordLength(int minWordLength) { this.minWordLength = minWordLength; } public float minDocFreq() { return minDocFreq; } public void minDocFreq(float minDocFreq) { this.minDocFreq = minDocFreq; } @Override public String toString() { return "[" + "suggestMode=" + suggestMode + ",sort=" + sort + ",stringDistance=" + stringDistance + ",accuracy=" + accuracy + ",maxEdits=" + maxEdits + ",maxInspections=" + maxInspections + ",maxTermFreq=" + maxTermFreq + ",prefixLength=" + prefixLength + ",minWordLength=" + minWordLength + ",minDocFreq=" + minDocFreq + "]"; } }
camilojd/elasticsearch
core/src/main/java/org/elasticsearch/search/suggest/DirectSpellcheckerSettings.java
Java
apache-2.0
4,660
//========================================================================
//
//File:      $RCSfile: AddToLayerAction.java,v $
//Version:   $Revision: 1.4 $
//Modified:  $Date: 2013/01/10 23:05:58 $
//
//Copyright (c) 2005-2014 Mentor Graphics Corporation.  All rights reserved.
//
//========================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License.  You may obtain a copy
// of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
// License for the specific language governing permissions and limitations under
// the License.
//========================================================================
package org.xtuml.bp.ui.graphics.actions;

import java.util.ArrayList;
import java.util.List;

import org.eclipse.gef.GraphicalEditPart;
import org.eclipse.gef.GraphicalViewer;
import org.eclipse.jface.action.Action;
import org.xtuml.bp.core.CorePlugin;
import org.xtuml.bp.core.common.ClassQueryInterface_c;
import org.xtuml.bp.core.common.Transaction;
import org.xtuml.bp.core.common.TransactionManager;
import org.xtuml.bp.ui.canvas.Connector_c;
import org.xtuml.bp.ui.canvas.GraphicalElement_c;
import org.xtuml.bp.ui.canvas.Graphicalelementinlayer_c;
import org.xtuml.bp.ui.canvas.Layer_c;
import org.xtuml.bp.ui.canvas.Model_c;
import org.xtuml.bp.ui.canvas.Ooaofgraphics;
import org.xtuml.bp.ui.canvas.Shape_c;
import org.xtuml.bp.ui.graphics.editor.GraphicalEditor;
import org.xtuml.bp.ui.graphics.parts.ConnectorEditPart;
import org.xtuml.bp.ui.graphics.parts.ShapeEditPart;

/**
 * Adds the currently selected shapes/connectors to the named layer of the
 * given diagram model, inside a single undoable transaction. Elements that
 * already participate in the layer (directly or through inheritance) are
 * skipped.
 */
public class AddToLayerAction extends Action {

	private String layerName;
	private Model_c model;

	public AddToLayerAction(String layerName, Model_c model) {
		this.layerName = layerName;
		this.model = model;
	}

	@Override
	public void run() {
		// Locate the target layer on the diagram by name.
		Layer_c layer = Layer_c.getOneGD_LAYOnR34(model,
				new ClassQueryInterface_c() {

					@Override
					public boolean evaluate(Object candidate) {
						return ((Layer_c) candidate).getLayer_name().equals(
								layerName);
					}
				});
		if (layer != null) {
			Transaction transaction = null;
			TransactionManager manager = TransactionManager.getSingleton();
			try {
				transaction = manager.startTransaction(
						"Add element(s) to layer", Ooaofgraphics
								.getDefaultInstance());
				List<GraphicalEditPart> selection = new ArrayList<GraphicalEditPart>();
				GraphicalViewer viewer = GraphicalEditor.getEditor(model)
						.getGraphicalViewer();
				for (Object selected : viewer.getSelectedEditParts()) {
					selection.add((GraphicalEditPart) selected);
				}
				for (GraphicalEditPart part : selection) {
					if (part instanceof ShapeEditPart
							|| part instanceof ConnectorEditPart) {
						GraphicalElement_c elem = null;
						Object partModel = part.getModel();
						if (partModel instanceof Connector_c) {
							elem = GraphicalElement_c
									.getOneGD_GEOnR2((Connector_c) partModel);
						} else {
							elem = GraphicalElement_c
									.getOneGD_GEOnR2((Shape_c) partModel);
						}
						if (elem != null) {
							// If this element already exists in the layer
							// (directly or through an inherited layer), skip
							// it; the tool allows this when at least one
							// selected element is not part of the layer.
							// BUG FIX: the previous code used "continue"
							// inside inner for-loops, which only continued the
							// inner loop and therefore never skipped anything —
							// the element was always re-added.
							boolean alreadyInLayer = containsLayer(
									Layer_c.getManyGD_LAYsOnR35(Graphicalelementinlayer_c
											.getManyGD_GLAYsOnR35(elem)), layer);
							if (!alreadyInLayer && part instanceof ShapeEditPart) {
								alreadyInLayer = containsLayer(
										((ShapeEditPart) part)
												.getInheritedLayers(), layer);
							}
							if (!alreadyInLayer
									&& part instanceof ConnectorEditPart) {
								alreadyInLayer = containsLayer(
										((ConnectorEditPart) part)
												.getInheritedLayers(), layer);
							}
							if (!alreadyInLayer) {
								layer.Addelementtolayer(elem.getElementid());
							}
						}
						if (!layer.getVisible()) {
							// The target layer is hidden: if the part does not
							// belong to any visible layer it would vanish from
							// view, so drop it from the selection.
							Layer_c[] existingLayers = Layer_c
									.getManyGD_LAYsOnR35(Graphicalelementinlayer_c
											.getManyGD_GLAYsOnR35(elem));
							boolean participatesInVisibleLayer = false;
							for (int i = 0; i < existingLayers.length; i++) {
								if (existingLayers[i].getVisible()) {
									participatesInVisibleLayer = true;
									break;
								}
							}
							if (!participatesInVisibleLayer) {
								viewer.deselect(part);
							}
						}
					}
				}
				manager.endTransaction(transaction);
			} catch (Exception e) {
				if (transaction != null) {
					manager.cancelTransaction(transaction, e);
				}
				CorePlugin.logError("Unable to add element to layer.", e);
			}
		}
	}

	/**
	 * @return true when {@code target} is present (by identity) in
	 *         {@code layers}.
	 */
	private static boolean containsLayer(Layer_c[] layers, Layer_c target) {
		for (int i = 0; i < layers.length; i++) {
			if (layers[i] == target) {
				return true;
			}
		}
		return false;
	}
}
lwriemen/bridgepoint
src/org.xtuml.bp.ui.graphics/src/org/xtuml/bp/ui/graphics/actions/AddToLayerAction.java
Java
apache-2.0
5,561
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;

// NOTE(review): this class follows the AWS SDK code-generator conventions
// (lazy auto-construct lists, with* chaining, value-based equals/hashCode);
// keep edits consistent with regeneration.

/**
 * <p>
 * Contains the output of DescribeSpotInstanceRequests.
 * </p>
 */
public class DescribeSpotInstanceRequestsResult implements Serializable, Cloneable {

    /**
     * One or more Spot Instance requests.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest> spotInstanceRequests;

    /**
     * One or more Spot Instance requests.
     *
     * @return One or more Spot Instance requests.
     */
    public java.util.List<SpotInstanceRequest> getSpotInstanceRequests() {
        // Lazily auto-construct an empty list so callers never see null.
        if (spotInstanceRequests == null) {
              spotInstanceRequests = new com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest>();
              spotInstanceRequests.setAutoConstruct(true);
        }
        return spotInstanceRequests;
    }

    /**
     * One or more Spot Instance requests.
     *
     * @param spotInstanceRequests One or more Spot Instance requests.
     */
    public void setSpotInstanceRequests(java.util.Collection<SpotInstanceRequest> spotInstanceRequests) {
        if (spotInstanceRequests == null) {
            this.spotInstanceRequests = null;
            return;
        }
        // Defensive copy: do not keep a reference to the caller's collection.
        com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest> spotInstanceRequestsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest>(spotInstanceRequests.size());
        spotInstanceRequestsCopy.addAll(spotInstanceRequests);
        this.spotInstanceRequests = spotInstanceRequestsCopy;
    }

    /**
     * One or more Spot Instance requests.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setSpotInstanceRequests(java.util.Collection)} or
     * {@link #withSpotInstanceRequests(java.util.Collection)} if you want to
     * override the existing values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param spotInstanceRequests One or more Spot Instance requests.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public DescribeSpotInstanceRequestsResult withSpotInstanceRequests(SpotInstanceRequest... spotInstanceRequests) {
        if (getSpotInstanceRequests() == null) setSpotInstanceRequests(new java.util.ArrayList<SpotInstanceRequest>(spotInstanceRequests.length));
        for (SpotInstanceRequest value : spotInstanceRequests) {
            getSpotInstanceRequests().add(value);
        }
        return this;
    }

    /**
     * One or more Spot Instance requests.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param spotInstanceRequests One or more Spot Instance requests.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public DescribeSpotInstanceRequestsResult withSpotInstanceRequests(java.util.Collection<SpotInstanceRequest> spotInstanceRequests) {
        if (spotInstanceRequests == null) {
            this.spotInstanceRequests = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest> spotInstanceRequestsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest>(spotInstanceRequests.size());
            spotInstanceRequestsCopy.addAll(spotInstanceRequests);
            this.spotInstanceRequests = spotInstanceRequestsCopy;
        }

        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getSpotInstanceRequests() != null) sb.append("SpotInstanceRequests: " + getSpotInstanceRequests() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getSpotInstanceRequests() == null) ? 0 : getSpotInstanceRequests().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;

        if (obj instanceof DescribeSpotInstanceRequestsResult == false) return false;
        DescribeSpotInstanceRequestsResult other = (DescribeSpotInstanceRequestsResult)obj;

        if (other.getSpotInstanceRequests() == null ^ this.getSpotInstanceRequests() == null) return false;
        if (other.getSpotInstanceRequests() != null && other.getSpotInstanceRequests().equals(this.getSpotInstanceRequests()) == false) return false;
        return true;
    }

    @Override
    public DescribeSpotInstanceRequestsResult clone() {
        try {
            // Shallow clone via Object.clone(); the contained list is shared.
            return (DescribeSpotInstanceRequestsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!",
                    e);
        }

    }

}
aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/DescribeSpotInstanceRequestsResult.java
Java
apache-2.0
6,035
package org.asteriskjava.manager.event;

/**
 * A CEND event originating from a Dongle device channel (presumably the
 * chan_dongle Asterisk driver — confirm against the driver docs). The bean
 * properties mirror the event's headers verbatim.
 */
public class DongleCENDEvent extends ManagerEvent
{
    private static final long serialVersionUID = 3257845467831284784L;

    private String device;
    private String endstatus;
    private String cccause;
    private String duration;
    private String callidx;

    public DongleCENDEvent(Object source)
    {
        super(source);
    }

    public String getDevice()
    {
        return this.device;
    }

    public void setDevice(String device)
    {
        this.device = device;
    }

    public String getEndstatus()
    {
        return this.endstatus;
    }

    public void setEndstatus(String endstatus)
    {
        this.endstatus = endstatus;
    }

    public String getCccause()
    {
        return this.cccause;
    }

    public void setCccause(String cccause)
    {
        this.cccause = cccause;
    }

    public String getDuration()
    {
        return this.duration;
    }

    public void setDuration(String duration)
    {
        this.duration = duration;
    }

    public String getCallidx()
    {
        return this.callidx;
    }

    public void setCallidx(String callidx)
    {
        this.callidx = callidx;
    }
}
seanbright/asterisk-java
src/main/java/org/asteriskjava/manager/event/DongleCENDEvent.java
Java
apache-2.0
1,118
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import shutil
import tempfile

from telemetry import decorators
from telemetry.testing import options_for_unittests
from telemetry.testing import page_test_test_case

from measurements import skpicture_printer


class SkpicturePrinterUnitTest(page_test_test_case.PageTestTestCase):
  """Exercises the SkpicturePrinter measurement against a blank page."""

  def setUp(self):
    # Fresh options copy and a throwaway output directory per test.
    self._options = options_for_unittests.GetCopy()
    self._skp_outdir = tempfile.mkdtemp('_skp_test')

  def tearDown(self):
    shutil.rmtree(self._skp_outdir)

  @decorators.Disabled('android')
  def testSkpicturePrinter(self):
    """Runs the measurement and checks at least one picture was saved."""
    ps = self.CreateStorySetFromFileInUnittestDataDir('blank.html')
    measurement = skpicture_printer.SkpicturePrinter(self._skp_outdir)
    results = self.RunMeasurement(measurement, ps, options=self._options)
    # Picture printing is not supported on all platforms; in that case the
    # run is expected to fail with a "not supported" message, not crash.
    if results.failures:
      assert 'not supported' in results.failures[0].exc_info[1].message
      return

    saved_picture_count = results.FindAllPageSpecificValuesNamed(
        'saved_picture_count')
    # assertEquals is a deprecated alias of assertEqual; use the
    # canonical name.
    self.assertEqual(len(saved_picture_count), 1)
    self.assertGreater(saved_picture_count[0].GetRepresentativeNumber(), 0)
axinging/chromium-crosswalk
tools/perf/measurements/skpicture_printer_unittest.py
Python
bsd-3-clause
1,305
/*
 * Copyright (C) 2012 Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *     * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following disclaimer
 * in the documentation and/or other materials provided with the
 * distribution.
 *     * Neither the name of Google Inc. nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/**
 * Singleton modal dialog shown over a glass pane next to relativeToElement.
 * Content, positioning and key handling are delegated to a DialogDelegate.
 * Use the static show()/hide() entry points; at most one instance exists.
 * @constructor
 * @param {!Element} relativeToElement
 * @param {!WebInspector.DialogDelegate} delegate
 */
WebInspector.Dialog = function(relativeToElement, delegate)
{
    this._delegate = delegate;
    this._relativeToElement = relativeToElement;

    // The glass pane swallows clicks outside the dialog; focusing it (i.e.
    // clicking outside) dismisses the dialog via _onGlassPaneFocus.
    this._glassPane = new WebInspector.GlassPane(/** @type {!Document} */ (relativeToElement.ownerDocument));
    WebInspector.GlassPane.DefaultFocusedViewStack.push(this);

    // Install glass pane capturing events.
    this._glassPane.element.tabIndex = 0;
    this._glassPane.element.addEventListener("focus", this._onGlassPaneFocus.bind(this), false);

    this._element = this._glassPane.element.createChild("div");
    this._element.tabIndex = 0;
    this._element.addEventListener("focus", this._onFocus.bind(this), false);
    this._element.addEventListener("keydown", this._onKeyDown.bind(this), false);
    this._closeKeys = [
        WebInspector.KeyboardShortcut.Keys.Enter.code,
        WebInspector.KeyboardShortcut.Keys.Esc.code,
    ];

    delegate.show(this._element);

    this._position();
    this._delegate.focus();
}

/**
 * @return {?WebInspector.Dialog}
 */
WebInspector.Dialog.currentInstance = function()
{
    return WebInspector.Dialog._instance;
}

/**
 * Shows the dialog unless one is already visible (singleton guard).
 * @param {!Element} relativeToElement
 * @param {!WebInspector.DialogDelegate} delegate
 */
WebInspector.Dialog.show = function(relativeToElement, delegate)
{
    if (WebInspector.Dialog._instance)
        return;
    WebInspector.Dialog._instance = new WebInspector.Dialog(relativeToElement, delegate);
}

WebInspector.Dialog.hide = function()
{
    if (!WebInspector.Dialog._instance)
        return;
    WebInspector.Dialog._instance._hide();
}

WebInspector.Dialog.prototype = {
    focus: function()
    {
        this._element.focus();
    },

    // Tears down the dialog exactly once: notifies the delegate, clears the
    // singleton, unwinds the focus stack and removes the glass pane.
    _hide: function()
    {
        if (this._isHiding)
            return;
        this._isHiding = true;

        this._delegate.willHide();

        delete WebInspector.Dialog._instance;
        WebInspector.GlassPane.DefaultFocusedViewStack.pop();
        this._glassPane.dispose();
    },

    // Focus landing on the glass pane means a click outside the dialog.
    _onGlassPaneFocus: function(event)
    {
        this._hide();
    },

    _onFocus: function(event)
    {
        this._delegate.focus();
    },

    _position: function()
    {
        this._delegate.position(this._element, this._relativeToElement);
    },

    // Tab is trapped inside the dialog; Enter is forwarded to the delegate;
    // unhandled Enter/Esc close the dialog.
    _onKeyDown: function(event)
    {
        if (event.keyCode === WebInspector.KeyboardShortcut.Keys.Tab.code) {
            event.preventDefault();
            return;
        }

        if (event.keyCode === WebInspector.KeyboardShortcut.Keys.Enter.code)
            this._delegate.onEnter(event);

        if (!event.handled && this._closeKeys.indexOf(event.keyCode) >= 0) {
            this._hide();
            event.consume(true);
        }
    }
};

/**
 * Base class for dialog content providers: supplies the content element and
 * the positioning/focus/Enter behavior. Subclasses assign this.element.
 * @constructor
 * @extends {WebInspector.Object}
 */
WebInspector.DialogDelegate = function()
{
    /** @type {!Element} */
    this.element;
}

WebInspector.DialogDelegate.prototype = {
    /**
     * @param {!Element} element
     */
    show: function(element)
    {
        element.appendChild(this.element);
        this.element.classList.add("dialog-contents");
        element.classList.add("dialog");
    },

    /**
     * Centers the dialog over relativeToElement, clamped to the modal host's
     * bounds so it never overflows the host view.
     * @param {!Element} element
     * @param {!Element} relativeToElement
     */
    position: function(element, relativeToElement)
    {
        var container = WebInspector.Dialog._modalHostView.element;
        var box = relativeToElement.boxInWindow(window).relativeToElement(container);

        var positionX = box.x + (relativeToElement.offsetWidth - element.offsetWidth) / 2;
        positionX = Number.constrain(positionX, 0, container.offsetWidth - element.offsetWidth);

        var positionY = box.y + (relativeToElement.offsetHeight - element.offsetHeight) / 2;
        positionY = Number.constrain(positionY, 0, container.offsetHeight - element.offsetHeight);

        element.style.position = "absolute";
        element.positionAt(positionX, positionY, container);
    },

    focus: function() { },

    onEnter: function(event) { },

    willHide: function() { },

    __proto__: WebInspector.Object.prototype
}

/** @type {?WebInspector.View} */
WebInspector.Dialog._modalHostView = null;

/**
 * @param {!WebInspector.View} view
 */
WebInspector.Dialog.setModalHostView = function(view)
{
    WebInspector.Dialog._modalHostView = view;
};

/**
 * FIXME: make utility method in Dialog, so clients use it instead of this getter.
 * Method should be like Dialog.showModalElement(position params, reposition callback).
 * @return {?WebInspector.View}
 */
WebInspector.Dialog.modalHostView = function()
{
    return WebInspector.Dialog._modalHostView;
};

WebInspector.Dialog.modalHostRepositioned = function()
{
    if (WebInspector.Dialog._instance)
        WebInspector.Dialog._instance._position();
};
CTSRD-SOAAP/chromium-42.0.2311.135
third_party/WebKit/Source/devtools/front_end/ui/Dialog.js
JavaScript
bsd-3-clause
6,428
//===- ModuleFile.cpp - Module description --------------------------------===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// // // This file implements the ModuleFile class, which describes a module that // has been loaded from an AST file. // //===----------------------------------------------------------------------===// #include "clang/Serialization/ModuleFile.h" #include "ASTReaderInternals.h" #include "clang/Serialization/ContinuousRangeMap.h" #include "llvm/ADT/StringRef.h" #include "llvm/Support/Compiler.h" #include "llvm/Support/raw_ostream.h" using namespace clang; using namespace serialization; using namespace reader; ModuleFile::~ModuleFile() { delete static_cast<ASTIdentifierLookupTable *>(IdentifierLookupTable); delete static_cast<HeaderFileInfoLookupTable *>(HeaderFileInfoTable); delete static_cast<ASTSelectorLookupTable *>(SelectorLookupTable); } template<typename Key, typename Offset, unsigned InitialCapacity> static void dumpLocalRemap(StringRef Name, const ContinuousRangeMap<Key, Offset, InitialCapacity> &Map) { if (Map.begin() == Map.end()) return; using MapType = ContinuousRangeMap<Key, Offset, InitialCapacity>; llvm::errs() << " " << Name << ":\n"; for (typename MapType::const_iterator I = Map.begin(), IEnd = Map.end(); I != IEnd; ++I) { llvm::errs() << " " << I->first << " -> " << I->second << "\n"; } } LLVM_DUMP_METHOD void ModuleFile::dump() { llvm::errs() << "\nModule: " << FileName << "\n"; if (!Imports.empty()) { llvm::errs() << " Imports: "; for (unsigned I = 0, N = Imports.size(); I != N; ++I) { if (I) llvm::errs() << ", "; llvm::errs() << Imports[I]->FileName; } llvm::errs() << "\n"; } // Remapping tables. 
llvm::errs() << " Base source location offset: " << SLocEntryBaseOffset << '\n'; dumpLocalRemap("Source location offset local -> global map", SLocRemap); llvm::errs() << " Base identifier ID: " << BaseIdentifierID << '\n' << " Number of identifiers: " << LocalNumIdentifiers << '\n'; dumpLocalRemap("Identifier ID local -> global map", IdentifierRemap); llvm::errs() << " Base macro ID: " << BaseMacroID << '\n' << " Number of macros: " << LocalNumMacros << '\n'; dumpLocalRemap("Macro ID local -> global map", MacroRemap); llvm::errs() << " Base submodule ID: " << BaseSubmoduleID << '\n' << " Number of submodules: " << LocalNumSubmodules << '\n'; dumpLocalRemap("Submodule ID local -> global map", SubmoduleRemap); llvm::errs() << " Base selector ID: " << BaseSelectorID << '\n' << " Number of selectors: " << LocalNumSelectors << '\n'; dumpLocalRemap("Selector ID local -> global map", SelectorRemap); llvm::errs() << " Base preprocessed entity ID: " << BasePreprocessedEntityID << '\n' << " Number of preprocessed entities: " << NumPreprocessedEntities << '\n'; dumpLocalRemap("Preprocessed entity ID local -> global map", PreprocessedEntityRemap); llvm::errs() << " Base type index: " << BaseTypeIndex << '\n' << " Number of types: " << LocalNumTypes << '\n'; dumpLocalRemap("Type index local -> global map", TypeRemap); llvm::errs() << " Base decl ID: " << BaseDeclID << '\n' << " Number of decls: " << LocalNumDecls << '\n'; dumpLocalRemap("Decl ID local -> global map", DeclRemap); }
endlessm/chromium-browser
third_party/llvm/clang/lib/Serialization/ModuleFile.cpp
C++
bsd-3-clause
3,727
// -*- Mode: C++; c-basic-offset: 2; indent-tabs-mode: nil -*- /* Copyright (c) 2007, Google Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* * --- * Author: Joi Sigurdsson * Author: Scott Francis * * Implementation of PreamblePatcher */ #include "preamble_patcher.h" #include "mini_disassembler.h" // compatibility shims #include "base/logging.h" // Definitions of assembly statements we need #define ASM_JMP32REL 0xE9 #define ASM_INT3 0xCC #define ASM_JMP32ABS_0 0xFF #define ASM_JMP32ABS_1 0x25 #define ASM_JMP8REL 0xEB #define ASM_JCC32REL_0 0x0F #define ASM_JCC32REL_1_MASK 0x80 #define ASM_NOP 0x90 // X64 opcodes #define ASM_REXW 0x48 #define ASM_MOVRAX_IMM 0xB8 #define ASM_JMP 0xFF #define ASM_JMP_RAX 0xE0 namespace sidestep { PreamblePatcher::PreamblePage* PreamblePatcher::preamble_pages_ = NULL; long PreamblePatcher::granularity_ = 0; long PreamblePatcher::pagesize_ = 0; bool PreamblePatcher::initialized_ = false; static const unsigned int kPreamblePageMagic = 0x4347414D; // "MAGC" // Handle a special case that we see with functions that point into an // IAT table (including functions linked statically into the // application): these function already starts with ASM_JMP32*. For // instance, malloc() might be implemented as a JMP to __malloc(). // This function follows the initial JMPs for us, until we get to the // place where the actual code is defined. If we get to STOP_BEFORE, // we return the address before stop_before. The stop_before_trampoline // flag is used in 64-bit mode. If true, we will return the address // before a trampoline is detected. Trampolines are defined as: // // nop // mov rax, <replacement_function> // jmp rax // // See PreamblePatcher::RawPatchWithStub for more information. void* PreamblePatcher::ResolveTargetImpl(unsigned char* target, unsigned char* stop_before, bool stop_before_trampoline) { if (target == NULL) return NULL; while (1) { unsigned char* new_target; if (target[0] == ASM_JMP32REL) { // target[1-4] holds the place the jmp goes to, but it's // relative to the next instruction. 
int relative_offset; // Windows guarantees int is 4 bytes SIDESTEP_ASSERT(sizeof(relative_offset) == 4); memcpy(reinterpret_cast<void*>(&relative_offset), reinterpret_cast<void*>(target + 1), 4); new_target = target + 5 + relative_offset; } else if (target[0] == ASM_JMP8REL) { // Visual Studio 7.1 implements new[] as an 8 bit jump to new signed char relative_offset; memcpy(reinterpret_cast<void*>(&relative_offset), reinterpret_cast<void*>(target + 1), 1); new_target = target + 2 + relative_offset; } else if (target[0] == ASM_JMP32ABS_0 && target[1] == ASM_JMP32ABS_1) { jmp32rel: // Visual studio seems to sometimes do it this way instead of the // previous way. Not sure what the rules are, but it was happening // with operator new in some binaries. void** new_target_v; if (kIs64BitBinary) { // In 64-bit mode JMPs are RIP-relative, not absolute int target_offset; memcpy(reinterpret_cast<void*>(&target_offset), reinterpret_cast<void*>(target + 2), 4); new_target_v = reinterpret_cast<void**>(target + target_offset + 6); } else { SIDESTEP_ASSERT(sizeof(new_target) == 4); memcpy(&new_target_v, reinterpret_cast<void*>(target + 2), 4); } new_target = reinterpret_cast<unsigned char*>(*new_target_v); } else if (kIs64BitBinary && target[0] == ASM_REXW && target[1] == ASM_JMP32ABS_0 && target[2] == ASM_JMP32ABS_1) { // in Visual Studio 2012 we're seeing jump like that: // rex.W jmpq *0x11d019(%rip) // // according to docs I have, rex prefix is actually unneeded and // can be ignored. I.e. docs say for jumps like that operand // already defaults to 64-bit. But clearly it breaks abs. jump // detection above and we just skip rex target++; goto jmp32rel; } else { break; } if (new_target == stop_before) break; if (stop_before_trampoline && *new_target == ASM_NOP && new_target[1] == ASM_REXW && new_target[2] == ASM_MOVRAX_IMM) break; target = new_target; } return target; } // Special case scoped_ptr to avoid dependency on scoped_ptr below. 
class DeleteUnsignedCharArray { public: DeleteUnsignedCharArray(unsigned char* array) : array_(array) { } ~DeleteUnsignedCharArray() { if (array_) { PreamblePatcher::FreePreambleBlock(array_); } } unsigned char* Release() { unsigned char* temp = array_; array_ = NULL; return temp; } private: unsigned char* array_; }; SideStepError PreamblePatcher::RawPatchWithStubAndProtections( void* target_function, void *replacement_function, unsigned char* preamble_stub, unsigned long stub_size, unsigned long* bytes_needed) { // We need to be able to write to a process-local copy of the first // MAX_PREAMBLE_STUB_SIZE bytes of target_function DWORD old_target_function_protect = 0; BOOL succeeded = ::VirtualProtect(reinterpret_cast<void*>(target_function), MAX_PREAMBLE_STUB_SIZE, PAGE_EXECUTE_READWRITE, &old_target_function_protect); if (!succeeded) { SIDESTEP_ASSERT(false && "Failed to make page containing target function " "copy-on-write."); return SIDESTEP_ACCESS_DENIED; } SideStepError error_code = RawPatchWithStub(target_function, replacement_function, preamble_stub, stub_size, bytes_needed); // Restore the protection of the first MAX_PREAMBLE_STUB_SIZE bytes of // pTargetFunction to what they were before we started goofing around. // We do this regardless of whether the patch succeeded or not. succeeded = ::VirtualProtect(reinterpret_cast<void*>(target_function), MAX_PREAMBLE_STUB_SIZE, old_target_function_protect, &old_target_function_protect); if (!succeeded) { SIDESTEP_ASSERT(false && "Failed to restore protection to target function."); // We must not return an error here because the function has // likely actually been patched, and returning an error might // cause our client code not to unpatch it. So we just keep // going. 
} if (SIDESTEP_SUCCESS != error_code) { // Testing RawPatchWithStub, above SIDESTEP_ASSERT(false); return error_code; } // Flush the instruction cache to make sure the processor doesn't execute the // old version of the instructions (before our patch). // // FlushInstructionCache is actually a no-op at least on // single-processor XP machines. I'm not sure why this is so, but // it is, yet I want to keep the call to the API here for // correctness in case there is a difference in some variants of // Windows/hardware. succeeded = ::FlushInstructionCache(::GetCurrentProcess(), target_function, MAX_PREAMBLE_STUB_SIZE); if (!succeeded) { SIDESTEP_ASSERT(false && "Failed to flush instruction cache."); // We must not return an error here because the function has actually // been patched, and returning an error would likely cause our client // code not to unpatch it. So we just keep going. } return SIDESTEP_SUCCESS; } SideStepError PreamblePatcher::RawPatch(void* target_function, void* replacement_function, void** original_function_stub) { if (!target_function || !replacement_function || !original_function_stub || (*original_function_stub) || target_function == replacement_function) { SIDESTEP_ASSERT(false && "Preconditions not met"); return SIDESTEP_INVALID_PARAMETER; } BOOL succeeded = FALSE; // First, deal with a special case that we see with functions that // point into an IAT table (including functions linked statically // into the application): these function already starts with // ASM_JMP32REL. For instance, malloc() might be implemented as a // JMP to __malloc(). In that case, we replace the destination of // the JMP (__malloc), rather than the JMP itself (malloc). This // way we get the correct behavior no matter how malloc gets called. 
void* new_target = ResolveTarget(target_function); if (new_target != target_function) { target_function = new_target; } // In 64-bit mode, preamble_stub must be within 2GB of target function // so that if target contains a jump, we can translate it. unsigned char* preamble_stub = AllocPreambleBlockNear(target_function); if (!preamble_stub) { SIDESTEP_ASSERT(false && "Unable to allocate preamble-stub."); return SIDESTEP_INSUFFICIENT_BUFFER; } // Frees the array at end of scope. DeleteUnsignedCharArray guard_preamble_stub(preamble_stub); SideStepError error_code = RawPatchWithStubAndProtections( target_function, replacement_function, preamble_stub, MAX_PREAMBLE_STUB_SIZE, NULL); if (SIDESTEP_SUCCESS != error_code) { SIDESTEP_ASSERT(false); return error_code; } // Flush the instruction cache to make sure the processor doesn't execute the // old version of the instructions (before our patch). // // FlushInstructionCache is actually a no-op at least on // single-processor XP machines. I'm not sure why this is so, but // it is, yet I want to keep the call to the API here for // correctness in case there is a difference in some variants of // Windows/hardware. succeeded = ::FlushInstructionCache(::GetCurrentProcess(), target_function, MAX_PREAMBLE_STUB_SIZE); if (!succeeded) { SIDESTEP_ASSERT(false && "Failed to flush instruction cache."); // We must not return an error here because the function has actually // been patched, and returning an error would likely cause our client // code not to unpatch it. So we just keep going. 
} SIDESTEP_LOG("PreamblePatcher::RawPatch successfully patched."); // detach the scoped pointer so the memory is not freed *original_function_stub = reinterpret_cast<void*>(guard_preamble_stub.Release()); return SIDESTEP_SUCCESS; } SideStepError PreamblePatcher::Unpatch(void* target_function, void* replacement_function, void* original_function_stub) { SIDESTEP_ASSERT(target_function && replacement_function && original_function_stub); if (!target_function || !replacement_function || !original_function_stub) { return SIDESTEP_INVALID_PARAMETER; } // Before unpatching, target_function should be a JMP to // replacement_function. If it's not, then either it's an error, or // we're falling into the case where the original instruction was a // JMP, and we patched the jumped_to address rather than the JMP // itself. (For instance, if malloc() is just a JMP to __malloc(), // we patched __malloc() and not malloc().) unsigned char* target = reinterpret_cast<unsigned char*>(target_function); target = reinterpret_cast<unsigned char*>( ResolveTargetImpl( target, reinterpret_cast<unsigned char*>(replacement_function), true)); // We should end at the function we patched. When we patch, we insert // a ASM_JMP32REL instruction, so look for that as a sanity check. 
if (target[0] != ASM_JMP32REL) { SIDESTEP_ASSERT(false && "target_function does not look like it was patched."); return SIDESTEP_INVALID_PARAMETER; } const unsigned int kRequiredTargetPatchBytes = 5; // We need to be able to write to a process-local copy of the first // kRequiredTargetPatchBytes bytes of target_function DWORD old_target_function_protect = 0; BOOL succeeded = ::VirtualProtect(reinterpret_cast<void*>(target), kRequiredTargetPatchBytes, PAGE_EXECUTE_READWRITE, &old_target_function_protect); if (!succeeded) { SIDESTEP_ASSERT(false && "Failed to make page containing target function " "copy-on-write."); return SIDESTEP_ACCESS_DENIED; } unsigned char* preamble_stub = reinterpret_cast<unsigned char*>( original_function_stub); // Disassemble the preamble of stub and copy the bytes back to target. // If we've done any conditional jumps in the preamble we need to convert // them back to the original REL8 jumps in the target. MiniDisassembler disassembler; unsigned int preamble_bytes = 0; unsigned int target_bytes = 0; while (target_bytes < kRequiredTargetPatchBytes) { unsigned int cur_bytes = 0; InstructionType instruction_type = disassembler.Disassemble(preamble_stub + preamble_bytes, cur_bytes); if (IT_JUMP == instruction_type) { unsigned int jump_bytes = 0; SideStepError jump_ret = SIDESTEP_JUMP_INSTRUCTION; if (IsNearConditionalJump(preamble_stub + preamble_bytes, cur_bytes) || IsNearRelativeJump(preamble_stub + preamble_bytes, cur_bytes) || IsNearAbsoluteCall(preamble_stub + preamble_bytes, cur_bytes) || IsNearRelativeCall(preamble_stub + preamble_bytes, cur_bytes)) { jump_ret = PatchNearJumpOrCall(preamble_stub + preamble_bytes, cur_bytes, target + target_bytes, &jump_bytes, MAX_PREAMBLE_STUB_SIZE); } if (jump_ret == SIDESTEP_JUMP_INSTRUCTION) { SIDESTEP_ASSERT(false && "Found unsupported jump instruction in stub!!"); return SIDESTEP_UNSUPPORTED_INSTRUCTION; } target_bytes += jump_bytes; } else if (IT_GENERIC == instruction_type) { if 
(IsMovWithDisplacement(preamble_stub + preamble_bytes, cur_bytes)) { unsigned int mov_bytes = 0; if (PatchMovWithDisplacement(preamble_stub + preamble_bytes, cur_bytes, target + target_bytes, &mov_bytes, MAX_PREAMBLE_STUB_SIZE) != SIDESTEP_SUCCESS) { SIDESTEP_ASSERT(false && "Found unsupported generic instruction in stub!!"); return SIDESTEP_UNSUPPORTED_INSTRUCTION; } } else { memcpy(reinterpret_cast<void*>(target + target_bytes), reinterpret_cast<void*>(reinterpret_cast<unsigned char*>( original_function_stub) + preamble_bytes), cur_bytes); target_bytes += cur_bytes; } } else { SIDESTEP_ASSERT(false && "Found unsupported instruction in stub!!"); return SIDESTEP_UNSUPPORTED_INSTRUCTION; } preamble_bytes += cur_bytes; } FreePreambleBlock(reinterpret_cast<unsigned char*>(original_function_stub)); // Restore the protection of the first kRequiredTargetPatchBytes bytes of // target to what they were before we started goofing around. succeeded = ::VirtualProtect(reinterpret_cast<void*>(target), kRequiredTargetPatchBytes, old_target_function_protect, &old_target_function_protect); // Flush the instruction cache to make sure the processor doesn't execute the // old version of the instructions (before our patch). // // See comment on FlushInstructionCache elsewhere in this file. 
succeeded = ::FlushInstructionCache(::GetCurrentProcess(), target, MAX_PREAMBLE_STUB_SIZE); if (!succeeded) { SIDESTEP_ASSERT(false && "Failed to flush instruction cache."); return SIDESTEP_UNEXPECTED; } SIDESTEP_LOG("PreamblePatcher::Unpatch successfully unpatched."); return SIDESTEP_SUCCESS; } void PreamblePatcher::Initialize() { if (!initialized_) { SYSTEM_INFO si = { 0 }; ::GetSystemInfo(&si); granularity_ = si.dwAllocationGranularity; pagesize_ = si.dwPageSize; initialized_ = true; } } unsigned char* PreamblePatcher::AllocPreambleBlockNear(void* target) { PreamblePage* preamble_page = preamble_pages_; while (preamble_page != NULL) { if (preamble_page->free_ != NULL) { __int64 val = reinterpret_cast<__int64>(preamble_page) - reinterpret_cast<__int64>(target); if ((val > 0 && val + pagesize_ <= INT_MAX) || (val < 0 && val >= INT_MIN)) { break; } } preamble_page = preamble_page->next_; } // The free_ member of the page is used to store the next available block // of memory to use or NULL if there are no chunks available, in which case // we'll allocate a new page. 
if (preamble_page == NULL || preamble_page->free_ == NULL) { // Create a new preamble page and initialize the free list preamble_page = reinterpret_cast<PreamblePage*>(AllocPageNear(target)); SIDESTEP_ASSERT(preamble_page != NULL && "Could not allocate page!"); void** pp = &preamble_page->free_; unsigned char* ptr = reinterpret_cast<unsigned char*>(preamble_page) + MAX_PREAMBLE_STUB_SIZE; unsigned char* limit = reinterpret_cast<unsigned char*>(preamble_page) + pagesize_; while (ptr < limit) { *pp = ptr; pp = reinterpret_cast<void**>(ptr); ptr += MAX_PREAMBLE_STUB_SIZE; } *pp = NULL; // Insert the new page into the list preamble_page->magic_ = kPreamblePageMagic; preamble_page->next_ = preamble_pages_; preamble_pages_ = preamble_page; } unsigned char* ret = reinterpret_cast<unsigned char*>(preamble_page->free_); preamble_page->free_ = *(reinterpret_cast<void**>(preamble_page->free_)); return ret; } void PreamblePatcher::FreePreambleBlock(unsigned char* block) { SIDESTEP_ASSERT(block != NULL); SIDESTEP_ASSERT(granularity_ != 0); uintptr_t ptr = reinterpret_cast<uintptr_t>(block); ptr -= ptr & (granularity_ - 1); PreamblePage* preamble_page = reinterpret_cast<PreamblePage*>(ptr); SIDESTEP_ASSERT(preamble_page->magic_ == kPreamblePageMagic); *(reinterpret_cast<void**>(block)) = preamble_page->free_; preamble_page->free_ = block; } void* PreamblePatcher::AllocPageNear(void* target) { MEMORY_BASIC_INFORMATION mbi = { 0 }; if (!::VirtualQuery(target, &mbi, sizeof(mbi))) { SIDESTEP_ASSERT(false && "VirtualQuery failed on target address"); return 0; } if (initialized_ == false) { PreamblePatcher::Initialize(); SIDESTEP_ASSERT(initialized_); } void* pv = NULL; unsigned char* allocation_base = reinterpret_cast<unsigned char*>( mbi.AllocationBase); __int64 i = 1; bool high_target = reinterpret_cast<__int64>(target) > UINT_MAX; while (pv == NULL) { __int64 val = reinterpret_cast<__int64>(allocation_base) - (i * granularity_); if (high_target && reinterpret_cast<__int64>(target) 
- val > INT_MAX) { // We're further than 2GB from the target break; } else if (val <= 0) { // Less than 0 break; } pv = ::VirtualAlloc(reinterpret_cast<void*>(allocation_base - (i++ * granularity_)), pagesize_, MEM_COMMIT | MEM_RESERVE, PAGE_EXECUTE_READWRITE); } // We couldn't allocate low, try to allocate high if (pv == NULL) { i = 1; // Round up to the next multiple of page granularity allocation_base = reinterpret_cast<unsigned char*>( (reinterpret_cast<__int64>(target) & (~(granularity_ - 1))) + granularity_); while (pv == NULL) { __int64 val = reinterpret_cast<__int64>(allocation_base) + (i * granularity_) - reinterpret_cast<__int64>(target); if (val > INT_MAX || val < 0) { // We're too far or we overflowed break; } pv = ::VirtualAlloc(reinterpret_cast<void*>(allocation_base + (i++ * granularity_)), pagesize_, MEM_COMMIT | MEM_RESERVE, PAGE_EXECUTE_READWRITE); } } return pv; } bool PreamblePatcher::IsShortConditionalJump( unsigned char* target, unsigned int instruction_size) { return (*(target) & 0x70) == 0x70 && instruction_size == 2; } bool PreamblePatcher::IsShortJump( unsigned char* target, unsigned int instruction_size) { return target[0] == 0xeb && instruction_size == 2; } bool PreamblePatcher::IsNearConditionalJump( unsigned char* target, unsigned int instruction_size) { return *(target) == 0xf && (*(target + 1) & 0x80) == 0x80 && instruction_size == 6; } bool PreamblePatcher::IsNearRelativeJump( unsigned char* target, unsigned int instruction_size) { return *(target) == 0xe9 && instruction_size == 5; } bool PreamblePatcher::IsNearAbsoluteCall( unsigned char* target, unsigned int instruction_size) { return *(target) == 0xff && (*(target + 1) & 0x10) == 0x10 && instruction_size == 6; } bool PreamblePatcher::IsNearRelativeCall( unsigned char* target, unsigned int instruction_size) { return *(target) == 0xe8 && instruction_size == 5; } bool PreamblePatcher::IsMovWithDisplacement( unsigned char* target, unsigned int instruction_size) { // In this case, the 
ModRM byte's mod field will be 0 and r/m will be 101b (5) return instruction_size == 7 && *target == 0x48 && *(target + 1) == 0x8b && (*(target + 2) >> 6) == 0 && (*(target + 2) & 0x7) == 5; } SideStepError PreamblePatcher::PatchShortConditionalJump( unsigned char* source, unsigned int instruction_size, unsigned char* target, unsigned int* target_bytes, unsigned int target_size) { // note: rel8 offset is signed. Thus we need to ask for signed char // to negative offsets right unsigned char* original_jump_dest = (source + 2) + static_cast<signed char>(source[1]); unsigned char* stub_jump_from = target + 6; __int64 fixup_jump_offset = original_jump_dest - stub_jump_from; if (fixup_jump_offset > INT_MAX || fixup_jump_offset < INT_MIN) { SIDESTEP_ASSERT(false && "Unable to fix up short jump because target" " is too far away."); return SIDESTEP_JUMP_INSTRUCTION; } *target_bytes = 6; if (target_size > *target_bytes) { // Convert the short jump to a near jump. // // 0f 8x xx xx xx xx = Jcc rel32off unsigned short jmpcode = ((0x80 | (source[0] & 0xf)) << 8) | 0x0f; memcpy(reinterpret_cast<void*>(target), reinterpret_cast<void*>(&jmpcode), 2); memcpy(reinterpret_cast<void*>(target + 2), reinterpret_cast<void*>(&fixup_jump_offset), 4); } return SIDESTEP_SUCCESS; } SideStepError PreamblePatcher::PatchShortJump( unsigned char* source, unsigned int instruction_size, unsigned char* target, unsigned int* target_bytes, unsigned int target_size) { // note: rel8 offset is _signed_. Thus we need signed char here. 
unsigned char* original_jump_dest = (source + 2) + static_cast<signed char>(source[1]); unsigned char* stub_jump_from = target + 5; __int64 fixup_jump_offset = original_jump_dest - stub_jump_from; if (fixup_jump_offset > INT_MAX || fixup_jump_offset < INT_MIN) { SIDESTEP_ASSERT(false && "Unable to fix up short jump because target" " is too far away."); return SIDESTEP_JUMP_INSTRUCTION; } *target_bytes = 5; if (target_size > *target_bytes) { // Convert the short jump to a near jump. // // e9 xx xx xx xx = jmp rel32off target[0] = 0xe9; memcpy(reinterpret_cast<void*>(target + 1), reinterpret_cast<void*>(&fixup_jump_offset), 4); } return SIDESTEP_SUCCESS; } SideStepError PreamblePatcher::PatchNearJumpOrCall( unsigned char* source, unsigned int instruction_size, unsigned char* target, unsigned int* target_bytes, unsigned int target_size) { SIDESTEP_ASSERT(instruction_size == 5 || instruction_size == 6); unsigned int jmp_offset_in_instruction = instruction_size == 5 ? 1 : 2; unsigned char* original_jump_dest = reinterpret_cast<unsigned char *>( reinterpret_cast<__int64>(source + instruction_size) + *(reinterpret_cast<int*>(source + jmp_offset_in_instruction))); unsigned char* stub_jump_from = target + instruction_size; __int64 fixup_jump_offset = original_jump_dest - stub_jump_from; if (fixup_jump_offset > INT_MAX || fixup_jump_offset < INT_MIN) { SIDESTEP_ASSERT(false && "Unable to fix up near jump because target" " is too far away."); return SIDESTEP_JUMP_INSTRUCTION; } if ((fixup_jump_offset < SCHAR_MAX && fixup_jump_offset > SCHAR_MIN)) { *target_bytes = 2; if (target_size > *target_bytes) { // If the new offset is in range, use a short jump instead of a near jump. 
if (source[0] == ASM_JCC32REL_0 && (source[1] & ASM_JCC32REL_1_MASK) == ASM_JCC32REL_1_MASK) { unsigned short jmpcode = (static_cast<unsigned char>( fixup_jump_offset) << 8) | (0x70 | (source[1] & 0xf)); memcpy(reinterpret_cast<void*>(target), reinterpret_cast<void*>(&jmpcode), 2); } else { target[0] = ASM_JMP8REL; target[1] = static_cast<unsigned char>(fixup_jump_offset); } } } else { *target_bytes = instruction_size; if (target_size > *target_bytes) { memcpy(reinterpret_cast<void*>(target), reinterpret_cast<void*>(source), jmp_offset_in_instruction); memcpy(reinterpret_cast<void*>(target + jmp_offset_in_instruction), reinterpret_cast<void*>(&fixup_jump_offset), 4); } } return SIDESTEP_SUCCESS; } SideStepError PreamblePatcher::PatchMovWithDisplacement( unsigned char* source, unsigned int instruction_size, unsigned char* target, unsigned int* target_bytes, unsigned int target_size) { SIDESTEP_ASSERT(instruction_size == 7); const int mov_offset_in_instruction = 3; // 0x48 0x8b 0x0d <offset> unsigned char* original_mov_dest = reinterpret_cast<unsigned char*>( reinterpret_cast<__int64>(source + instruction_size) + *(reinterpret_cast<int*>(source + mov_offset_in_instruction))); unsigned char* stub_mov_from = target + instruction_size; __int64 fixup_mov_offset = original_mov_dest - stub_mov_from; if (fixup_mov_offset > INT_MAX || fixup_mov_offset < INT_MIN) { SIDESTEP_ASSERT(false && "Unable to fix up near MOV because target is too far away."); return SIDESTEP_UNEXPECTED; } *target_bytes = instruction_size; if (target_size > *target_bytes) { memcpy(reinterpret_cast<void*>(target), reinterpret_cast<void*>(source), mov_offset_in_instruction); memcpy(reinterpret_cast<void*>(target + mov_offset_in_instruction), reinterpret_cast<void*>(&fixup_mov_offset), 4); } return SIDESTEP_SUCCESS; } }; // namespace sidestep
scheib/chromium
third_party/tcmalloc/vendor/src/windows/preamble_patcher.cc
C++
bsd-3-clause
29,199
// Copyright 2014 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build !go1.5 // Package oracle contains the implementation of the oracle tool whose // command-line is provided by golang.org/x/tools/cmd/oracle. // // http://golang.org/s/oracle-design // http://golang.org/s/oracle-user-manual // package oracle // import "golang.org/x/tools/oracle" // This file defines oracle.Query, the entry point for the oracle tool. // The actual executable is defined in cmd/oracle. // TODO(adonovan): new queries // - show all statements that may update the selected lvalue // (local, global, field, etc). // - show all places where an object of type T is created // (&T{}, var t T, new(T), new(struct{array [3]T}), etc. import ( "fmt" "go/ast" "go/build" "go/parser" "go/token" "io" "path/filepath" "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/loader" "golang.org/x/tools/go/pointer" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/types" "golang.org/x/tools/oracle/serial" ) type printfFunc func(pos interface{}, format string, args ...interface{}) // queryResult is the interface of each query-specific result type. type queryResult interface { toSerial(res *serial.Result, fset *token.FileSet) display(printf printfFunc) } // A QueryPos represents the position provided as input to a query: // a textual extent in the program's source code, the AST node it // corresponds to, and the package to which it belongs. // Instances are created by parseQueryPos. type queryPos struct { fset *token.FileSet start, end token.Pos // source extent of query path []ast.Node // AST path from query node to root of ast.File exact bool // 2nd result of PathEnclosingInterval info *loader.PackageInfo // type info for the queried package (nil for fastQueryPos) } // TypeString prints type T relative to the query position. 
func (qpos *queryPos) typeString(T types.Type) string { return types.TypeString(T, types.RelativeTo(qpos.info.Pkg)) } // ObjectString prints object obj relative to the query position. func (qpos *queryPos) objectString(obj types.Object) string { return types.ObjectString(obj, types.RelativeTo(qpos.info.Pkg)) } // SelectionString prints selection sel relative to the query position. func (qpos *queryPos) selectionString(sel *types.Selection) string { return types.SelectionString(sel, types.RelativeTo(qpos.info.Pkg)) } // A Query specifies a single oracle query. type Query struct { Mode string // query mode ("callers", etc) Pos string // query position Build *build.Context // package loading configuration // pointer analysis options Scope []string // main packages in (*loader.Config).FromArgs syntax PTALog io.Writer // (optional) pointer-analysis log file Reflection bool // model reflection soundly (currently slow). // Populated during Run() Fset *token.FileSet result queryResult } // Serial returns an instance of serial.Result, which implements the // {xml,json}.Marshaler interfaces so that query results can be // serialized as JSON or XML. // func (q *Query) Serial() *serial.Result { resj := &serial.Result{Mode: q.Mode} q.result.toSerial(resj, q.Fset) return resj } // WriteTo writes the oracle query result res to out in a compiler diagnostic format. func (q *Query) WriteTo(out io.Writer) { printf := func(pos interface{}, format string, args ...interface{}) { fprintf(out, q.Fset, pos, format, args...) } q.result.display(printf) } // Run runs an oracle query and populates its Fset and Result. 
func Run(q *Query) error { switch q.Mode { case "callees": return callees(q) case "callers": return callers(q) case "callstack": return callstack(q) case "peers": return peers(q) case "pointsto": return pointsto(q) case "whicherrs": return whicherrs(q) case "definition": return definition(q) case "describe": return describe(q) case "freevars": return freevars(q) case "implements": return implements(q) case "referrers": return referrers(q) case "what": return what(q) default: return fmt.Errorf("invalid mode: %q", q.Mode) } } func setPTAScope(lconf *loader.Config, scope []string) error { if len(scope) == 0 { return fmt.Errorf("no packages specified for pointer analysis scope") } // Determine initial packages for PTA. args, err := lconf.FromArgs(scope, true) if err != nil { return err } if len(args) > 0 { return fmt.Errorf("surplus arguments: %q", args) } return nil } // Create a pointer.Config whose scope is the initial packages of lprog // and their dependencies. func setupPTA(prog *ssa.Program, lprog *loader.Program, ptaLog io.Writer, reflection bool) (*pointer.Config, error) { // TODO(adonovan): the body of this function is essentially // duplicated in all go/pointer clients. Refactor. // For each initial package (specified on the command line), // if it has a main function, analyze that, // otherwise analyze its tests, if any. var testPkgs, mains []*ssa.Package for _, info := range lprog.InitialPackages() { initialPkg := prog.Package(info.Pkg) // Add package to the pointer analysis scope. 
if initialPkg.Func("main") != nil { mains = append(mains, initialPkg) } else { testPkgs = append(testPkgs, initialPkg) } } if testPkgs != nil { if p := prog.CreateTestMainPackage(testPkgs...); p != nil { mains = append(mains, p) } } if mains == nil { return nil, fmt.Errorf("analysis scope has no main and no tests") } return &pointer.Config{ Log: ptaLog, Reflection: reflection, Mains: mains, }, nil } // importQueryPackage finds the package P containing the // query position and tells conf to import it. // It returns the package's path. func importQueryPackage(pos string, conf *loader.Config) (string, error) { fqpos, err := fastQueryPos(pos) if err != nil { return "", err // bad query } filename := fqpos.fset.File(fqpos.start).Name() // This will not work for ad-hoc packages // such as $GOROOT/src/net/http/triv.go. // TODO(adonovan): ensure we report a clear error. _, importPath, err := guessImportPath(filename, conf.Build) if err != nil { return "", err // can't find GOPATH dir } if importPath == "" { return "", fmt.Errorf("can't guess import path from %s", filename) } // Check that it's possible to load the queried package. // (e.g. oracle tests contain different 'package' decls in same dir.) // Keep consistent with logic in loader/util.go! cfg2 := *conf.Build cfg2.CgoEnabled = false bp, err := cfg2.Import(importPath, "", 0) if err != nil { return "", err // no files for package } switch pkgContainsFile(bp, filename) { case 'T': conf.ImportWithTests(importPath) case 'X': conf.ImportWithTests(importPath) importPath += "_test" // for TypeCheckFuncBodies case 'G': conf.Import(importPath) default: return "", fmt.Errorf("package %q doesn't contain file %s", importPath, filename) } conf.TypeCheckFuncBodies = func(p string) bool { return p == importPath } return importPath, nil } // pkgContainsFile reports whether file was among the packages Go // files, Test files, eXternal test files, or not found. 
func pkgContainsFile(bp *build.Package, filename string) byte { for i, files := range [][]string{bp.GoFiles, bp.TestGoFiles, bp.XTestGoFiles} { for _, file := range files { if sameFile(filepath.Join(bp.Dir, file), filename) { return "GTX"[i] } } } return 0 // not found } // ParseQueryPos parses the source query position pos and returns the // AST node of the loaded program lprog that it identifies. // If needExact, it must identify a single AST subtree; // this is appropriate for queries that allow fairly arbitrary syntax, // e.g. "describe". // func parseQueryPos(lprog *loader.Program, posFlag string, needExact bool) (*queryPos, error) { filename, startOffset, endOffset, err := parsePosFlag(posFlag) if err != nil { return nil, err } start, end, err := findQueryPos(lprog.Fset, filename, startOffset, endOffset) if err != nil { return nil, err } info, path, exact := lprog.PathEnclosingInterval(start, end) if path == nil { return nil, fmt.Errorf("no syntax here") } if needExact && !exact { return nil, fmt.Errorf("ambiguous selection within %s", astutil.NodeDescription(path[0])) } return &queryPos{lprog.Fset, start, end, path, exact, info}, nil } // ---------- Utilities ---------- // allowErrors causes type errors to be silently ignored. // (Not suitable if SSA construction follows.) func allowErrors(lconf *loader.Config) { ctxt := *lconf.Build // copy ctxt.CgoEnabled = false lconf.Build = &ctxt lconf.AllowErrors = true // AllErrors makes the parser always return an AST instead of // bailing out after 10 errors and returning an empty ast.File. lconf.ParserMode = parser.AllErrors lconf.TypeChecker.Error = func(err error) {} } // ptrAnalysis runs the pointer analysis and returns its result. 
func ptrAnalysis(conf *pointer.Config) *pointer.Result { result, err := pointer.Analyze(conf) if err != nil { panic(err) // pointer analysis internal error } return result } func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } // deref returns a pointer's element type; otherwise it returns typ. func deref(typ types.Type) types.Type { if p, ok := typ.Underlying().(*types.Pointer); ok { return p.Elem() } return typ } // fprintf prints to w a message of the form "location: message\n" // where location is derived from pos. // // pos must be one of: // - a token.Pos, denoting a position // - an ast.Node, denoting an interval // - anything with a Pos() method: // ssa.Member, ssa.Value, ssa.Instruction, types.Object, pointer.Label, etc. // - a QueryPos, denoting the extent of the user's query. // - nil, meaning no position at all. // // The output format is is compatible with the 'gnu' // compilation-error-regexp in Emacs' compilation mode. // TODO(adonovan): support other editors. // func fprintf(w io.Writer, fset *token.FileSet, pos interface{}, format string, args ...interface{}) { var start, end token.Pos switch pos := pos.(type) { case ast.Node: start = pos.Pos() end = pos.End() case token.Pos: start = pos end = start case interface { Pos() token.Pos }: start = pos.Pos() end = start case *queryPos: start = pos.start end = pos.end case nil: // no-op default: panic(fmt.Sprintf("invalid pos: %T", pos)) } if sp := fset.Position(start); start == end { // (prints "-: " for token.NoPos) fmt.Fprintf(w, "%s: ", sp) } else { ep := fset.Position(end) // The -1 below is a concession to Emacs's broken use of // inclusive (not half-open) intervals. // Other editors may not want it. // TODO(adonovan): add an -editor=vim|emacs|acme|auto // flag; auto uses EMACS=t / VIM=... / etc env vars. fmt.Fprintf(w, "%s:%d.%d-%d.%d: ", sp.Filename, sp.Line, sp.Column, ep.Line, ep.Column-1) } fmt.Fprintf(w, format, args...) io.WriteString(w, "\n") }
muzining/net
x/tools/oracle/oracle14.go
GO
bsd-3-clause
11,129
var fs = require('fs') , child_process = require('child_process') , _glob = require('glob') , bunch = require('./bunch') ; exports.loadEnv = function loadEnv(env, cb) { var loaders = [] function load(name, cb) { fs.readFile(env[name], function(error, data) { env[name] = env[name].match(/.*\.json$/) ? JSON.parse(data) : data; cb(error, data) }) } for (var name in env) { loaders.push([load, name]) } bunch(loaders, cb) } exports.commandActor = function command(executable) { return function command(args, opts, cb) { if (!cb) { cb = opts; opts = {} } var cmd = child_process.spawn(executable, args, opts); function log(b) { console.log(b.toString()) } cmd.stdout.on('data', log); cmd.stderr.on('data', log); cmd.on('exit', function(code) { if (code) { cb(new Error(executable + ' exited with status ' + code)); } else { cb(); } }); return cmd; } } exports.jsonParse = function(str, cb) { try { cb(null, JSON.parse(str)); } catch (ex) { cb(ex); } } exports.jsonStringify = function(obj, cb) { try { cb(null, JSON.stringify(obj)); } catch (ex) { cb(ex); } } exports.glob = function glob(pattern, cb) { console.log('pattern', pattern); _glob(pattern, function(error, files) { cb(error, [files]); }); }
KhaosT/node_mdns
utils/lib/actors.js
JavaScript
mit
1,375
Function.prototype.bind = Function.prototype.bind || function (target) { var self = this; return function (args) { if (!(args instanceof Array)) { args = [args]; } self.apply(target, args); }; };
ThatSonnyD/brad-pitt-2048
tile-sets/brad pitt/js/bind_polyfill.js
JavaScript
mit
229
// Copyright (c) .NET Foundation and contributors. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. #if NET45 using System; using System.Runtime.InteropServices; namespace Microsoft.DotNet.PlatformAbstractions.Native { internal static partial class NativeMethods { public static class Unix { public unsafe static string GetUname() { // Utsname shouldn't be larger than 2K var buf = stackalloc byte[2048]; try { if (uname((IntPtr)buf) == 0) { return Marshal.PtrToStringAnsi((IntPtr)buf); } } catch (Exception ex) { throw new PlatformNotSupportedException("Error reading Unix name", ex); } throw new PlatformNotSupportedException("Unknown error reading Unix name"); } [DllImport("libc")] private static extern int uname(IntPtr utsname); } } } #endif
gkhanna79/core-setup
src/managed/Microsoft.DotNet.PlatformAbstractions/Native/NativeMethods.Unix.cs
C#
mit
1,173
from __future__ import print_function import soco """ Prints the name of each discovered player in the network. """ for zone in soco.discover(): print(zone.player_name)
dundeemt/SoCo
examples/commandline/discover.py
Python
mit
175
// DATA_TEMPLATE: js_data oTest.fnStart( "oLanguage.oPaginate" ); /* Note that the paging language information only has relevence in full numbers */ $(document).ready( function () { /* Check the default */ var oTable = $('#example').dataTable( { "aaData": gaaData, "sPaginationType": "full_numbers" } ); var oSettings = oTable.fnSettings(); oTest.fnTest( "oLanguage.oPaginate defaults", null, function () { var bReturn = oSettings.oLanguage.oPaginate.sFirst == "First" && oSettings.oLanguage.oPaginate.sPrevious == "Previous" && oSettings.oLanguage.oPaginate.sNext == "Next" && oSettings.oLanguage.oPaginate.sLast == "Last"; return bReturn; } ); oTest.fnTest( "oLanguage.oPaginate defaults are in the DOM", null, function () { var bReturn = $('#example_paginate .first').html() == "First" && $('#example_paginate .previous').html() == "Previous" && $('#example_paginate .next').html() == "Next" && $('#example_paginate .last').html() == "Last"; return bReturn; } ); oTest.fnTest( "oLanguage.oPaginate can be defined", function () { oSession.fnRestore(); oTable = $('#example').dataTable( { "aaData": gaaData, "sPaginationType": "full_numbers", "oLanguage": { "oPaginate": { "sFirst": "unit1", "sPrevious": "test2", "sNext": "unit3", "sLast": "test4" } } } ); oSettings = oTable.fnSettings(); }, function () { var bReturn = oSettings.oLanguage.oPaginate.sFirst == "unit1" && oSettings.oLanguage.oPaginate.sPrevious == "test2" && oSettings.oLanguage.oPaginate.sNext == "unit3" && oSettings.oLanguage.oPaginate.sLast == "test4"; return bReturn; } ); oTest.fnTest( "oLanguage.oPaginate definitions are in the DOM", null, function () { var bReturn = $('#example_paginate .first').html() == "unit1" && $('#example_paginate .previous').html() == "test2" && $('#example_paginate .next').html() == "unit3" && $('#example_paginate .last').html() == "test4"; return bReturn; } ); oTest.fnComplete(); } );
desarrollotissat/web-interface
web/js/DataTables-1.9.4/media/unit_testing/tests_onhold/2_js/oLanguage.oPaginate.js
JavaScript
mit
2,212
namespace Simple.Data.UnitTest { using System; using System.Collections.Generic; using NUnit.Framework; [TestFixture] public class AdapterFactoryTest { private static AdapterFactory CreateTarget() { return new CachingAdapterFactory(new StubComposer()); } [Test] [ExpectedException(typeof(ArgumentException))] public void CreateWithAnonymousObjectWithoutConnectionStringThrowsArgumentException() { CreateTarget().Create(new { X = "" }); } [Test] public void CreateWithName() { var actual = CreateTarget().Create("Stub", null); Assert.IsNotNull(actual); } } class StubComposer : Composer { public override T Compose<T>() { return (T) Create(); } public override T Compose<T>(string contractName) { return (T)Create(); } private object Create() { return new StubAdapter(); } } class StubAdapter : Adapter { public override IDictionary<string, object> GetKey(string tableName, IDictionary<string, object> record) { throw new NotImplementedException(); } public override IList<string> GetKeyNames(string tableName) { throw new NotImplementedException(); } public override IDictionary<string, object> Get(string tableName, params object[] keyValues) { throw new NotImplementedException(); } public override IEnumerable<IDictionary<string, object>> Find(string tableName, SimpleExpression criteria) { throw new NotImplementedException(); } public override IEnumerable<IDictionary<string, object>> RunQuery(SimpleQuery query, out IEnumerable<SimpleQueryClauseBase> unhandledClauses) { throw new NotImplementedException(); } public override IDictionary<string, object> Insert(string tableName, IDictionary<string, object> data, bool resultRequired) { throw new NotImplementedException(); } public override int Update(string tableName, IDictionary<string, object> data, SimpleExpression criteria) { throw new NotImplementedException(); } public override int Delete(string tableName, SimpleExpression criteria) { throw new NotImplementedException(); } public override 
IEnumerable<IEnumerable<IDictionary<string, object>>> RunQueries(SimpleQuery[] queries, List<IEnumerable<SimpleQueryClauseBase>> unhandledClauses) { throw new NotImplementedException(); } public override bool IsExpressionFunction(string functionName, params object[] args) { throw new NotImplementedException(); } } }
ronnyek/Simple.Data
Simple.Data.UnitTest/AdapterFactoryTest.cs
C#
mit
3,058
<?php /** * @file * Contains \Drupal\Console\Generator\Generator. */ namespace Drupal\Console\Generator; use Drupal\Console\Helper\HelperTrait; use Drupal\Console\Style\DrupalStyle; class Generator { use HelperTrait; /** * @var array */ private $files; /** * @var bool */ private $learning = false; /** * @var array */ private $helperSet; /** * @var DrupalStyle $io */ protected $io; /** * @param string $template * @param string $target * @param array $parameters * @param null $flag * * @return bool */ protected function renderFile($template, $target, $parameters, $flag = null) { if (!is_dir(dirname($target))) { mkdir(dirname($target), 0777, true); } if (file_put_contents($target, $this->getRenderHelper()->render($template, $parameters), $flag)) { $this->files[] = str_replace($this->getDrupalHelper()->getRoot().'/', '', $target); return true; } return false; } /** * @param $helperSet */ public function setHelperSet($helperSet) { $this->helperSet = $helperSet; } /** * @return array */ public function getHelperSet() { return $this->helperSet; } /** * @return array */ public function getFiles() { return $this->files; } /** * @param $learning */ public function setLearning($learning) { $this->learning = $learning; } /** * @return bool */ public function isLearning() { return $this->learning; } /** * @param DrupalStyle $io */ public function setIo($io) { $this->io = $io; } }
sgrichards/BrightonDrupal
vendor/drupal/console/src/Generator/Generator.php
PHP
gpl-2.0
1,821
// { dg-do compile } // { dg-options "-O -w -Wno-psabi" } typedef int vec __attribute__((vector_size(32))); vec fn1() { vec x, zero{}; vec one = zero + 1; return x < zero ? one : zero; }
Gurgel100/gcc
gcc/testsuite/g++.dg/pr86159.C
C++
gpl-2.0
194
/** * Notification.js * * Released under LGPL License. * Copyright (c) 1999-2015 Ephox Corp. All rights reserved * * License: http://www.tinymce.com/license * Contributing: http://www.tinymce.com/contributing */ /** * Creates a notification instance. * * @-x-less Notification.less * @class tinymce.ui.Notification * @extends tinymce.ui.Container * @mixes tinymce.ui.Movable */ define("tinymce/ui/Notification", [ "tinymce/ui/Control", "tinymce/ui/Movable", "tinymce/ui/Progress", "tinymce/util/Delay" ], function(Control, Movable, Progress, Delay) { return Control.extend({ Mixins: [Movable], Defaults: { classes: 'widget notification' }, init: function(settings) { var self = this; self._super(settings); if (settings.text) { self.text(settings.text); } if (settings.icon) { self.icon = settings.icon; } if (settings.color) { self.color = settings.color; } if (settings.type) { self.classes.add('notification-' + settings.type); } if (settings.timeout && (settings.timeout < 0 || settings.timeout > 0) && !settings.closeButton) { self.closeButton = false; } else { self.classes.add('has-close'); self.closeButton = true; } if (settings.progressBar) { self.progressBar = new Progress(); } self.on('click', function(e) { if (e.target.className.indexOf(self.classPrefix + 'close') != -1) { self.close(); } }); }, /** * Renders the control as a HTML string. * * @method renderHtml * @return {String} HTML representing the control. 
*/ renderHtml: function() { var self = this, prefix = self.classPrefix, icon = '', closeButton = '', progressBar = '', notificationStyle = ''; if (self.icon) { icon = '<i class="' + prefix + 'ico' + ' ' + prefix + 'i-' + self.icon + '"></i>'; } if (self.color) { notificationStyle = ' style="background-color: ' + self.color + '"'; } if (self.closeButton) { closeButton = '<button type="button" class="' + prefix + 'close" aria-hidden="true">\u00d7</button>'; } if (self.progressBar) { progressBar = self.progressBar.renderHtml(); } return ( '<div id="' + self._id + '" class="' + self.classes + '"' + notificationStyle + ' role="presentation">' + icon + '<div class="' + prefix + 'notification-inner">' + self.state.get('text') + '</div>' + progressBar + closeButton + '</div>' ); }, postRender: function() { var self = this; Delay.setTimeout(function() { self.$el.addClass(self.classPrefix + 'in'); }); return self._super(); }, bindStates: function() { var self = this; self.state.on('change:text', function(e) { self.getEl().childNodes[1].innerHTML = e.value; }); if (self.progressBar) { self.progressBar.bindStates(); } return self._super(); }, close: function() { var self = this; if (!self.fire('close').isDefaultPrevented()) { self.remove(); } return self; }, /** * Repaints the control after a layout operation. * * @method repaint */ repaint: function() { var self = this, style, rect; style = self.getEl().style; rect = self._layoutRect; style.left = rect.x + 'px'; style.top = rect.y + 'px'; // Hardcoded arbitrary z-value because we want the // notifications under the other windows style.zIndex = 0xFFFF - 1; } }); });
luis-knd/technoMvc
views/tinymce/js/tinymce/classes/ui/Notification.js
JavaScript
gpl-2.0
3,429
<?php /** * Generic Slider super class. Extended by library specific classes. */ class MetaImageSlide extends MetaSlide { /** * Register slide type */ public function __construct() { add_filter( 'metaslider_get_image_slide', array( $this, 'get_slide' ), 10, 2 ); add_action( 'metaslider_save_image_slide', array( $this, 'save_slide' ), 5, 3 ); add_action( 'wp_ajax_create_image_slide', array( $this, 'ajax_create_slide' ) ); add_action( 'wp_ajax_resize_image_slide', array( $this, 'ajax_resize_slide' ) ); } /** * Create a new slide and echo the admin HTML */ public function ajax_create_slide() { // security check if ( !wp_verify_nonce( $_REQUEST['_wpnonce'], 'metaslider_addslide' ) ) { echo "<tr><td colspan='2'>" . __( "Security check failed. Refresh page and try again.", 'metaslider' ) . "</td></tr>"; die(); } $slider_id = absint( $_POST['slider_id'] ); $selection = $_POST['selection']; if ( is_array( $selection ) && count( $selection ) && $slider_id > 0 ) { foreach ( $selection as $slide_id ) { $this->set_slide( $slide_id ); $this->set_slider( $slider_id ); if ( $this->slide_exists_in_slideshow( $slider_id, $slide_id ) ) { echo "<tr><td colspan='2'>ID: {$slide_id} \"" . get_the_title( $slide_id ) . "\" - " . __( "Failed to add slide. Slide already exists in slideshow.", 'metaslider' ) . "</td></tr>"; } else if ( !$this->slide_is_unassigned_or_image_slide( $slider_id, $slide_id ) ) { echo "<tr><td colspan='2'>ID: {$slide_id} \"" . get_the_title( $slide_id ) . "\" - " . __( "Failed to add slide. Slide is not of type 'image'.", 'metaslider' ) . 
"</td></tr>"; }else { $this->tag_slide_to_slider(); $this->add_or_update_or_delete_meta( $slide_id, 'type', 'image' ); // override the width and height to kick off the AJAX image resizing on save $this->settings['width'] = 0; $this->settings['height'] = 0; echo $this->get_admin_slide(); } } } die(); } /** * Create a new slide and echo the admin HTML */ public function ajax_resize_slide() { check_admin_referer( 'metaslider_resize' ); $slider_id = absint( $_POST['slider_id'] ); $slide_id = absint( $_POST['slide_id'] ); $this->set_slide( $slide_id ); $this->set_slider( $slider_id ); $settings = get_post_meta( $slider_id, 'ml-slider_settings', true ); // create a copy of the correct sized image $imageHelper = new MetaSliderImageHelper( $slide_id, $settings['width'], $settings['height'], isset( $settings['smartCrop'] ) ? $settings['smartCrop'] : 'false', $this->use_wp_image_editor() ); $url = $imageHelper->get_image_url(); echo $url . " (" . $settings['width'] . 'x' . $settings['height'] . ")"; die(); } /** * Return the HTML used to display this slide in the admin screen * * @return string slide html */ protected function get_admin_slide() { // get some slide settings $imageHelper = new MetaSliderImageHelper( $this->slide->ID, 150, 150, 'false', $this->use_wp_image_editor() ); $thumb = $imageHelper->get_image_url(); $url = get_post_meta( $this->slide->ID, 'ml-slider_url', true ); $title = get_post_meta( $this->slide->ID, 'ml-slider_title', true ); $alt = get_post_meta( $this->slide->ID, '_wp_attachment_image_alt', true ); $target = get_post_meta( $this->slide->ID, 'ml-slider_new_window', true ) ? 
'checked=checked' : ''; $caption = htmlentities( $this->slide->post_excerpt, ENT_QUOTES, 'UTF-8' ); // localisation $str_caption = __( "Caption", "metaslider" ); $str_new_window = __( "New Window", "metaslider" ); $str_url = __( "URL", "metaslider" ); $str_label = __( "Image Slide", "metaslider" ); $slide_label = apply_filters( "metaslider_image_slide_label", $str_label, $this->slide, $this->settings ); // slide row HTML $row = "<tr class='slide image flex responsive nivo coin'>"; $row .= " <td class='col-1'>"; $row .= " <div class='thumb' style='background-image: url({$thumb})'>"; $row .= " <a class='delete-slide confirm' href='?page=metaslider&amp;id={$this->slider->ID}&amp;deleteSlide={$this->slide->ID}'>x</a>"; $row .= " <span class='slide-details'>" . $slide_label . "</span>"; $row .= " </div>"; $row .= " </td>"; $row .= " <td class='col-2'>"; $row .= " <ul class='tabs'>"; $row .= " <li class='selected' rel='tab-1'>" . __( "General", "metaslider" ) . "</li>"; $row .= " <li rel='tab-2'>" . __( "SEO", "metaslider" ) . "</li>"; $row .= " </ul>"; $row .= " <div class='tabs-content'>"; $row .= " <div class='tab tab-1'>"; if ( !$this->is_valid_image() ) { $row .= "<div class='warning'>" . __( "Warning: Image data does not exist. Please re-upload the image.", "metaslider" ) . "</div>"; } $row .= " <textarea name='attachment[{$this->slide->ID}][post_excerpt]' placeholder='{$str_caption}'>{$caption}</textarea>"; $row .= " <input class='url' type='text' name='attachment[{$this->slide->ID}][url]' placeholder='{$str_url}' value='{$url}' />"; $row .= " <div class='new_window'>"; $row .= " <label>{$str_new_window}<input type='checkbox' name='attachment[{$this->slide->ID}][new_window]' {$target} /></label>"; $row .= " </div>"; $row .= " </div>"; $row .= " <div class='tab tab-2' style='display: none;'>"; $row .= " <div class='row'><label>" . __( "Image Title Text", "metaslider" ) . 
"</label></div>"; $row .= " <div class='row'><input type='text' size='50' name='attachment[{$this->slide->ID}][title]' value='{$title}' /></div>"; $row .= " <div class='row'><label>" . __( "Image Alt Text", "metaslider" ) . "</label></div>"; $row .= " <div class='row'><input type='text' size='50' name='attachment[{$this->slide->ID}][alt]' value='{$alt}' /></div>"; $row .= " </div>"; $row .= " </div>"; $row .= " <input type='hidden' name='attachment[{$this->slide->ID}][type]' value='image' />"; $row .= " <input type='hidden' class='menu_order' name='attachment[{$this->slide->ID}][menu_order]' value='{$this->slide->menu_order}' />"; $row .= " <input type='hidden' name='resize_slide_id' data-slide_id='{$this->slide->ID}' data-width='{$this->settings['width']}' data-height='{$this->settings['height']}' />"; $row .= " </td>"; $row .= "</tr>"; return $row; } /** * Check to see if metadata exists for this image. Assume the image is * valid if metadata and a size exists for it (generated during initial * upload to WordPress). * * @return bool, true if metadata and size exists. */ public function is_valid_image() { $meta = wp_get_attachment_metadata( $this->slide->ID ); return isset( $meta['width'], $meta['height'] ); } /** * Disable/enable image editor * * @return bool */ public function use_wp_image_editor() { return apply_filters( 'metaslider_use_image_editor', $this->is_valid_image() ); } /** * Returns the HTML for the public slide * * @return string slide html */ protected function get_public_slide() { // get the image url (and handle cropping) // disable wp_image_editor if metadata does not exist for the slide $imageHelper = new MetaSliderImageHelper( $this->slide->ID, $this->settings['width'], $this->settings['height'], isset( $this->settings['smartCrop'] ) ? 
$this->settings['smartCrop'] : 'false', $this->use_wp_image_editor() ); $thumb = $imageHelper->get_image_url(); // store the slide details $slide = array( 'id' => $this->slide->ID, 'url' => __( get_post_meta( $this->slide->ID, 'ml-slider_url', true ) ), 'title' => __( get_post_meta( $this->slide->ID, 'ml-slider_title', true ) ), 'target' => get_post_meta( $this->slide->ID, 'ml-slider_new_window', true ) ? '_blank' : '_self', 'src' => $thumb, 'thumb' => $thumb, // backwards compatibility with Vantage 'width' => $this->settings['width'], 'height' => $this->settings['height'], 'alt' => __( get_post_meta( $this->slide->ID, '_wp_attachment_image_alt', true ) ), 'caption' => __( html_entity_decode( do_shortcode( $this->slide->post_excerpt ), ENT_NOQUOTES, 'UTF-8' ) ), 'caption_raw' => __( do_shortcode( $this->slide->post_excerpt ) ), 'class' => "slider-{$this->slider->ID} slide-{$this->slide->ID}", 'rel' => "", 'data-thumb' => "" ); // fix slide URLs if ( strpos( $slide['url'], 'www.' ) === 0 ) { $slide['url'] = 'http://' . 
$slide['url']; } $slide = apply_filters( 'metaslider_image_slide_attributes', $slide, $this->slider->ID, $this->settings ); // return the slide HTML switch ( $this->settings['type'] ) { case "coin": return $this->get_coin_slider_markup( $slide ); case "flex": return $this->get_flex_slider_markup( $slide ); case "nivo": return $this->get_nivo_slider_markup( $slide ); case "responsive": return $this->get_responsive_slides_markup( $slide ); default: return $this->get_flex_slider_markup( $slide ); } } /** * Generate nivo slider markup * * @return string slide html */ private function get_nivo_slider_markup( $slide ) { $attributes = apply_filters( 'metaslider_nivo_slider_image_attributes', array( 'src' => $slide['src'], 'height' => $slide['height'], 'width' => $slide['width'], 'data-title' => htmlentities( $slide['caption_raw'], ENT_QUOTES, 'UTF-8' ), 'data-thumb' => $slide['data-thumb'], 'title' => $slide['title'], 'alt' => $slide['alt'], 'rel' => $slide['rel'], 'class' => $slide['class'] ), $slide, $this->slider->ID ); $html = $this->build_image_tag( $attributes ); $anchor_attributes = apply_filters( 'metaslider_nivo_slider_anchor_attributes', array( 'href' => $slide['url'], 'target' => $slide['target'] ), $slide, $this->slider->ID ); if ( strlen( $anchor_attributes['href'] ) ) { $html = $this->build_anchor_tag( $anchor_attributes, $html ); } return apply_filters( 'metaslider_image_nivo_slider_markup', $html, $slide, $this->settings ); } /** * Generate flex slider markup * * @return string slide html */ private function get_flex_slider_markup( $slide ) { $attributes = apply_filters( 'metaslider_flex_slider_image_attributes', array( 'src' => $slide['src'], 'height' => $slide['height'], 'width' => $slide['width'], 'alt' => $slide['alt'], 'rel' => $slide['rel'], 'class' => $slide['class'], 'title' => $slide['title'] ), $slide, $this->slider->ID ); $html = $this->build_image_tag( $attributes ); $anchor_attributes = apply_filters( 
'metaslider_flex_slider_anchor_attributes', array( 'href' => $slide['url'], 'target' => $slide['target'] ), $slide, $this->slider->ID ); if ( strlen( $anchor_attributes['href'] ) ) { $html = $this->build_anchor_tag( $anchor_attributes, $html ); } // add caption if ( strlen( $slide['caption'] ) ) { $html .= '<div class="caption-wrap"><div class="caption">' . $slide['caption'] . '</div></div>'; } $thumb = isset( $slide['data-thumb'] ) && strlen( $slide['data-thumb'] ) ? " data-thumb=\"{$slide['data-thumb']}\"" : ""; $html = '<li style="display: none; float: left; width: 100%;"' . $thumb . '>' . $html . '</li>'; return apply_filters( 'metaslider_image_flex_slider_markup', $html, $slide, $this->settings ); } /** * Generate coin slider markup * * @return string slide html */ private function get_coin_slider_markup( $slide ) { $attributes = apply_filters( 'metaslider_coin_slider_image_attributes', array( 'src' => $slide['src'], 'height' => $slide['height'], 'width' => $slide['width'], 'alt' => $slide['alt'], 'rel' => $slide['rel'], 'class' => $slide['class'], 'title' => $slide['title'], 'style' => 'display: none;' ), $slide, $this->slider->ID ); $html = $this->build_image_tag( $attributes ); if ( strlen( $slide['caption'] ) ) { $html .= "<span>{$slide['caption']}</span>"; } $attributes = apply_filters( 'metaslider_coin_slider_anchor_attributes', array( 'href' => strlen( $slide['url'] ) ? 
$slide['url'] : 'javascript:void(0)' ), $slide, $this->slider->ID ); $html = $this->build_anchor_tag( $attributes, $html ); return apply_filters( 'metaslider_image_coin_slider_markup', $html, $slide, $this->settings ); } /** * Generate responsive slides markup * * @return string slide html */ private function get_responsive_slides_markup( $slide ) { $attributes = apply_filters( 'metaslider_responsive_slider_image_attributes', array( 'src' => $slide['src'], 'height' => $slide['height'], 'width' => $slide['width'], 'alt' => $slide['alt'], 'rel' => $slide['rel'], 'class' => $slide['class'], 'title' => $slide['title'] ), $slide, $this->slider->ID ); $html = $this->build_image_tag( $attributes ); if ( strlen( $slide['caption'] ) ) { $html .= '<div class="caption-wrap"><div class="caption">' . $slide['caption'] . '</div></div>'; } $anchor_attributes = apply_filters( 'metaslider_responsive_slider_anchor_attributes', array( 'href' => $slide['url'], 'target' => $slide['target'] ), $slide, $this->slider->ID ); if ( strlen( $anchor_attributes['href'] ) ) { $html = $this->build_anchor_tag( $anchor_attributes, $html ); } return apply_filters( 'metaslider_image_responsive_slider_markup', $html, $slide, $this->settings ); } /** * Save */ protected function save( $fields ) { // update the slide wp_update_post( array( 'ID' => $this->slide->ID, 'post_excerpt' => $fields['post_excerpt'], 'menu_order' => $fields['menu_order'] ) ); // store the URL as a meta field against the attachment $this->add_or_update_or_delete_meta( $this->slide->ID, 'url', $fields['url'] ); $this->add_or_update_or_delete_meta( $this->slide->ID, 'title', $fields['title'] ); if ( isset( $fields['alt'] ) ) { update_post_meta( $this->slide->ID, '_wp_attachment_image_alt', $fields['alt'] ); } // store the 'new window' setting $new_window = isset( $fields['new_window'] ) && $fields['new_window'] == 'on' ? 'true' : 'false'; $this->add_or_update_or_delete_meta( $this->slide->ID, 'new_window', $new_window ); } } ?>
jackcommon/vnusa
wp-content/plugins/ml-slider/inc/slide/metaslide.image.class.php
PHP
gpl-2.0
16,682
/* * Copyright (c) 2002, 2011, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
* */ #include "precompiled.hpp" #include "classfile/symbolTable.hpp" #include "gc_implementation/parallelScavenge/cardTableExtension.hpp" #include "gc_implementation/parallelScavenge/gcTaskManager.hpp" #include "gc_implementation/parallelScavenge/generationSizer.hpp" #include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp" #include "gc_implementation/parallelScavenge/psAdaptiveSizePolicy.hpp" #include "gc_implementation/parallelScavenge/psMarkSweep.hpp" #include "gc_implementation/parallelScavenge/psParallelCompact.hpp" #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp" #include "gc_implementation/parallelScavenge/psTasks.hpp" #include "gc_implementation/shared/isGCActiveMark.hpp" #include "gc_implementation/shared/spaceDecorator.hpp" #include "gc_interface/gcCause.hpp" #include "memory/collectorPolicy.hpp" #include "memory/gcLocker.inline.hpp" #include "memory/referencePolicy.hpp" #include "memory/referenceProcessor.hpp" #include "memory/resourceArea.hpp" #include "oops/oop.inline.hpp" #include "oops/oop.psgc.inline.hpp" #include "runtime/biasedLocking.hpp" #include "runtime/fprofiler.hpp" #include "runtime/handles.inline.hpp" #include "runtime/threadCritical.hpp" #include "runtime/vmThread.hpp" #include "runtime/vm_operations.hpp" #include "services/memoryService.hpp" #include "utilities/stack.inline.hpp" HeapWord* PSScavenge::_to_space_top_before_gc = NULL; int PSScavenge::_consecutive_skipped_scavenges = 0; ReferenceProcessor* PSScavenge::_ref_processor = NULL; CardTableExtension* PSScavenge::_card_table = NULL; bool PSScavenge::_survivor_overflow = false; int PSScavenge::_tenuring_threshold = 0; HeapWord* PSScavenge::_young_generation_boundary = NULL; elapsedTimer PSScavenge::_accumulated_time; Stack<markOop> PSScavenge::_preserved_mark_stack; Stack<oop> PSScavenge::_preserved_oop_stack; CollectorCounters* PSScavenge::_counters = NULL; bool PSScavenge::_promotion_failed = false; // Define before use class PSIsAliveClosure: public 
BoolObjectClosure { public: void do_object(oop p) { assert(false, "Do not call."); } bool do_object_b(oop p) { return (!PSScavenge::is_obj_in_young((HeapWord*) p)) || p->is_forwarded(); } }; PSIsAliveClosure PSScavenge::_is_alive_closure; class PSKeepAliveClosure: public OopClosure { protected: MutableSpace* _to_space; PSPromotionManager* _promotion_manager; public: PSKeepAliveClosure(PSPromotionManager* pm) : _promotion_manager(pm) { ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap(); assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity"); _to_space = heap->young_gen()->to_space(); assert(_promotion_manager != NULL, "Sanity"); } template <class T> void do_oop_work(T* p) { assert (!oopDesc::is_null(*p), "expected non-null ref"); assert ((oopDesc::load_decode_heap_oop_not_null(p))->is_oop(), "expected an oop while scanning weak refs"); // Weak refs may be visited more than once. if (PSScavenge::should_scavenge(p, _to_space)) { PSScavenge::copy_and_push_safe_barrier(_promotion_manager, p); } } virtual void do_oop(oop* p) { PSKeepAliveClosure::do_oop_work(p); } virtual void do_oop(narrowOop* p) { PSKeepAliveClosure::do_oop_work(p); } }; class PSEvacuateFollowersClosure: public VoidClosure { private: PSPromotionManager* _promotion_manager; public: PSEvacuateFollowersClosure(PSPromotionManager* pm) : _promotion_manager(pm) {} virtual void do_void() { assert(_promotion_manager != NULL, "Sanity"); _promotion_manager->drain_stacks(true); guarantee(_promotion_manager->stacks_empty(), "stacks should be empty at this point"); } }; class PSPromotionFailedClosure : public ObjectClosure { virtual void do_object(oop obj) { if (obj->is_forwarded()) { obj->init_mark(); } } }; class PSRefProcTaskProxy: public GCTask { typedef AbstractRefProcTaskExecutor::ProcessTask ProcessTask; ProcessTask & _rp_task; uint _work_id; public: PSRefProcTaskProxy(ProcessTask & rp_task, uint work_id) : _rp_task(rp_task), _work_id(work_id) { } private: virtual char* 
name() { return (char *)"Process referents by policy in parallel"; } virtual void do_it(GCTaskManager* manager, uint which); }; void PSRefProcTaskProxy::do_it(GCTaskManager* manager, uint which) { PSPromotionManager* promotion_manager = PSPromotionManager::gc_thread_promotion_manager(which); assert(promotion_manager != NULL, "sanity check"); PSKeepAliveClosure keep_alive(promotion_manager); PSEvacuateFollowersClosure evac_followers(promotion_manager); PSIsAliveClosure is_alive; _rp_task.work(_work_id, is_alive, keep_alive, evac_followers); } class PSRefEnqueueTaskProxy: public GCTask { typedef AbstractRefProcTaskExecutor::EnqueueTask EnqueueTask; EnqueueTask& _enq_task; uint _work_id; public: PSRefEnqueueTaskProxy(EnqueueTask& enq_task, uint work_id) : _enq_task(enq_task), _work_id(work_id) { } virtual char* name() { return (char *)"Enqueue reference objects in parallel"; } virtual void do_it(GCTaskManager* manager, uint which) { _enq_task.work(_work_id); } }; class PSRefProcTaskExecutor: public AbstractRefProcTaskExecutor { virtual void execute(ProcessTask& task); virtual void execute(EnqueueTask& task); }; void PSRefProcTaskExecutor::execute(ProcessTask& task) { GCTaskQueue* q = GCTaskQueue::create(); for(uint i=0; i<ParallelGCThreads; i++) { q->enqueue(new PSRefProcTaskProxy(task, i)); } ParallelTaskTerminator terminator( ParallelScavengeHeap::gc_task_manager()->workers(), (TaskQueueSetSuper*) PSPromotionManager::stack_array_depth()); if (task.marks_oops_alive() && ParallelGCThreads > 1) { for (uint j=0; j<ParallelGCThreads; j++) { q->enqueue(new StealTask(&terminator)); } } ParallelScavengeHeap::gc_task_manager()->execute_and_wait(q); } void PSRefProcTaskExecutor::execute(EnqueueTask& task) { GCTaskQueue* q = GCTaskQueue::create(); for(uint i=0; i<ParallelGCThreads; i++) { q->enqueue(new PSRefEnqueueTaskProxy(task, i)); } ParallelScavengeHeap::gc_task_manager()->execute_and_wait(q); } // This method contains all heap specific policy for invoking scavenge. 
// PSScavenge::invoke_no_policy() will do nothing but attempt to // scavenge. It will not clean up after failed promotions, bail out if // we've exceeded policy time limits, or any other special behavior. // All such policy should be placed here. // // Note that this method should only be called from the vm_thread while // at a safepoint! void PSScavenge::invoke() { assert(SafepointSynchronize::is_at_safepoint(), "should be at safepoint"); assert(Thread::current() == (Thread*)VMThread::vm_thread(), "should be in vm thread"); assert(!Universe::heap()->is_gc_active(), "not reentrant"); ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap(); assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity"); PSAdaptiveSizePolicy* policy = heap->size_policy(); IsGCActiveMark mark; bool scavenge_was_done = PSScavenge::invoke_no_policy(); PSGCAdaptivePolicyCounters* counters = heap->gc_policy_counters(); if (UsePerfData) counters->update_full_follows_scavenge(0); if (!scavenge_was_done || policy->should_full_GC(heap->old_gen()->free_in_bytes())) { if (UsePerfData) counters->update_full_follows_scavenge(full_follows_scavenge); GCCauseSetter gccs(heap, GCCause::_adaptive_size_policy); CollectorPolicy* cp = heap->collector_policy(); const bool clear_all_softrefs = cp->should_clear_all_soft_refs(); if (UseParallelOldGC) { PSParallelCompact::invoke_no_policy(clear_all_softrefs); } else { PSMarkSweep::invoke_no_policy(clear_all_softrefs); } } } // This method contains no policy. You should probably // be calling invoke() instead. 
bool PSScavenge::invoke_no_policy() { assert(SafepointSynchronize::is_at_safepoint(), "should be at safepoint"); assert(Thread::current() == (Thread*)VMThread::vm_thread(), "should be in vm thread"); assert(_preserved_mark_stack.is_empty(), "should be empty"); assert(_preserved_oop_stack.is_empty(), "should be empty"); TimeStamp scavenge_entry; TimeStamp scavenge_midpoint; TimeStamp scavenge_exit; scavenge_entry.update(); if (GC_locker::check_active_before_gc()) { return false; } ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap(); GCCause::Cause gc_cause = heap->gc_cause(); assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity"); // Check for potential problems. if (!should_attempt_scavenge()) { return false; } bool promotion_failure_occurred = false; PSYoungGen* young_gen = heap->young_gen(); PSOldGen* old_gen = heap->old_gen(); PSPermGen* perm_gen = heap->perm_gen(); PSAdaptiveSizePolicy* size_policy = heap->size_policy(); heap->increment_total_collections(); AdaptiveSizePolicyOutput(size_policy, heap->total_collections()); if ((gc_cause != GCCause::_java_lang_system_gc) || UseAdaptiveSizePolicyWithSystemGC) { // Gather the feedback data for eden occupancy. 
young_gen->eden_space()->accumulate_statistics(); } if (ZapUnusedHeapArea) { // Save information needed to minimize mangling heap->record_gen_tops_before_GC(); } if (PrintHeapAtGC) { Universe::print_heap_before_gc(); } assert(!NeverTenure || _tenuring_threshold == markOopDesc::max_age + 1, "Sanity"); assert(!AlwaysTenure || _tenuring_threshold == 0, "Sanity"); size_t prev_used = heap->used(); assert(promotion_failed() == false, "Sanity"); // Fill in TLABs heap->accumulate_statistics_all_tlabs(); heap->ensure_parsability(true); // retire TLABs if (VerifyBeforeGC && heap->total_collections() >= VerifyGCStartAt) { HandleMark hm; // Discard invalid handles created during verification gclog_or_tty->print(" VerifyBeforeGC:"); Universe::verify(true); } { ResourceMark rm; HandleMark hm; gclog_or_tty->date_stamp(PrintGC && PrintGCDateStamps); TraceCPUTime tcpu(PrintGCDetails, true, gclog_or_tty); TraceTime t1("GC", PrintGC, !PrintGCDetails, gclog_or_tty); TraceCollectorStats tcs(counters()); TraceMemoryManagerStats tms(false /* not full GC */,gc_cause); if (TraceGen0Time) accumulated_time()->start(); // Let the size policy know we're starting size_policy->minor_collection_begin(); // Verify the object start arrays. 
if (VerifyObjectStartArray && VerifyBeforeGC) { old_gen->verify_object_start_array(); perm_gen->verify_object_start_array(); } // Verify no unmarked old->young roots if (VerifyRememberedSets) { CardTableExtension::verify_all_young_refs_imprecise(); } if (!ScavengeWithObjectsInToSpace) { assert(young_gen->to_space()->is_empty(), "Attempt to scavenge with live objects in to_space"); young_gen->to_space()->clear(SpaceDecorator::Mangle); } else if (ZapUnusedHeapArea) { young_gen->to_space()->mangle_unused_area(); } save_to_space_top_before_gc(); NOT_PRODUCT(reference_processor()->verify_no_references_recorded()); COMPILER2_PRESENT(DerivedPointerTable::clear()); reference_processor()->enable_discovery(); reference_processor()->setup_policy(false); // We track how much was promoted to the next generation for // the AdaptiveSizePolicy. size_t old_gen_used_before = old_gen->used_in_bytes(); // For PrintGCDetails size_t young_gen_used_before = young_gen->used_in_bytes(); // Reset our survivor overflow. set_survivor_overflow(false); // We need to save the old/perm top values before // creating the promotion_manager. We pass the top // values to the card_table, to prevent it from // straying into the promotion labs. HeapWord* old_top = old_gen->object_space()->top(); HeapWord* perm_top = perm_gen->object_space()->top(); // Release all previously held resources gc_task_manager()->release_all_resources(); PSPromotionManager::pre_scavenge(); // We'll use the promotion manager again later. 
PSPromotionManager* promotion_manager = PSPromotionManager::vm_thread_promotion_manager(); { // TraceTime("Roots"); ParallelScavengeHeap::ParStrongRootsScope psrs; GCTaskQueue* q = GCTaskQueue::create(); for(uint i=0; i<ParallelGCThreads; i++) { q->enqueue(new OldToYoungRootsTask(old_gen, old_top, i)); } q->enqueue(new SerialOldToYoungRootsTask(perm_gen, perm_top)); q->enqueue(new ScavengeRootsTask(ScavengeRootsTask::universe)); q->enqueue(new ScavengeRootsTask(ScavengeRootsTask::jni_handles)); // We scan the thread roots in parallel Threads::create_thread_roots_tasks(q); q->enqueue(new ScavengeRootsTask(ScavengeRootsTask::object_synchronizer)); q->enqueue(new ScavengeRootsTask(ScavengeRootsTask::flat_profiler)); q->enqueue(new ScavengeRootsTask(ScavengeRootsTask::management)); q->enqueue(new ScavengeRootsTask(ScavengeRootsTask::system_dictionary)); q->enqueue(new ScavengeRootsTask(ScavengeRootsTask::jvmti)); q->enqueue(new ScavengeRootsTask(ScavengeRootsTask::code_cache)); ParallelTaskTerminator terminator( gc_task_manager()->workers(), (TaskQueueSetSuper*) promotion_manager->stack_array_depth()); if (ParallelGCThreads>1) { for (uint j=0; j<ParallelGCThreads; j++) { q->enqueue(new StealTask(&terminator)); } } gc_task_manager()->execute_and_wait(q); } scavenge_midpoint.update(); // Process reference objects discovered during scavenge { reference_processor()->setup_policy(false); // not always_clear PSKeepAliveClosure keep_alive(promotion_manager); PSEvacuateFollowersClosure evac_followers(promotion_manager); if (reference_processor()->processing_is_mt()) { PSRefProcTaskExecutor task_executor; reference_processor()->process_discovered_references( &_is_alive_closure, &keep_alive, &evac_followers, &task_executor); } else { reference_processor()->process_discovered_references( &_is_alive_closure, &keep_alive, &evac_followers, NULL); } } // Enqueue reference objects discovered during scavenge. 
if (reference_processor()->processing_is_mt()) { PSRefProcTaskExecutor task_executor; reference_processor()->enqueue_discovered_references(&task_executor); } else { reference_processor()->enqueue_discovered_references(NULL); } if (!JavaObjectsInPerm) { // Unlink any dead interned Strings StringTable::unlink(&_is_alive_closure); // Process the remaining live ones PSScavengeRootsClosure root_closure(promotion_manager); StringTable::oops_do(&root_closure); } // Finally, flush the promotion_manager's labs, and deallocate its stacks. PSPromotionManager::post_scavenge(); promotion_failure_occurred = promotion_failed(); if (promotion_failure_occurred) { clean_up_failed_promotion(); if (PrintGC) { gclog_or_tty->print("--"); } } // Let the size policy know we're done. Note that we count promotion // failure cleanup time as part of the collection (otherwise, we're // implicitly saying it's mutator time). size_policy->minor_collection_end(gc_cause); if (!promotion_failure_occurred) { // Swap the survivor spaces. young_gen->eden_space()->clear(SpaceDecorator::Mangle); young_gen->from_space()->clear(SpaceDecorator::Mangle); young_gen->swap_spaces(); size_t survived = young_gen->from_space()->used_in_bytes(); size_t promoted = old_gen->used_in_bytes() - old_gen_used_before; size_policy->update_averages(_survivor_overflow, survived, promoted); // A successful scavenge should restart the GC time limit count which is // for full GC's. 
size_policy->reset_gc_overhead_limit_count(); if (UseAdaptiveSizePolicy) { // Calculate the new survivor size and tenuring threshold if (PrintAdaptiveSizePolicy) { gclog_or_tty->print("AdaptiveSizeStart: "); gclog_or_tty->stamp(); gclog_or_tty->print_cr(" collection: %d ", heap->total_collections()); if (Verbose) { gclog_or_tty->print("old_gen_capacity: %d young_gen_capacity: %d" " perm_gen_capacity: %d ", old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes(), perm_gen->capacity_in_bytes()); } } if (UsePerfData) { PSGCAdaptivePolicyCounters* counters = heap->gc_policy_counters(); counters->update_old_eden_size( size_policy->calculated_eden_size_in_bytes()); counters->update_old_promo_size( size_policy->calculated_promo_size_in_bytes()); counters->update_old_capacity(old_gen->capacity_in_bytes()); counters->update_young_capacity(young_gen->capacity_in_bytes()); counters->update_survived(survived); counters->update_promoted(promoted); counters->update_survivor_overflowed(_survivor_overflow); } size_t survivor_limit = size_policy->max_survivor_size(young_gen->max_size()); _tenuring_threshold = size_policy->compute_survivor_space_size_and_threshold( _survivor_overflow, _tenuring_threshold, survivor_limit); if (PrintTenuringDistribution) { gclog_or_tty->cr(); gclog_or_tty->print_cr("Desired survivor size %ld bytes, new threshold %d (max %d)", size_policy->calculated_survivor_size_in_bytes(), _tenuring_threshold, MaxTenuringThreshold); } if (UsePerfData) { PSGCAdaptivePolicyCounters* counters = heap->gc_policy_counters(); counters->update_tenuring_threshold(_tenuring_threshold); counters->update_survivor_size_counters(); } // Do call at minor collections? // Don't check if the size_policy is ready at this // level. Let the size_policy check that internally. 
if (UseAdaptiveSizePolicy && UseAdaptiveGenerationSizePolicyAtMinorCollection && ((gc_cause != GCCause::_java_lang_system_gc) || UseAdaptiveSizePolicyWithSystemGC)) { // Calculate optimial free space amounts assert(young_gen->max_size() > young_gen->from_space()->capacity_in_bytes() + young_gen->to_space()->capacity_in_bytes(), "Sizes of space in young gen are out-of-bounds"); size_t max_eden_size = young_gen->max_size() - young_gen->from_space()->capacity_in_bytes() - young_gen->to_space()->capacity_in_bytes(); size_policy->compute_generation_free_space(young_gen->used_in_bytes(), young_gen->eden_space()->used_in_bytes(), old_gen->used_in_bytes(), perm_gen->used_in_bytes(), young_gen->eden_space()->capacity_in_bytes(), old_gen->max_gen_size(), max_eden_size, false /* full gc*/, gc_cause, heap->collector_policy()); } // Resize the young generation at every collection // even if new sizes have not been calculated. This is // to allow resizes that may have been inhibited by the // relative location of the "to" and "from" spaces. // Resizing the old gen at minor collects can cause increases // that don't feed back to the generation sizing policy until // a major collection. Don't resize the old gen here. heap->resize_young_gen(size_policy->calculated_eden_size_in_bytes(), size_policy->calculated_survivor_size_in_bytes()); if (PrintAdaptiveSizePolicy) { gclog_or_tty->print_cr("AdaptiveSizeStop: collection: %d ", heap->total_collections()); } } // Update the structure of the eden. With NUMA-eden CPU hotplugging or offlining can // cause the change of the heap layout. Make sure eden is reshaped if that's the case. // Also update() will case adaptive NUMA chunk resizing. 
assert(young_gen->eden_space()->is_empty(), "eden space should be empty now"); young_gen->eden_space()->update(); heap->gc_policy_counters()->update_counters(); heap->resize_all_tlabs(); assert(young_gen->to_space()->is_empty(), "to space should be empty now"); } COMPILER2_PRESENT(DerivedPointerTable::update_pointers()); NOT_PRODUCT(reference_processor()->verify_no_references_recorded()); // Re-verify object start arrays if (VerifyObjectStartArray && VerifyAfterGC) { old_gen->verify_object_start_array(); perm_gen->verify_object_start_array(); } // Verify all old -> young cards are now precise if (VerifyRememberedSets) { // Precise verification will give false positives. Until this is fixed, // use imprecise verification. // CardTableExtension::verify_all_young_refs_precise(); CardTableExtension::verify_all_young_refs_imprecise(); } if (TraceGen0Time) accumulated_time()->stop(); if (PrintGC) { if (PrintGCDetails) { // Don't print a GC timestamp here. This is after the GC so // would be confusing. 
young_gen->print_used_change(young_gen_used_before); } heap->print_heap_change(prev_used); } // Track memory usage and detect low memory MemoryService::track_memory_usage(); heap->update_counters(); } if (VerifyAfterGC && heap->total_collections() >= VerifyGCStartAt) { HandleMark hm; // Discard invalid handles created during verification gclog_or_tty->print(" VerifyAfterGC:"); Universe::verify(false); } if (PrintHeapAtGC) { Universe::print_heap_after_gc(); } if (ZapUnusedHeapArea) { young_gen->eden_space()->check_mangled_unused_area_complete(); young_gen->from_space()->check_mangled_unused_area_complete(); young_gen->to_space()->check_mangled_unused_area_complete(); } scavenge_exit.update(); if (PrintGCTaskTimeStamps) { tty->print_cr("VM-Thread " INT64_FORMAT " " INT64_FORMAT " " INT64_FORMAT, scavenge_entry.ticks(), scavenge_midpoint.ticks(), scavenge_exit.ticks()); gc_task_manager()->print_task_time_stamps(); } #ifdef TRACESPINNING ParallelTaskTerminator::print_termination_counts(); #endif return !promotion_failure_occurred; } // This method iterates over all objects in the young generation, // unforwarding markOops. It then restores any preserved mark oops, // and clears the _preserved_mark_stack. void PSScavenge::clean_up_failed_promotion() { ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap(); assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity"); assert(promotion_failed(), "Sanity"); PSYoungGen* young_gen = heap->young_gen(); { ResourceMark rm; // Unforward all pointers in the young gen. PSPromotionFailedClosure unforward_closure; young_gen->object_iterate(&unforward_closure); if (PrintGC && Verbose) { gclog_or_tty->print_cr("Restoring %d marks", _preserved_oop_stack.size()); } // Restore any saved marks. while (!_preserved_oop_stack.is_empty()) { oop obj = _preserved_oop_stack.pop(); markOop mark = _preserved_mark_stack.pop(); obj->set_mark(mark); } // Clear the preserved mark and oop stack caches. 
_preserved_mark_stack.clear(true); _preserved_oop_stack.clear(true); _promotion_failed = false; } // Reset the PromotionFailureALot counters. NOT_PRODUCT(Universe::heap()->reset_promotion_should_fail();) } // This method is called whenever an attempt to promote an object // fails. Some markOops will need preservation, some will not. Note // that the entire eden is traversed after a failed promotion, with // all forwarded headers replaced by the default markOop. This means // it is not neccessary to preserve most markOops. void PSScavenge::oop_promotion_failed(oop obj, markOop obj_mark) { _promotion_failed = true; if (obj_mark->must_be_preserved_for_promotion_failure(obj)) { // Should use per-worker private stakcs hetre rather than // locking a common pair of stacks. ThreadCritical tc; _preserved_oop_stack.push(obj); _preserved_mark_stack.push(obj_mark); } } bool PSScavenge::should_attempt_scavenge() { ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap(); assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity"); PSGCAdaptivePolicyCounters* counters = heap->gc_policy_counters(); if (UsePerfData) { counters->update_scavenge_skipped(not_skipped); } PSYoungGen* young_gen = heap->young_gen(); PSOldGen* old_gen = heap->old_gen(); if (!ScavengeWithObjectsInToSpace) { // Do not attempt to promote unless to_space is empty if (!young_gen->to_space()->is_empty()) { _consecutive_skipped_scavenges++; if (UsePerfData) { counters->update_scavenge_skipped(to_space_not_empty); } return false; } } // Test to see if the scavenge will likely fail. PSAdaptiveSizePolicy* policy = heap->size_policy(); // A similar test is done in the policy's should_full_GC(). If this is // changed, decide if that test should also be changed. 
size_t avg_promoted = (size_t) policy->padded_average_promoted_in_bytes(); size_t promotion_estimate = MIN2(avg_promoted, young_gen->used_in_bytes()); bool result = promotion_estimate < old_gen->free_in_bytes(); if (PrintGCDetails && Verbose) { gclog_or_tty->print(result ? " do scavenge: " : " skip scavenge: "); gclog_or_tty->print_cr(" average_promoted " SIZE_FORMAT " padded_average_promoted " SIZE_FORMAT " free in old gen " SIZE_FORMAT, (size_t) policy->average_promoted_in_bytes(), (size_t) policy->padded_average_promoted_in_bytes(), old_gen->free_in_bytes()); if (young_gen->used_in_bytes() < (size_t) policy->padded_average_promoted_in_bytes()) { gclog_or_tty->print_cr(" padded_promoted_average is greater" " than maximum promotion = " SIZE_FORMAT, young_gen->used_in_bytes()); } } if (result) { _consecutive_skipped_scavenges = 0; } else { _consecutive_skipped_scavenges++; if (UsePerfData) { counters->update_scavenge_skipped(promoted_too_large); } } return result; } // Used to add tasks GCTaskManager* const PSScavenge::gc_task_manager() { assert(ParallelScavengeHeap::gc_task_manager() != NULL, "shouldn't return NULL"); return ParallelScavengeHeap::gc_task_manager(); } void PSScavenge::initialize() { // Arguments must have been parsed if (AlwaysTenure) { _tenuring_threshold = 0; } else if (NeverTenure) { _tenuring_threshold = markOopDesc::max_age + 1; } else { // We want to smooth out our startup times for the AdaptiveSizePolicy _tenuring_threshold = (UseAdaptiveSizePolicy) ? 
InitialTenuringThreshold : MaxTenuringThreshold; } ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap(); assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity"); PSYoungGen* young_gen = heap->young_gen(); PSOldGen* old_gen = heap->old_gen(); PSPermGen* perm_gen = heap->perm_gen(); // Set boundary between young_gen and old_gen assert(perm_gen->reserved().end() <= old_gen->object_space()->bottom(), "perm above old"); assert(old_gen->reserved().end() <= young_gen->eden_space()->bottom(), "old above young"); _young_generation_boundary = young_gen->eden_space()->bottom(); // Initialize ref handling object for scavenging. MemRegion mr = young_gen->reserved(); _ref_processor = new ReferenceProcessor(mr, // span ParallelRefProcEnabled && (ParallelGCThreads > 1), // mt processing (int) ParallelGCThreads, // mt processing degree true, // mt discovery (int) ParallelGCThreads, // mt discovery degree true, // atomic_discovery NULL, // header provides liveness info false); // next field updates do not need write barrier // Cache the cardtable BarrierSet* bs = Universe::heap()->barrier_set(); assert(bs->kind() == BarrierSet::CardTableModRef, "Wrong barrier set kind"); _card_table = (CardTableExtension*)bs; _counters = new CollectorCounters("PSScavenge", 0); }
ikeji/openjdk7-hotspot
src/share/vm/gc_implementation/parallelScavenge/psScavenge.cpp
C++
gpl-2.0
30,816
<?PHP // THIS FILE IS DEPRECATED! PLEASE DO NOT MAKE CHANGES TO IT! // // IT IS USED ONLY FOR UPGRADES FROM BEFORE MOODLE 1.7, ALL // LATER CHANGES SHOULD USE upgrade.php IN THIS DIRECTORY. function wiki_upgrade($oldversion) { /// This function does anything necessary to upgrade /// older versions to match current functionality global $CFG, $db; if ($oldversion < 2004040200) { execute_sql('ALTER TABLE `'.$CFG->prefix.'wiki` DROP `allowstudentstowiki`'); } if ($oldversion < 2004040700) { execute_sql('ALTER TABLE `'.$CFG->prefix.'wiki` CHANGE `ewikiallowsafehtml` `htmlmode` TINYINT( 4 ) DEFAULT \'0\' NOT NULL'); } if ($oldversion < 2004042100) { execute_sql('ALTER TABLE `'.$CFG->prefix.'wiki` ADD `pagename` VARCHAR( 255 ) AFTER `summary`'); execute_sql('ALTER TABLE `'.$CFG->prefix.'wiki_entries` CHANGE `name` `pagename` VARCHAR( 255 ) NOT NULL'); if ($wikis = get_records('wiki')) { foreach ($wikis as $wiki) { if (empty($wiki->pagename)) { set_field('wiki', 'pagename', $wiki->name, 'id', $wiki->id); } } } } if ($oldversion < 2004053100) { execute_sql('ALTER TABLE `'.$CFG->prefix.'wiki` CHANGE `initialcontent` `initialcontent` VARCHAR( 255 ) NOT NULL DEFAULT \'\''); // Remove obsolete 'initialcontent' values. if ($wikis = get_records('wiki')) { foreach ($wikis as $wiki) { if (!empty($wiki->initialcontent)) { set_field('wiki', 'initialcontent', null, 'id', $wiki->id); } } } } if ($oldversion < 2004061300) { execute_sql('ALTER TABLE `'.$CFG->prefix.'wiki`' .' ADD `setpageflags` TINYINT DEFAULT \'1\' NOT NULL AFTER `ewikiacceptbinary`,' .' ADD `strippages` TINYINT DEFAULT \'1\' NOT NULL AFTER `setpageflags`,' .' ADD `removepages` TINYINT DEFAULT \'1\' NOT NULL AFTER `strippages`,' .' ADD `revertchanges` TINYINT DEFAULT \'1\' NOT NULL AFTER `removepages`'); } if ($oldversion < 2004062400) { execute_sql('ALTER TABLE `'.$CFG->prefix.'wiki`' .' 
ADD `disablecamelcase` TINYINT DEFAULT \'0\' NOT NULL AFTER `ewikiacceptbinary`'); } if ($oldversion < 2004082200) { table_column('wiki_pages', '', 'userid', "integer", "10", "unsigned", "0", "not null", "author"); } if ($oldversion < 2004082303) { // Try to update userid for old records if ($pages = get_records('wiki_pages', 'userid', 0, 'pagename', 'lastmodified,author,pagename,version')) { foreach ($pages as $page) { $name = explode('(', $page->author); $name = trim($name[0]); $name = explode(' ', $name); $firstname = $name[0]; unset($name[0]); $lastname = trim(implode(' ', $name)); if ($user = get_record('user', 'firstname', $firstname, 'lastname', $lastname)) { set_field('wiki_pages', 'userid', $user->id, 'pagename', addslashes($page->pagename), 'version', $page->version); } } } } if ($oldversion < 2004111200) { execute_sql("ALTER TABLE {$CFG->prefix}wiki DROP INDEX course;",false); execute_sql("ALTER TABLE {$CFG->prefix}wiki_entries DROP INDEX course;",false); execute_sql("ALTER TABLE {$CFG->prefix}wiki_entries DROP INDEX userid;",false); execute_sql("ALTER TABLE {$CFG->prefix}wiki_entries DROP INDEX groupid;",false); execute_sql("ALTER TABLE {$CFG->prefix}wiki_entries DROP INDEX wikiid;",false); execute_sql("ALTER TABLE {$CFG->prefix}wiki_entries DROP INDEX pagename;",false); modify_database('','ALTER TABLE prefix_wiki ADD INDEX course (course);'); modify_database('','ALTER TABLE prefix_wiki_entries ADD INDEX course (course);'); modify_database('','ALTER TABLE prefix_wiki_entries ADD INDEX userid (userid);'); modify_database('','ALTER TABLE prefix_wiki_entries ADD INDEX groupid (groupid);'); modify_database('','ALTER TABLE prefix_wiki_entries ADD INDEX wikiid (wikiid);'); modify_database('','ALTER TABLE prefix_wiki_entries ADD INDEX pagename (pagename);'); } if ($oldversion < 2005022000) { // recreating the wiki_pages table completelly (missing id, bug 2608) if ($rows = count_records("wiki_pages")) { // we need to use the temp stuff 
modify_database("","CREATE TABLE `prefix_wiki_pages_tmp` ( `pagename` VARCHAR(160) NOT NULL, `version` INT(10) UNSIGNED NOT NULL DEFAULT 0, `flags` INT(10) UNSIGNED DEFAULT 0, `content` MEDIUMTEXT, `author` VARCHAR(100) DEFAULT 'ewiki', `userid` INT(10) UNSIGNED NOT NULL DEFAULT 0, `created` INT(10) UNSIGNED DEFAULT 0, `lastmodified` INT(10) UNSIGNED DEFAULT 0, `refs` MEDIUMTEXT, `meta` MEDIUMTEXT, `hits` INT(10) UNSIGNED DEFAULT 0, `wiki` INT(10) UNSIGNED NOT NULL);"); execute_sql("INSERT INTO {$CFG->prefix}wiki_pages_tmp (pagename, version, flags, content, author, userid, created, lastmodified, refs, meta, hits, wiki) SELECT pagename, version, flags, content, author, userid, created, lastmodified, refs, meta, hits, wiki FROM {$CFG->prefix}wiki_pages"); $insertafter = true; } execute_sql("DROP TABLE {$CFG->prefix}wiki_pages"); modify_database("","CREATE TABLE `prefix_wiki_pages` ( `id` INT(10) UNSIGNED NOT NULL AUTO_INCREMENT, `pagename` VARCHAR(160) NOT NULL, `version` INT(10) UNSIGNED NOT NULL DEFAULT 0, `flags` INT(10) UNSIGNED DEFAULT 0, `content` MEDIUMTEXT, `author` VARCHAR(100) DEFAULT 'ewiki', `userid` INT(10) UNSIGNED NOT NULL DEFAULT 0, `created` INT(10) UNSIGNED DEFAULT 0, `lastmodified` INT(10) UNSIGNED DEFAULT 0, `refs` MEDIUMTEXT, `meta` MEDIUMTEXT, `hits` INT(10) UNSIGNED DEFAULT 0, `wiki` INT(10) UNSIGNED NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `wiki_pages_uk` (`pagename`,`version`,`wiki`)) TYPE=MyISAM COMMENT='Holds the Wiki-Pages';"); if (!empty($insertafter)) { execute_sql("INSERT INTO {$CFG->prefix}wiki_pages (pagename, version, flags, content, author, userid, created, lastmodified, refs, meta, hits, wiki) SELECT pagename, version, flags, content, author, userid, created, lastmodified, refs, meta, hits, wiki FROM {$CFG->prefix}wiki_pages_tmp"); execute_sql("DROP TABLE {$CFG->prefix}wiki_pages_tmp"); } } if ($oldversion < 2006042800) { execute_sql("UPDATE {$CFG->prefix}wiki SET summary='' WHERE summary IS NULL"); 
table_column('wiki','summary','summary','text','','','','not null'); execute_sql("UPDATE {$CFG->prefix}wiki SET pagename='' WHERE pagename IS NULL"); table_column('wiki','pagename','pagename','varchar','255','','','not null'); execute_sql("UPDATE {$CFG->prefix}wiki SET initialcontent='' WHERE initialcontent IS NULL"); table_column('wiki','initialcontent','initialcontent','varchar','255','','','not null'); } if ($oldversion < 2006092502) { modify_database(""," CREATE TABLE prefix_wiki_locks ( id INT(10) UNSIGNED NOT NULL AUTO_INCREMENT, wikiid INT(10) UNSIGNED NOT NULL, pagename VARCHAR(160) NOT NULL DEFAULT '', lockedby INT(10) NOT NULL DEFAULT 0, lockedsince INT(10) NOT NULL DEFAULT 0, lockedseen INT(10) NOT NULL DEFAULT 0, PRIMARY KEY(id), UNIQUE INDEX wiki_locks_uk(wikiid,pagename), INDEX wiki_locks_ix(lockedseen) );"); } ////// DO NOT ADD NEW THINGS HERE!! USE upgrade.php and the lib/ddllib.php functions. return true; } ?>
xavilal/moodle
mod/wiki/db/mysql.php
PHP
gpl-3.0
8,723
<?php
/**
 * Zend Framework
 *
 * LICENSE
 *
 * This source file is subject to the new BSD license that is bundled
 * with this package in the file LICENSE.txt.
 * It is also available through the world-wide-web at this URL:
 * http://framework.zend.com/license/new-bsd
 * If you did not receive a copy of the license and are unable to
 * obtain it through the world-wide-web, please send an email
 * to license@zend.com so we can send you a copy immediately.
 *
 * @category   Zend
 * @package    Zend_Http
 * @subpackage UserAgent
 * @copyright  Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 */

/**
 * Zend_Http_UserAgent_Features_Adapter_Interface
 */
require_once 'Zend/Http/UserAgent/Features/Adapter.php';

/**
 * Features adapter built with the DeviceAtlas API
 * See installation instruction here : http://deviceatlas.com/licences
 * Download : http://deviceatlas.com/getAPI/php
 *
 * @package    Zend_Http
 * @subpackage UserAgent
 * @copyright  Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 */
class Zend_Http_UserAgent_Features_Adapter_DeviceAtlas implements Zend_Http_UserAgent_Features_Adapter
{
    /**
     * Get features from request
     *
     * @param  array $request $_SERVER variable
     * @param  array $config  user-agent configuration; must contain a "deviceatlas" section
     * @return array          DeviceAtlas properties matched for the request's user agent
     * @throws Zend_Http_UserAgent_Features_Exception when the configuration is incomplete
     */
    public static function getFromRequest($request, array $config)
    {
        // The "deviceatlas" section is always required: it supplies the data
        // file path used below even when the API class is already loaded.
        // (Previously it was only validated when Mobi_Mtld_DA_Api was not yet
        // loaded, which produced an undefined-index access otherwise.)
        if (!isset($config['deviceatlas'])) {
            require_once 'Zend/Http/UserAgent/Features/Exception.php';
            throw new Zend_Http_UserAgent_Features_Exception('"DeviceAtlas" configuration is not defined');
        }

        $config = $config['deviceatlas'];

        if (!class_exists('Mobi_Mtld_DA_Api')) {
            if (empty($config['deviceatlas_lib_dir'])) {
                require_once 'Zend/Http/UserAgent/Features/Exception.php';
                throw new Zend_Http_UserAgent_Features_Exception('The "deviceatlas_lib_dir" parameter is not defined');
            }

            // Include the Device Atlas file from the specified lib_dir
            require_once ($config['deviceatlas_lib_dir'] . '/Mobi/Mtld/DA/Api.php');
        }

        if (empty($config['deviceatlas_data'])) {
            require_once 'Zend/Http/UserAgent/Features/Exception.php';
            throw new Zend_Http_UserAgent_Features_Exception('The "deviceatlas_data" parameter is not defined');
        }

        // Load the device data-tree : e.g. 'json/DeviceAtlas.json'
        $tree = Mobi_Mtld_DA_Api::getTreeFromFile($config['deviceatlas_data']);

        return Mobi_Mtld_DA_Api::getProperties($tree, $request['http_user_agent']);
    }
}
mindvalley/kensei
system/vendor/Zend/Http/UserAgent/Features/Adapter/DeviceAtlas.php
PHP
agpl-3.0
2,924
module RGen

module MetamodelBuilder

module DataTypes

  # An enum object is used to describe possible attribute values within a
  # MetamodelBuilder attribute definition. An attribute defined this way can only
  # take the values specified when creating the Enum object.
  # Literal values can only be symbols or true or false.
  # Optionally a name may be specified for the enum object.
  #
  # Examples:
  #
  #   Enum.new(:name => "AnimalEnum", :literals => [:cat, :dog])
  #   Enum.new(:literals => [:cat, :dog])
  #   Enum.new([:cat, :dog])
  #
  class Enum
    attr_reader :name, :literals

    # Creates a new named enum type object consisting of the elements passed as arguments.
    #
    # +params+ is either an Array of literals (the enum is then named "anonymous")
    # or a Hash with keys :literals (required) and :name (optional).
    def initialize(params)
      # Register this enum with the global constant-order bookkeeping.
      MetamodelBuilder::ConstantOrderHelper.enumCreated(self)
      if params.is_a?(Array)
        @literals = params
        @name = "anonymous"
      elsif params.is_a?(Hash)
        raise StandardError.new("Hash entry :literals is missing") unless params[:literals]
        @literals = params[:literals]
        @name = params[:name] || "anonymous"
      else
        raise StandardError.new("Pass an Array or a Hash")
      end
    end

    # This method can be used to check if an object can be used as value for
    # variables having this enum object as type.
    def validLiteral?(l)
      literals.include?(l)
    end

    # Renders each literal as Ruby source text: symbols become ":sym"
    # (quoted when they contain non-word characters or start with a digit),
    # true/false render as-is; anything else raises.
    def literals_as_strings
      literals.collect do |l|
        if l.is_a?(Symbol)
          if l.to_s =~ /^\d|\W/
            ":'"+l.to_s+"'"
          else
            ":"+l.to_s
          end
        elsif l.is_a?(TrueClass) || l.is_a?(FalseClass)
          l.to_s
        else
          raise StandardError.new("Literal values can only be symbols or true/false")
        end
      end
    end

    def to_s # :nodoc:
      name
    end
  end

  # Boolean is a predefined enum object having Ruby's true and false singletons
  # as possible values.
  Boolean = Enum.new(:name => "Boolean", :literals => [true, false])

  # Long represents a 64-bit Integer
  # This constant is merely a marker for keeping this information in the Ruby version of the metamodel,
  # values of this type will always be instances of Integer or Bignum;
  # Setting it to a string value ensures that it responds to "to_s" which is used in the metamodel generator
  Long = "Long"

end

end

end
nwops/puppet-retrospec
vendor/pup410/lib/puppet/vendor/rgen/lib/rgen/metamodel_builder/data_types.rb
Ruby
agpl-3.0
2,338
package com.puppycrawl.tools.checkstyle.checks.whitespace.emptylineseparator; public class InputEmptyLineSeparatorPrePreviousLineEmptiness { }
AkshitaKukreja30/checkstyle
src/test/resources/com/puppycrawl/tools/checkstyle/checks/whitespace/emptylineseparator/InputEmptyLineSeparatorPrePreviousLineEmptiness.java
Java
lgpl-2.1
146
package route53 import ( "net/url" "regexp" "github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go/aws/client" "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/private/protocol/restxml" ) func init() { initClient = func(c *client.Client) { c.Handlers.Build.PushBack(sanitizeURL) } initRequest = func(r *request.Request) { switch r.Operation.Name { case opChangeResourceRecordSets: r.Handlers.UnmarshalError.Remove(restxml.UnmarshalErrorHandler) r.Handlers.UnmarshalError.PushBack(unmarshalChangeResourceRecordSetsError) } } } var reSanitizeURL = regexp.MustCompile(`\/%2F\w+%2F`) func sanitizeURL(r *request.Request) { r.HTTPRequest.URL.RawPath = reSanitizeURL.ReplaceAllString(r.HTTPRequest.URL.RawPath, "/") // Update Path so that it reflects the cleaned RawPath updated, err := url.Parse(r.HTTPRequest.URL.RawPath) if err != nil { r.Error = awserr.New(request.ErrCodeSerialization, "failed to clean Route53 URL", err) return } // Take the updated path so the requests's URL Path has parity with RawPath. r.HTTPRequest.URL.Path = updated.Path }
kubernetes/kops
vendor/github.com/aws/aws-sdk-go/service/route53/customizations.go
GO
apache-2.0
1,121
/* * Copyright 2015 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.apiman.gateway.engine.es.beans; /** * Used to store a primitive value into the shared state ES document. * * @author eric.wittmann@redhat.com */ public class PrimitiveBean { private String value; private String type; /** * Constructor. */ public PrimitiveBean() { } /** * @return the value */ public String getValue() { return value; } /** * @param value the value to set */ public void setValue(String value) { this.value = value; } /** * @return the type */ public String getType() { return type; } /** * @param type the type to set */ public void setType(String type) { this.type = type; } }
jasonchaffee/apiman
gateway/engine/es/src/main/java/io/apiman/gateway/engine/es/beans/PrimitiveBean.java
Java
apache-2.0
1,370
/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package system import ( "github.com/golang/glog" "k8s.io/kubernetes/pkg/util/errors" ) // Validator is the interface for all validators. type Validator interface { // Name is the name of the validator. Name() string // Validate is the validate function. Validate(SysSpec) error } // validators are all the validators. var validators = []Validator{ &OSValidator{}, &KernelValidator{}, &CgroupsValidator{}, &DockerValidator{}, } // Validate uses all validators to validate the system. func Validate() error { var errs []error spec := DefaultSysSpec for _, v := range validators { glog.Infof("Validating %s...", v.Name()) errs = append(errs, v.Validate(spec)) } return errors.NewAggregate(errs) }
rawlingsj/gofabric8
vendor/k8s.io/kubernetes/test/e2e_node/system/validators.go
GO
apache-2.0
1,288
package com.thinkaurelius.titan.hadoop.compat.h1;

import com.thinkaurelius.titan.graphdb.configuration.TitanConstants;
import com.thinkaurelius.titan.hadoop.config.job.JobClasspathConfigurer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskInputOutputContext;
import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver;

import com.thinkaurelius.titan.hadoop.HadoopGraph;
import com.thinkaurelius.titan.hadoop.compat.HadoopCompat;
import com.thinkaurelius.titan.hadoop.compat.HadoopCompiler;

/**
 * {@link HadoopCompat} implementation backed by the Hadoop 1 MapReduce APIs.
 * Each method is a thin adapter over the Hadoop-1-specific class or
 * configuration key name.
 */
public class Hadoop1Compat implements HadoopCompat {

    // Hadoop 1 ("mapred.*") configuration key names.
    static final String CFG_SPECULATIVE_MAPS = "mapred.map.tasks.speculative.execution";
    static final String CFG_SPECULATIVE_REDUCES = "mapred.reduce.tasks.speculative.execution";
    static final String CFG_JOB_JAR = "mapred.jar";

    /** Creates a Hadoop-1 job compiler for the given graph. */
    @Override
    public HadoopCompiler newCompiler(HadoopGraph g) {
        return new Hadoop1Compiler(g);
    }

    /** In Hadoop 1, TaskAttemptContext is a concrete class constructed directly. */
    @Override
    public TaskAttemptContext newTask(Configuration c, TaskAttemptID t) {
        return new TaskAttemptContext(c, t);
    }

    @Override
    public String getSpeculativeMapConfigKey() {
        return CFG_SPECULATIVE_MAPS;
    }

    @Override
    public String getSpeculativeReduceConfigKey() {
        return CFG_SPECULATIVE_REDUCES;
    }

    @Override
    public String getMapredJarConfigKey() {
        return CFG_JOB_JAR;
    }

    /** Increments the given counter on the task context by {@code incr}. */
    @Override
    public void incrementContextCounter(TaskInputOutputContext context,
                                        Enum<?> counter, long incr) {
        context.getCounter(counter).increment(incr);
    }

    @Override
    public Configuration getContextConfiguration(TaskAttemptContext context) {
        return context.getConfiguration();
    }

    /** Reads a counter value from an MRUnit driver (used by tests). */
    @Override
    public long getCounter(MapReduceDriver counters, Enum<?> e) {
        return counters.getCounters().findCounter(e).getValue();
    }

    @Override
    public JobClasspathConfigurer newMapredJarConfigurer(String mapredJarPath) {
        return new MapredJarConfigurer(mapredJarPath);
    }

    /**
     * Distributed-cache configurer pointing at the titan-hadoop-core jar
     * for the current Titan version.
     */
    @Override
    public JobClasspathConfigurer newDistCacheConfigurer() {
        return new DistCacheConfigurer("titan-hadoop-core-" + TitanConstants.VERSION + ".jar");
    }

    @Override
    public Configuration getJobContextConfiguration(JobContext context) {
        return context.getConfiguration();
    }

    /** Wraps a configuration so callers cannot mutate it. */
    @Override
    public Configuration newImmutableConfiguration(Configuration base) {
        return new ImmutableConfiguration(base);
    }
}
evanv/titan
titan-hadoop-parent/titan-hadoop-1/src/main/java/com/thinkaurelius/titan/hadoop/compat/h1/Hadoop1Compat.java
Java
apache-2.0
2,628
const path = require('path'); const fs = require('fs'); const EventEmitter = require('events').EventEmitter; const Shard = require('./Shard'); const Collection = require('../util/Collection'); const Util = require('../util/Util'); /** * This is a utility class that can be used to help you spawn shards of your client. Each shard is completely separate * from the other. The Shard Manager takes a path to a file and spawns it under the specified amount of shards safely. * If you do not select an amount of shards, the manager will automatically decide the best amount. * @extends {EventEmitter} */ class ShardingManager extends EventEmitter { /** * @param {string} file Path to your shard script file * @param {Object} [options] Options for the sharding manager * @param {number|string} [options.totalShards='auto'] Number of shards to spawn, or "auto" * @param {boolean} [options.respawn=true] Whether shards should automatically respawn upon exiting * @param {string[]} [options.shardArgs=[]] Arguments to pass to the shard script when spawning * @param {string} [options.token] Token to use for automatic shard count and passing to shards */ constructor(file, options = {}) { super(); options = Util.mergeDefault({ totalShards: 'auto', respawn: true, shardArgs: [], token: null, }, options); /** * Path to the shard script file * @type {string} */ this.file = file; if (!file) throw new Error('File must be specified.'); if (!path.isAbsolute(file)) this.file = path.resolve(process.cwd(), file); const stats = fs.statSync(this.file); if (!stats.isFile()) throw new Error('File path does not point to a file.'); /** * Amount of shards that this manager is going to spawn * @type {number|string} */ this.totalShards = options.totalShards; if (this.totalShards !== 'auto') { if (typeof this.totalShards !== 'number' || isNaN(this.totalShards)) { throw new TypeError('Amount of shards must be a number.'); } if (this.totalShards < 1) throw new RangeError('Amount of shards must be at least 1.'); if 
(this.totalShards !== Math.floor(this.totalShards)) { throw new RangeError('Amount of shards must be an integer.'); } } /** * Whether shards should automatically respawn upon exiting * @type {boolean} */ this.respawn = options.respawn; /** * An array of arguments to pass to shards * @type {string[]} */ this.shardArgs = options.shardArgs; /** * Token to use for obtaining the automatic shard count, and passing to shards * @type {?string} */ this.token = options.token ? options.token.replace(/^Bot\s*/i, '') : null; /** * A collection of shards that this manager has spawned * @type {Collection<number, Shard>} */ this.shards = new Collection(); } /** * Spawns a single shard. * @param {number} id The ID of the shard to spawn. **This is usually not necessary** * @returns {Promise<Shard>} */ createShard(id = this.shards.size) { const shard = new Shard(this, id, this.shardArgs); this.shards.set(id, shard); /** * Emitted upon launching a shard. * @event ShardingManager#launch * @param {Shard} shard Shard that was launched */ this.emit('launch', shard); return Promise.resolve(shard); } /** * Spawns multiple shards. 
* @param {number} [amount=this.totalShards] Number of shards to spawn * @param {number} [delay=7500] How long to wait in between spawning each shard (in milliseconds) * @returns {Promise<Collection<number, Shard>>} */ spawn(amount = this.totalShards, delay = 7500) { if (amount === 'auto') { return Util.fetchRecommendedShards(this.token).then(count => { this.totalShards = count; return this._spawn(count, delay); }); } else { if (typeof amount !== 'number' || isNaN(amount)) throw new TypeError('Amount of shards must be a number.'); if (amount < 1) throw new RangeError('Amount of shards must be at least 1.'); if (amount !== Math.floor(amount)) throw new TypeError('Amount of shards must be an integer.'); return this._spawn(amount, delay); } } /** * Actually spawns shards, unlike that poser above >:( * @param {number} amount Number of shards to spawn * @param {number} delay How long to wait in between spawning each shard (in milliseconds) * @returns {Promise<Collection<number, Shard>>} * @private */ _spawn(amount, delay) { return new Promise(resolve => { if (this.shards.size >= amount) throw new Error(`Already spawned ${this.shards.size} shards.`); this.totalShards = amount; this.createShard(); if (this.shards.size >= this.totalShards) { resolve(this.shards); return; } if (delay <= 0) { while (this.shards.size < this.totalShards) this.createShard(); resolve(this.shards); } else { const interval = setInterval(() => { this.createShard(); if (this.shards.size >= this.totalShards) { clearInterval(interval); resolve(this.shards); } }, delay); } }); } /** * Send a message to all shards. * @param {*} message Message to be sent to the shards * @returns {Promise<Shard[]>} */ broadcast(message) { const promises = []; for (const shard of this.shards.values()) promises.push(shard.send(message)); return Promise.all(promises); } /** * Evaluates a script on all shards, in the context of the Clients. 
* @param {string} script JavaScript to run on each shard * @returns {Promise<Array>} Results of the script execution */ broadcastEval(script) { const promises = []; for (const shard of this.shards.values()) promises.push(shard.eval(script)); return Promise.all(promises); } /** * Fetches a client property value of each shard. * @param {string} prop Name of the client property to get, using periods for nesting * @returns {Promise<Array>} * @example * manager.fetchClientValues('guilds.size') * .then(results => { * console.log(`${results.reduce((prev, val) => prev + val, 0)} total guilds`); * }) * .catch(console.error); */ fetchClientValues(prop) { if (this.shards.size === 0) return Promise.reject(new Error('No shards have been spawned.')); if (this.shards.size !== this.totalShards) return Promise.reject(new Error('Still spawning shards.')); const promises = []; for (const shard of this.shards.values()) promises.push(shard.fetchClientValue(prop)); return Promise.all(promises); } } module.exports = ShardingManager;
willkiller13/RivoBot
sharding/ShardingManager.js
JavaScript
apache-2.0
6,803
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Elasticsearch.Net;

namespace Nest
{
	public partial class ElasticClient
	{
		// More-like-this API surface: each overload dispatches an MLT request
		// through the shared Dispatcher/RawDispatch plumbing, copying the
		// embedded search's query-string parameters onto the request first.

		/// <inheritdoc />
		public ISearchResponse<T> MoreLikeThis<T>(Func<MoreLikeThisDescriptor<T>, MoreLikeThisDescriptor<T>> mltSelector)
			where T : class
		{
			return this.Dispatcher.Dispatch<MoreLikeThisDescriptor<T>, MoreLikeThisRequestParameters, SearchResponse<T>>(
				mltSelector,
				(p, d) =>
				{
					// Widen to the interface to reach the Search property.
					IMoreLikeThisRequest r = d;
					CopySearchRequestParameters(d);
					return this.RawDispatch.MltDispatch<SearchResponse<T>>(p, r.Search);
				}
			);
		}

		/// <inheritdoc />
		public ISearchResponse<T> MoreLikeThis<T>(IMoreLikeThisRequest moreLikeThisRequest)
			where T : class
		{
			return this.Dispatcher.Dispatch<IMoreLikeThisRequest, MoreLikeThisRequestParameters, SearchResponse<T>>(
				moreLikeThisRequest,
				(p, d) =>
				{
					CopySearchRequestParameters(d);
					return this.RawDispatch.MltDispatch<SearchResponse<T>>(p, d.Search);
				}
			);
		}

		/// <inheritdoc />
		public Task<ISearchResponse<T>> MoreLikeThisAsync<T>(Func<MoreLikeThisDescriptor<T>, MoreLikeThisDescriptor<T>> mltSelector)
			where T : class
		{
			return this.Dispatcher.DispatchAsync<MoreLikeThisDescriptor<T>, MoreLikeThisRequestParameters, SearchResponse<T>, ISearchResponse<T>>(
				mltSelector,
				(p, d) =>
				{
					IMoreLikeThisRequest r = d;
					CopySearchRequestParameters(d);
					return this.RawDispatch.MltDispatchAsync<SearchResponse<T>>(p, r.Search);
				}
			);
		}

		/// <inheritdoc />
		public Task<ISearchResponse<T>> MoreLikeThisAsync<T>(IMoreLikeThisRequest moreLikeThisRequest)
			where T : class
		{
			return this.Dispatcher.DispatchAsync<IMoreLikeThisRequest, MoreLikeThisRequestParameters, SearchResponse<T>, ISearchResponse<T>>(
				moreLikeThisRequest,
				(p, d) =>
				{
					CopySearchRequestParameters(d);
					return this.RawDispatch.MltDispatchAsync<SearchResponse<T>>(p, d.Search);
				}
			);
		}

		/// <summary>
		/// Copies query-string values from the request's embedded search onto
		/// the MLT request itself; a no-op when no search body is attached.
		/// </summary>
		private static void CopySearchRequestParameters(IMoreLikeThisRequest request)
		{
			if (request.Search == null) return;
			request.RequestParameters.CopyQueryStringValuesFrom(request.Search.QueryString);
		}
	}
}
joehmchan/elasticsearch-net
src/Nest/ElasticClient-MoreLikeThis.cs
C#
apache-2.0
2,228
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.executiongraph; /** Base class for exceptions occurring in the {@link ExecutionGraph}. */ public class ExecutionGraphException extends Exception { private static final long serialVersionUID = -8253451032797220657L; public ExecutionGraphException(String message) { super(message); } public ExecutionGraphException(String message, Throwable cause) { super(message, cause); } public ExecutionGraphException(Throwable cause) { super(cause); } }
tillrohrmann/flink
flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionGraphException.java
Java
apache-2.0
1,338
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.rest.messages.job.metrics; /** Headers for aggregating job metrics. */ public class AggregatedJobMetricsHeaders extends AbstractAggregatedMetricsHeaders<AggregatedJobMetricsParameters> { private static final AggregatedJobMetricsHeaders INSTANCE = new AggregatedJobMetricsHeaders(); private AggregatedJobMetricsHeaders() {} @Override public String getTargetRestEndpointURL() { return "/jobs/metrics"; } @Override public AggregatedJobMetricsParameters getUnresolvedMessageParameters() { return new AggregatedJobMetricsParameters(); } public static AggregatedJobMetricsHeaders getInstance() { return INSTANCE; } @Override public String getDescription() { return "Provides access to aggregated job metrics."; } }
apache/flink
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/metrics/AggregatedJobMetricsHeaders.java
Java
apache-2.0
1,648
/* * Copyright (C) 2010-2101 Alibaba Group Holding Limited. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.otter.node.etl.conflict.exception; import org.apache.commons.lang.exception.NestableRuntimeException; /** * @author jianghang 2012-4-12 下午02:59:12 * @version 4.0.2 */ public class ConflictException extends NestableRuntimeException { private static final long serialVersionUID = -7288830284122672209L; private String errorCode; private String errorDesc; public ConflictException(String errorCode){ super(errorCode); } public ConflictException(String errorCode, Throwable cause){ super(errorCode, cause); } public ConflictException(String errorCode, String errorDesc){ super(errorCode + ":" + errorDesc); } public ConflictException(String errorCode, String errorDesc, Throwable cause){ super(errorCode + ":" + errorDesc, cause); } public ConflictException(Throwable cause){ super(cause); } public String getErrorCode() { return errorCode; } public String getErrorDesc() { return errorDesc; } @Override public Throwable fillInStackTrace() { return this; } }
wangcan2014/otter
node/etl/src/main/java/com/alibaba/otter/node/etl/conflict/exception/ConflictException.java
Java
apache-2.0
1,785
package org.csstudio.swt.xygraph.util;

import org.csstudio.swt.xygraph.figures.XYGraph;
import org.eclipse.draw2d.FigureUtilities;
import org.eclipse.draw2d.SWTGraphics;
import org.eclipse.draw2d.geometry.Dimension;
import org.eclipse.draw2d.geometry.Rectangle;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Cursor;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.graphics.Transform;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.FileDialog;

/**
 * Desktop (SWT) implementation of {@link SingleSourceHelper}: cursor creation,
 * vertical-text image rendering, graph snapshots and the image save dialog.
 */
public class SingleSourceHelperImpl extends SingleSourceHelper {

    @Override
    protected Cursor createInternalCursor(Display display, ImageData imageData,
            int width, int height, int style) {
        // The style argument is unused on this platform.
        return new Cursor(display, imageData, width, height);
    }

    /**
     * Renders {@code text} rotated 90 degrees (direction chosen by
     * {@code upToDown}) into a transparent-background image.
     */
    @Override
    protected Image createInternalVerticalTextImage(String text, Font font,
            RGB color, boolean upToDown) {
        final Dimension titleSize = FigureUtilities.getTextExtents(text, font);

        // Width/height are swapped because the text is drawn rotated.
        final int w = titleSize.height;
        final int h = titleSize.width + 1;
        Image image = new Image(Display.getCurrent(), w, h);

        final GC gc = new GC(image);
        final Color titleColor = new Color(Display.getCurrent(), color);
        // This background color is later declared the transparent pixel.
        RGB transparentRGB = new RGB(240, 240, 240);

        gc.setBackground(XYGraphMediaFactory.getInstance().getColor(
                transparentRGB));
        gc.fillRectangle(image.getBounds());
        gc.setForeground(titleColor);
        gc.setFont(font);
        final Transform tr = new Transform(Display.getCurrent());
        if (!upToDown) {
            // Bottom-to-top: shift down, then rotate counter-clockwise.
            tr.translate(0, h);
            tr.rotate(-90);
            gc.setTransform(tr);
        } else {
            // Top-to-bottom: shift right, then rotate clockwise.
            tr.translate(w, 0);
            tr.rotate(90);
            gc.setTransform(tr);
        }
        gc.drawText(text, 0, 0);
        tr.dispose();
        gc.dispose();

        // Rebuild the image with the background marked transparent; the
        // intermediate image and color are disposed first to avoid leaks.
        final ImageData imageData = image.getImageData();
        image.dispose();
        titleColor.dispose();
        imageData.transparentPixel = imageData.palette.getPixel(transparentRGB);
        image = new Image(Display.getCurrent(), imageData);
        return image;
    }

    /**
     * Paints the graph into a new image, 3px of padding on each side.
     * The caller owns (and must dispose) the returned image.
     */
    @Override
    protected Image getInternalXYGraphSnapShot(XYGraph xyGraph) {
        Rectangle bounds = xyGraph.getBounds();
        Image image = new Image(null, bounds.width + 6, bounds.height + 6);
        GC gc = new GC(image);
        SWTGraphics graphics = new SWTGraphics(gc);
        graphics.translate(-bounds.x + 3, -bounds.y + 3);
        graphics.setForegroundColor(xyGraph.getForegroundColor());
        graphics.setBackgroundColor(xyGraph.getBackgroundColor());
        xyGraph.paint(graphics);
        gc.dispose();
        return image;
    }

    /**
     * Opens a save dialog for PNG export.
     * @return the chosen path, or null if the dialog was cancelled
     */
    @Override
    protected String getInternalImageSavePath() {
        FileDialog dialog = new FileDialog(Display.getDefault().getShells()[0],
                SWT.SAVE);
        dialog.setFilterNames(new String[] { "PNG Files", "All Files (*.*)" });
        dialog.setFilterExtensions(new String[] { "*.png", "*.*" }); // Windows
        String path = dialog.open();
        return path;
    }
}
jhshin9/scouter
scouter.client/src/org/csstudio/swt/xygraph/util/SingleSourceHelperImpl.java
Java
apache-2.0
3,045
<?php if (!defined('APPLICATION')) exit(); $Alt = FALSE; $Session = Gdn::Session(); $EditUser = $Session->CheckPermission('Garden.Users.Edit'); $DeleteUser = $Session->CheckPermission('Garden.Users.Delete'); foreach ($this->UserData->Result() as $User) { $Alt = $Alt ? FALSE : TRUE; ?> <tr id="<?php echo "UserID_{$User->UserID}"; ?>"<?php echo $Alt ? ' class="Alt"' : ''; ?>> <!-- <td class="CheckboxCell"><input type="checkbox" name="LogID[]" value="<?php echo $User->UserID; ?>" /></td>--> <td><strong><?php echo UserAnchor($User); ?></strong></td> <td class="Alt"><?php echo Gdn_Format::Email($User->Email); ?></td> <td style="max-width: 200px;"> <?php $Roles = GetValue('Roles', $User, array()); $RolesString = ''; if ($User->Banned && !in_array('Banned', $Roles)) { $RolesString = T('Banned'); } if ($User->Admin > 1) { $RolesString = ConcatSep(', ', $RolesString, T('System')); } foreach ($Roles as $RoleID => $RoleName) { $Query = http_build_query(array('Keywords' => $RoleName)); $RolesString = ConcatSep(', ', $RolesString, '<a href="'.Url('/user/browse?'.$Query).'">'.htmlspecialchars($RoleName).'</a>'); } echo $RolesString; ?> </td> <td class="Alt"><?php echo Gdn_Format::Date($User->DateFirstVisit, 'html'); ?></td> <td><?php echo Gdn_Format::Date($User->DateLastActive, 'html'); ?></td> <td><?php echo htmlspecialchars($User->LastIPAddress); ?></td> <?php $this->EventArgs['User'] = $User; $this->FireEvent('UserCell'); ?> <?php if ($EditUser || $DeleteUser) { ?> <td><?php if ($EditUser) echo Anchor(T('Edit'), '/user/edit/'.$User->UserID, 'Popup SmallButton'); if ($DeleteUser && $User->UserID != $Session->User->UserID) echo Anchor(T('Delete'), '/user/delete/'.$User->UserID, 'SmallButton'); $this->EventArguments['User'] = $User; $this->FireEvent('UserListOptions'); ?></td> <?php } ?> </tr> <?php }
ppazos/cabolabs-web-old
forum/applications/dashboard/views/user/users.php
PHP
apache-2.0
2,182
/*
Copyright 2016 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package v1alpha1

// This file contains a collection of methods that can be used from go-restful to
// generate Swagger API documentation for its models. Please read this PR for more
// information on the implementation: https://github.com/emicklei/go-restful/pull/215
//
// TODOs are ignored from the parser (e.g. TODO(andronat):... || TODO:...) if and only if
// they are on one line! For multiple line or blocks that you want to ignore use ---.
// Any context after a --- is ignored.
//
// Those methods can be generated by using hack/update-generated-swagger-docs.sh

// NOTE(review): everything between the AUTO-GENERATED markers is produced by
// the script above — do not hand-edit; regenerate instead.

// AUTO-GENERATED FUNCTIONS START HERE
var map_CertificateSigningRequest = map[string]string{
	"":       "Describes a certificate signing request",
	"spec":   "The certificate request itself and any additional information.",
	"status": "Derived information about the request.",
}

// SwaggerDoc returns the field-level swagger documentation for CertificateSigningRequest.
func (CertificateSigningRequest) SwaggerDoc() map[string]string {
	return map_CertificateSigningRequest
}

var map_CertificateSigningRequestCondition = map[string]string{
	"type":           "request approval state, currently Approved or Denied.",
	"reason":         "brief reason for the request state",
	"message":        "human readable message with details about the request state",
	"lastUpdateTime": "timestamp for the last update to this condition",
}

// SwaggerDoc returns the field-level swagger documentation for CertificateSigningRequestCondition.
func (CertificateSigningRequestCondition) SwaggerDoc() map[string]string {
	return map_CertificateSigningRequestCondition
}

var map_CertificateSigningRequestSpec = map[string]string{
	"":         "This information is immutable after the request is created. Only the Request and ExtraInfo fields can be set on creation, other fields are derived by Kubernetes and cannot be modified by users.",
	"request":  "Base64-encoded PKCS#10 CSR data",
	"usages":   "allowedUsages specifies a set of usage contexts the key will be valid for. See: https://tools.ietf.org/html/rfc5280#section-4.2.1.3\n https://tools.ietf.org/html/rfc5280#section-4.2.1.12",
	"username": "Information about the requesting user (if relevant) See user.Info interface for details",
}

// SwaggerDoc returns the field-level swagger documentation for CertificateSigningRequestSpec.
func (CertificateSigningRequestSpec) SwaggerDoc() map[string]string {
	return map_CertificateSigningRequestSpec
}

var map_CertificateSigningRequestStatus = map[string]string{
	"conditions":  "Conditions applied to the request, such as approval or denial.",
	"certificate": "If request was approved, the controller will place the issued certificate here.",
}

// SwaggerDoc returns the field-level swagger documentation for CertificateSigningRequestStatus.
func (CertificateSigningRequestStatus) SwaggerDoc() map[string]string {
	return map_CertificateSigningRequestStatus
}

// AUTO-GENERATED FUNCTIONS END HERE
thomasschickinger/kubernetes
staging/src/k8s.io/client-go/pkg/apis/certificates/v1alpha1/types_swagger_doc_generated.go
GO
apache-2.0
3,155
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */
'use strict';

const path = require('../fastpath');
const getPlatformExtension = require('./getPlatformExtension');

/**
 * Parse an asset filename like "icon@2x.ios.png" into its components.
 *
 * @param {string} filename - asset path or basename to parse
 * @param {?Set<string>} platforms - recognized platform extensions (e.g. 'ios')
 * @returns {{resolution: number, assetName: string, type: string,
 *            name: string, platform: ?string}}
 *   `assetName` is the filename with the "@Nx" resolution suffix and platform
 *   extension stripped; `resolution` defaults to 1 when absent or unparsable.
 */
function getAssetDataFromName(filename, platforms) {
  const ext = path.extname(filename);
  const platformExt = getPlatformExtension(filename, platforms);

  // Match an optional "@<float>x" scale suffix, an optional ".<platform>"
  // part, and the file extension, anchored to the end of the name.
  let pattern = '@([\\d\\.]+)x';
  if (platformExt != null) {
    pattern += '(\\.' + platformExt + ')?';
  }
  pattern += '\\' + ext + '$';
  const re = new RegExp(pattern);

  const match = filename.match(re);
  let resolution;

  if (!(match && match[1])) {
    resolution = 1;
  } else {
    // BUG FIX: parseFloat takes a single argument — the stray radix argument
    // (a parseInt-ism) was silently ignored.
    resolution = parseFloat(match[1]);
    if (isNaN(resolution)) {
      resolution = 1;
    }
  }

  let assetName;
  if (match) {
    // Strip "@Nx[.platform]" but keep the extension.
    assetName = filename.replace(re, ext);
  } else if (platformExt != null) {
    // No scale suffix: strip only the platform extension.
    assetName = filename.replace(new RegExp(`\\.${platformExt}\\${ext}`), ext);
  } else {
    assetName = filename;
  }

  return {
    resolution: resolution,
    assetName: assetName,
    type: ext.slice(1),
    name: path.basename(assetName, ext),
    platform: platformExt,
  };
}

module.exports = getAssetDataFromName;
facebook/node-haste
src/lib/getAssetDataFromName.js
JavaScript
bsd-3-clause
1,418
var utils = require('../../lib/utils'); // if they agree to the ULA, notify hubspot, create a trial and send verification link module.exports = function trialSignup(request, reply) { var postToHubspot = request.server.methods.npme.sendData, getCustomer = request.server.methods.npme.getCustomer; var opts = {}; var data = { hs_context: { pageName: "enterprise-trial-signup", ipAddress: utils.getUserIP(request) }, // we can trust the email is fine because we've verified it in the show-ula handler email: request.payload.customer_email, }; postToHubspot(process.env.HUBSPOT_FORM_NPME_AGREED_ULA, data, function(er) { if (er) { request.logger.error('Could not hit ULA notification form on Hubspot'); request.logger.error(er); reply.view('errors/internal', opts).code(500); return; } getCustomer(data.email, function(err, customer) { if (err) { request.logger.error('Unknown problem with customer record'); request.logger.error(err); reply.view('errors/internal', opts).code(500); return; } if (!customer) { request.logger.error('Unable to locate customer error ' + data.email); reply.view('errors/internal', opts).code(500); return; } if (customer && customer.id + '' === request.payload.customer_id + '') { return createTrialAccount(request, reply, customer); } request.logger.error('Unable to verify customer record ', data.email); reply.view('errors/internal', opts).code(500); }); }); }; function createTrialAccount(request, reply, customer) { var createTrial = request.server.methods.npme.createTrial; var opts = {}; createTrial(customer, function(er, trial) { if (er) { request.logger.error('There was an error with creating a trial for ', customer.id); request.logger.error(er); reply.view('errors/internal', opts).code(500); return; } return sendVerificationEmail(request, reply, customer, trial); }); } function sendVerificationEmail(request, reply, customer, trial) { var opts = {}; var sendEmail = request.server.methods.email.send; var user = { name: customer.name, email: customer.email, 
verification_key: trial.verification_key }; sendEmail('npme-trial-verification', user, request.redis) .catch(function(er) { request.logger.error('Unable to send verification email to ', customer); request.logger.error(er); reply.view('errors/internal', opts).code(500); return; }) .then(function() { return reply.view('enterprise/thanks', opts); }); }
AgtLucas/newww
facets/enterprise/show-trial-signup.js
JavaScript
isc
2,680
using System;
using System.Collections.Generic;
using UIKit;
using Foundation;
using System.Reflection;

namespace FontList.Code
{
	/// <summary>
	/// Combined DataSource and Delegate for our UITableView
	/// </summary>
	public class NavItemTableSource : UITableViewSource
	{
		// Grouped navigation items; one NavItemGroup per table section.
		protected List<NavItemGroup> navItems;
		// Reuse identifier shared by every cell this source creates.
		string cellIdentifier = "NavTableCellView";
		// Controller that destination view controllers are pushed onto.
		UINavigationController navigationController;

		public NavItemTableSource (UINavigationController navigationController, List<NavItemGroup> items)
		{
			navItems = items;
			this.navigationController = navigationController;
		}

		/// <summary>
		/// Called by the TableView to determine how many sections(groups) there are.
		/// </summary>
		public override nint NumberOfSections (UITableView tableView)
		{
			return navItems.Count;
		}

		/// <summary>
		/// Called by the TableView to determine how many cells to create for that particular section.
		/// </summary>
		public override nint RowsInSection (UITableView tableview, nint section)
		{
			return navItems[(int)section].Items.Count;
		}

		/// <summary>
		/// Called by the TableView to retrieve the header text for the particular section(group)
		/// </summary>
		public override string TitleForHeader (UITableView tableView, nint section)
		{
			return navItems[(int)section].Name;
		}

		/// <summary>
		/// Called by the TableView to retrieve the footer text for the particular section(group)
		/// </summary>
		public override string TitleForFooter (UITableView tableView, nint section)
		{
			return navItems[(int)section].Footer;
		}

		/// <summary>
		/// Called by the TableView to actually build each cell.
		/// </summary>
		public override UITableViewCell GetCell (UITableView tableView, NSIndexPath indexPath)
		{
			NavItem navItem = this.navItems[indexPath.Section].Items[indexPath.Row];

			var cell = tableView.DequeueReusableCell (this.cellIdentifier);
			if (cell == null)
			{
				cell = new UITableViewCell (UITableViewCellStyle.Default, this.cellIdentifier);
				// NOTE(review): Tag is set to TickCount, presumably as a
				// quasi-unique marker for debugging — nothing here reads it.
				cell.Tag = Environment.TickCount;
			}

			//---- set the cell properties
			cell.TextLabel.Text = this.navItems[indexPath.Section].Items[indexPath.Row].Name;
			cell.Accessory = UITableViewCellAccessory.DisclosureIndicator;
			// Apply a per-item font override when one is configured.
			if (navItem.Font != null)
			{
				cell.TextLabel.Font = navItem.Font;
			}
			return cell;
		}

		/// <summary>
		/// Is called when a row is selected
		/// </summary>
		public override void RowSelected (UITableView tableView, NSIndexPath indexPath)
		{
			//---- get a reference to the nav item
			NavItem navItem = navItems[indexPath.Section].Items[indexPath.Row];

			// if the nav item has a proper controller, push it on to the NavigationController
			// NOTE: we could also raise an event here, to loosely couple this, but isn't neccessary,
			// because we'll only ever use this this way
			if (navItem.Controller != null)
			{
				navigationController.PushViewController (navItem.Controller, true);
				// show the nav bar (we don't show it on the home page)
				navigationController.NavigationBarHidden = false;
			}
			else
			{
				// No cached controller instance: lazily construct one via
				// reflection from the configured ControllerType.
				if (navItem.ControllerType != null)
				{
					ConstructorInfo ctor = null;

					// if the nav item has constructor aguments
					if (navItem.ControllerConstructorArgs.Length > 0)
					{
						// look for the constructor
						ctor = navItem.ControllerType.GetConstructor (navItem.ControllerConstructorTypes);
					}
					else
					{
						// search for the default constructor
						ctor = navItem.ControllerType.GetConstructor (System.Type.EmptyTypes);
					}

					// if we found the constructor
					if (ctor != null)
					{
						UIViewController instance = null;

						if (navItem.ControllerConstructorArgs.Length > 0)
						{
							// instance the view controller
							instance = ctor.Invoke (navItem.ControllerConstructorArgs) as UIViewController;
						}
						else
						{
							// instance the view controller
							instance = ctor.Invoke (null) as UIViewController;
						}

						if (instance != null)
						{
							// save the object (so subsequent selections take the
							// fast path above instead of re-constructing)
							navItem.Controller = instance;

							// push the view controller onto the stack
							navigationController.PushViewController (navItem.Controller, true);
						}
						else
						{
							Console.WriteLine ("instance of view controller not created");
						}
					}
					else
					{
						Console.WriteLine ("constructor not found");
					}
				}
			}
		}
	}
}
davidrynn/monotouch-samples
FontList/Code/NavItemTableSource.cs
C#
mit
4,333
// Integration test: mongojs aggregate() in callback style and as a stream.
var insert = require('./insert')
var concat = require('concat-stream')

insert('aggregate', [{
  name: 'Squirtle', type: 'water'
}, {
  name: 'Starmie', type: 'water'
}, {
  name: 'Charmander', type: 'fire'
}, {
  name: 'Lapras', type: 'water'
}], function (db, t, done) {
  // Group the fixtures by type and project the group key out as `foo`.
  db.a.aggregate([{$group: {_id: '$type'}}, {$project: {_id: 0, foo: '$_id'}}], function (err, types) {
    // BUG FIX: the callback previously ignored `err` and littered the output
    // with debug console.logs; a driver error now fails the test cleanly
    // instead of crashing on `types.map`.
    if (err) {
      t.fail(err)
      return t.end()
    }
    var arr = types.map(function (x) { return x.foo })
    t.equal(types.length, 2)
    t.notEqual(arr.indexOf('fire'), -1)
    t.notEqual(arr.indexOf('water'), -1)

    // test as a stream
    var strm = db.a.aggregate([{$group: {_id: '$type'}}, {$project: {_id: 0, foo: '$_id'}}])

    strm.pipe(concat(function (types) {
      var arr = types.map(function (x) { return x.foo })
      t.equal(types.length, 2)
      t.notEqual(arr.indexOf('fire'), -1)
      t.notEqual(arr.indexOf('water'), -1)
      t.end()
    }))

    strm.on('error', function (err) {
      // Aggregation cursors are only supported on mongodb 2.6+
      // this shouldn't fail the tests for other versions of mongodb
      // BUG FIX: the old check used strict equality against a truncated
      // message ('unrecognized field "cursor') which could never match the
      // server's actual message, so the fallback always failed the test.
      if (err.message.indexOf('unrecognized field "cursor') !== -1) t.ok(1)
      else t.fail(err)
      t.end()
    })
  })
})
AMKohn/mongojs
test/test-aggregate-pipeline.js
JavaScript
mit
1,310
<?php namespace PhpOffice\PhpSpreadsheet\Worksheet; use ArrayObject; use PhpOffice\PhpSpreadsheet\Calculation\Calculation; use PhpOffice\PhpSpreadsheet\Cell\Cell; use PhpOffice\PhpSpreadsheet\Cell\Coordinate; use PhpOffice\PhpSpreadsheet\Cell\DataType; use PhpOffice\PhpSpreadsheet\Cell\DataValidation; use PhpOffice\PhpSpreadsheet\Cell\Hyperlink; use PhpOffice\PhpSpreadsheet\Chart\Chart; use PhpOffice\PhpSpreadsheet\Collection\Cells; use PhpOffice\PhpSpreadsheet\Collection\CellsFactory; use PhpOffice\PhpSpreadsheet\Comment; use PhpOffice\PhpSpreadsheet\Exception; use PhpOffice\PhpSpreadsheet\IComparable; use PhpOffice\PhpSpreadsheet\NamedRange; use PhpOffice\PhpSpreadsheet\ReferenceHelper; use PhpOffice\PhpSpreadsheet\RichText\RichText; use PhpOffice\PhpSpreadsheet\Shared; use PhpOffice\PhpSpreadsheet\Spreadsheet; use PhpOffice\PhpSpreadsheet\Style\Color; use PhpOffice\PhpSpreadsheet\Style\Conditional; use PhpOffice\PhpSpreadsheet\Style\NumberFormat; use PhpOffice\PhpSpreadsheet\Style\Style; class Worksheet implements IComparable { // Break types const BREAK_NONE = 0; const BREAK_ROW = 1; const BREAK_COLUMN = 2; // Sheet state const SHEETSTATE_VISIBLE = 'visible'; const SHEETSTATE_HIDDEN = 'hidden'; const SHEETSTATE_VERYHIDDEN = 'veryHidden'; /** * Maximum 31 characters allowed for sheet title. * * @var int */ const SHEET_TITLE_MAXIMUM_LENGTH = 31; /** * Invalid characters in sheet title. * * @var array */ private static $invalidCharacters = ['*', ':', '/', '\\', '?', '[', ']']; /** * Parent spreadsheet. * * @var Spreadsheet */ private $parent; /** * Collection of cells. * * @var Cells */ private $cellCollection; /** * Collection of row dimensions. * * @var RowDimension[] */ private $rowDimensions = []; /** * Default row dimension. * * @var RowDimension */ private $defaultRowDimension; /** * Collection of column dimensions. * * @var ColumnDimension[] */ private $columnDimensions = []; /** * Default column dimension. 
* * @var ColumnDimension */ private $defaultColumnDimension; /** * Collection of drawings. * * @var BaseDrawing[] */ private $drawingCollection; /** * Collection of Chart objects. * * @var Chart[] */ private $chartCollection = []; /** * Worksheet title. * * @var string */ private $title; /** * Sheet state. * * @var string */ private $sheetState; /** * Page setup. * * @var PageSetup */ private $pageSetup; /** * Page margins. * * @var PageMargins */ private $pageMargins; /** * Page header/footer. * * @var HeaderFooter */ private $headerFooter; /** * Sheet view. * * @var SheetView */ private $sheetView; /** * Protection. * * @var Protection */ private $protection; /** * Collection of styles. * * @var Style[] */ private $styles = []; /** * Conditional styles. Indexed by cell coordinate, e.g. 'A1'. * * @var array */ private $conditionalStylesCollection = []; /** * Is the current cell collection sorted already? * * @var bool */ private $cellCollectionIsSorted = false; /** * Collection of breaks. * * @var array */ private $breaks = []; /** * Collection of merged cell ranges. * * @var array */ private $mergeCells = []; /** * Collection of protected cell ranges. * * @var array */ private $protectedCells = []; /** * Autofilter Range and selection. * * @var AutoFilter */ private $autoFilter; /** * Freeze pane. * * @var null|string */ private $freezePane; /** * Default position of the right bottom pane. * * @var null|string */ private $topLeftCell; /** * Show gridlines? * * @var bool */ private $showGridlines = true; /** * Print gridlines? * * @var bool */ private $printGridlines = false; /** * Show row and column headers? * * @var bool */ private $showRowColHeaders = true; /** * Show summary below? (Row/Column outline). * * @var bool */ private $showSummaryBelow = true; /** * Show summary right? (Row/Column outline). * * @var bool */ private $showSummaryRight = true; /** * Collection of comments. * * @var Comment[] */ private $comments = []; /** * Active cell. (Only one!). 
* * @var string */ private $activeCell = 'A1'; /** * Selected cells. * * @var string */ private $selectedCells = 'A1'; /** * Cached highest column. * * @var string */ private $cachedHighestColumn = 'A'; /** * Cached highest row. * * @var int */ private $cachedHighestRow = 1; /** * Right-to-left? * * @var bool */ private $rightToLeft = false; /** * Hyperlinks. Indexed by cell coordinate, e.g. 'A1'. * * @var array */ private $hyperlinkCollection = []; /** * Data validation objects. Indexed by cell coordinate, e.g. 'A1'. * * @var array */ private $dataValidationCollection = []; /** * Tab color. * * @var Color */ private $tabColor; /** * Dirty flag. * * @var bool */ private $dirty = true; /** * Hash. * * @var string */ private $hash; /** * CodeName. * * @var string */ private $codeName; /** * Create a new worksheet. * * @param Spreadsheet $parent * @param string $pTitle */ public function __construct(Spreadsheet $parent = null, $pTitle = 'Worksheet') { // Set parent and title $this->parent = $parent; $this->setTitle($pTitle, false); // setTitle can change $pTitle $this->setCodeName($this->getTitle()); $this->setSheetState(self::SHEETSTATE_VISIBLE); $this->cellCollection = CellsFactory::getInstance($this); // Set page setup $this->pageSetup = new PageSetup(); // Set page margins $this->pageMargins = new PageMargins(); // Set page header/footer $this->headerFooter = new HeaderFooter(); // Set sheet view $this->sheetView = new SheetView(); // Drawing collection $this->drawingCollection = new \ArrayObject(); // Chart collection $this->chartCollection = new \ArrayObject(); // Protection $this->protection = new Protection(); // Default row dimension $this->defaultRowDimension = new RowDimension(null); // Default column dimension $this->defaultColumnDimension = new ColumnDimension(null); $this->autoFilter = new AutoFilter(null, $this); } /** * Disconnect all cells from this Worksheet object, * typically so that the worksheet object can be unset. 
*/ public function disconnectCells() { if ($this->cellCollection !== null) { $this->cellCollection->unsetWorksheetCells(); $this->cellCollection = null; } // detach ourself from the workbook, so that it can then delete this worksheet successfully $this->parent = null; } /** * Code to execute when this worksheet is unset(). */ public function __destruct() { Calculation::getInstance($this->parent)->clearCalculationCacheForWorksheet($this->title); $this->disconnectCells(); } /** * Return the cell collection. * * @return Cells */ public function getCellCollection() { return $this->cellCollection; } /** * Get array of invalid characters for sheet title. * * @return array */ public static function getInvalidCharacters() { return self::$invalidCharacters; } /** * Check sheet code name for valid Excel syntax. * * @param string $pValue The string to check * * @throws Exception * * @return string The valid string */ private static function checkSheetCodeName($pValue) { $CharCount = Shared\StringHelper::countCharacters($pValue); if ($CharCount == 0) { throw new Exception('Sheet code name cannot be empty.'); } // Some of the printable ASCII characters are invalid: * : / \ ? [ ] and first and last characters cannot be a "'" if ((str_replace(self::$invalidCharacters, '', $pValue) !== $pValue) || (Shared\StringHelper::substring($pValue, -1, 1) == '\'') || (Shared\StringHelper::substring($pValue, 0, 1) == '\'')) { throw new Exception('Invalid character found in sheet code name'); } // Enforce maximum characters allowed for sheet title if ($CharCount > self::SHEET_TITLE_MAXIMUM_LENGTH) { throw new Exception('Maximum ' . self::SHEET_TITLE_MAXIMUM_LENGTH . ' characters allowed in sheet code name.'); } return $pValue; } /** * Check sheet title for valid Excel syntax. 
* * @param string $pValue The string to check * * @throws Exception * * @return string The valid string */ private static function checkSheetTitle($pValue) { // Some of the printable ASCII characters are invalid: * : / \ ? [ ] if (str_replace(self::$invalidCharacters, '', $pValue) !== $pValue) { throw new Exception('Invalid character found in sheet title'); } // Enforce maximum characters allowed for sheet title if (Shared\StringHelper::countCharacters($pValue) > self::SHEET_TITLE_MAXIMUM_LENGTH) { throw new Exception('Maximum ' . self::SHEET_TITLE_MAXIMUM_LENGTH . ' characters allowed in sheet title.'); } return $pValue; } /** * Get a sorted list of all cell coordinates currently held in the collection by row and column. * * @param bool $sorted Also sort the cell collection? * * @return string[] */ public function getCoordinates($sorted = true) { if ($this->cellCollection == null) { return []; } if ($sorted) { return $this->cellCollection->getSortedCoordinates(); } return $this->cellCollection->getCoordinates(); } /** * Get collection of row dimensions. * * @return RowDimension[] */ public function getRowDimensions() { return $this->rowDimensions; } /** * Get default row dimension. * * @return RowDimension */ public function getDefaultRowDimension() { return $this->defaultRowDimension; } /** * Get collection of column dimensions. * * @return ColumnDimension[] */ public function getColumnDimensions() { return $this->columnDimensions; } /** * Get default column dimension. * * @return ColumnDimension */ public function getDefaultColumnDimension() { return $this->defaultColumnDimension; } /** * Get collection of drawings. * * @return BaseDrawing[] */ public function getDrawingCollection() { return $this->drawingCollection; } /** * Get collection of charts. * * @return Chart[] */ public function getChartCollection() { return $this->chartCollection; } /** * Add chart. 
* * @param Chart $pChart * @param null|int $iChartIndex Index where chart should go (0,1,..., or null for last) * * @return Chart */ public function addChart(Chart $pChart, $iChartIndex = null) { $pChart->setWorksheet($this); if ($iChartIndex === null) { $this->chartCollection[] = $pChart; } else { // Insert the chart at the requested index array_splice($this->chartCollection, $iChartIndex, 0, [$pChart]); } return $pChart; } /** * Return the count of charts on this worksheet. * * @return int The number of charts */ public function getChartCount() { return count($this->chartCollection); } /** * Get a chart by its index position. * * @param string $index Chart index position * * @return Chart|false */ public function getChartByIndex($index) { $chartCount = count($this->chartCollection); if ($chartCount == 0) { return false; } if ($index === null) { $index = --$chartCount; } if (!isset($this->chartCollection[$index])) { return false; } return $this->chartCollection[$index]; } /** * Return an array of the names of charts on this worksheet. * * @return string[] The names of charts */ public function getChartNames() { $chartNames = []; foreach ($this->chartCollection as $chart) { $chartNames[] = $chart->getName(); } return $chartNames; } /** * Get a chart by name. * * @param string $chartName Chart name * * @return Chart|false */ public function getChartByName($chartName) { $chartCount = count($this->chartCollection); if ($chartCount == 0) { return false; } foreach ($this->chartCollection as $index => $chart) { if ($chart->getName() == $chartName) { return $this->chartCollection[$index]; } } return false; } /** * Refresh column dimensions. 
* * @return Worksheet */ public function refreshColumnDimensions() { $currentColumnDimensions = $this->getColumnDimensions(); $newColumnDimensions = []; foreach ($currentColumnDimensions as $objColumnDimension) { $newColumnDimensions[$objColumnDimension->getColumnIndex()] = $objColumnDimension; } $this->columnDimensions = $newColumnDimensions; return $this; } /** * Refresh row dimensions. * * @return Worksheet */ public function refreshRowDimensions() { $currentRowDimensions = $this->getRowDimensions(); $newRowDimensions = []; foreach ($currentRowDimensions as $objRowDimension) { $newRowDimensions[$objRowDimension->getRowIndex()] = $objRowDimension; } $this->rowDimensions = $newRowDimensions; return $this; } /** * Calculate worksheet dimension. * * @return string String containing the dimension of this worksheet */ public function calculateWorksheetDimension() { // Return return 'A1' . ':' . $this->getHighestColumn() . $this->getHighestRow(); } /** * Calculate worksheet data dimension. * * @return string String containing the dimension of this worksheet that actually contain data */ public function calculateWorksheetDataDimension() { // Return return 'A1' . ':' . $this->getHighestDataColumn() . $this->getHighestDataRow(); } /** * Calculate widths for auto-size columns. 
* * @return Worksheet; */ public function calculateColumnWidths() { // initialize $autoSizes array $autoSizes = []; foreach ($this->getColumnDimensions() as $colDimension) { if ($colDimension->getAutoSize()) { $autoSizes[$colDimension->getColumnIndex()] = -1; } } // There is only something to do if there are some auto-size columns if (!empty($autoSizes)) { // build list of cells references that participate in a merge $isMergeCell = []; foreach ($this->getMergeCells() as $cells) { foreach (Coordinate::extractAllCellReferencesInRange($cells) as $cellReference) { $isMergeCell[$cellReference] = true; } } // loop through all cells in the worksheet foreach ($this->getCoordinates(false) as $coordinate) { $cell = $this->getCell($coordinate, false); if ($cell !== null && isset($autoSizes[$this->cellCollection->getCurrentColumn()])) { //Determine if cell is in merge range $isMerged = isset($isMergeCell[$this->cellCollection->getCurrentCoordinate()]); //By default merged cells should be ignored $isMergedButProceed = false; //The only exception is if it's a merge range value cell of a 'vertical' randge (1 column wide) if ($isMerged && $cell->isMergeRangeValueCell()) { $range = $cell->getMergeRange(); $rangeBoundaries = Coordinate::rangeDimension($range); if ($rangeBoundaries[0] == 1) { $isMergedButProceed = true; } } // Determine width if cell does not participate in a merge or does and is a value cell of 1-column wide range if (!$isMerged || $isMergedButProceed) { // Calculated value // To formatted string $cellValue = NumberFormat::toFormattedString( $cell->getCalculatedValue(), $this->getParent()->getCellXfByIndex($cell->getXfIndex())->getNumberFormat()->getFormatCode() ); $autoSizes[$this->cellCollection->getCurrentColumn()] = max( (float) $autoSizes[$this->cellCollection->getCurrentColumn()], (float) Shared\Font::calculateColumnWidth( $this->getParent()->getCellXfByIndex($cell->getXfIndex())->getFont(), $cellValue, 
$this->getParent()->getCellXfByIndex($cell->getXfIndex())->getAlignment()->getTextRotation(), $this->getParent()->getDefaultStyle()->getFont() ) ); } } } // adjust column widths foreach ($autoSizes as $columnIndex => $width) { if ($width == -1) { $width = $this->getDefaultColumnDimension()->getWidth(); } $this->getColumnDimension($columnIndex)->setWidth($width); } } return $this; } /** * Get parent. * * @return Spreadsheet */ public function getParent() { return $this->parent; } /** * Re-bind parent. * * @param Spreadsheet $parent * * @return Worksheet */ public function rebindParent(Spreadsheet $parent) { if ($this->parent !== null) { $namedRanges = $this->parent->getNamedRanges(); foreach ($namedRanges as $namedRange) { $parent->addNamedRange($namedRange); } $this->parent->removeSheetByIndex( $this->parent->getIndex($this) ); } $this->parent = $parent; return $this; } /** * Get title. * * @return string */ public function getTitle() { return $this->title; } /** * Set title. * * @param string $pValue String containing the dimension of this worksheet * @param bool $updateFormulaCellReferences Flag indicating whether cell references in formulae should * be updated to reflect the new sheet name. * This should be left as the default true, unless you are * certain that no formula cells on any worksheet contain * references to this worksheet * @param bool $validate False to skip validation of new title. WARNING: This should only be set * at parse time (by Readers), where titles can be assumed to be valid. * * @return Worksheet */ public function setTitle($pValue, $updateFormulaCellReferences = true, $validate = true) { // Is this a 'rename' or not? if ($this->getTitle() == $pValue) { return $this; } // Old title $oldTitle = $this->getTitle(); if ($validate) { // Syntax check self::checkSheetTitle($pValue); if ($this->parent) { // Is there already such sheet name? 
if ($this->parent->sheetNameExists($pValue)) { // Use name, but append with lowest possible integer if (Shared\StringHelper::countCharacters($pValue) > 29) { $pValue = Shared\StringHelper::substring($pValue, 0, 29); } $i = 1; while ($this->parent->sheetNameExists($pValue . ' ' . $i)) { ++$i; if ($i == 10) { if (Shared\StringHelper::countCharacters($pValue) > 28) { $pValue = Shared\StringHelper::substring($pValue, 0, 28); } } elseif ($i == 100) { if (Shared\StringHelper::countCharacters($pValue) > 27) { $pValue = Shared\StringHelper::substring($pValue, 0, 27); } } } $pValue .= " $i"; } } } // Set title $this->title = $pValue; $this->dirty = true; if ($this->parent && $this->parent->getCalculationEngine()) { // New title $newTitle = $this->getTitle(); $this->parent->getCalculationEngine() ->renameCalculationCacheForWorksheet($oldTitle, $newTitle); if ($updateFormulaCellReferences) { ReferenceHelper::getInstance()->updateNamedFormulas($this->parent, $oldTitle, $newTitle); } } return $this; } /** * Get sheet state. * * @return string Sheet state (visible, hidden, veryHidden) */ public function getSheetState() { return $this->sheetState; } /** * Set sheet state. * * @param string $value Sheet state (visible, hidden, veryHidden) * * @return Worksheet */ public function setSheetState($value) { $this->sheetState = $value; return $this; } /** * Get page setup. * * @return PageSetup */ public function getPageSetup() { return $this->pageSetup; } /** * Set page setup. * * @param PageSetup $pValue * * @return Worksheet */ public function setPageSetup(PageSetup $pValue) { $this->pageSetup = $pValue; return $this; } /** * Get page margins. * * @return PageMargins */ public function getPageMargins() { return $this->pageMargins; } /** * Set page margins. * * @param PageMargins $pValue * * @return Worksheet */ public function setPageMargins(PageMargins $pValue) { $this->pageMargins = $pValue; return $this; } /** * Get page header/footer. 
* * @return HeaderFooter */ public function getHeaderFooter() { return $this->headerFooter; } /** * Set page header/footer. * * @param HeaderFooter $pValue * * @return Worksheet */ public function setHeaderFooter(HeaderFooter $pValue) { $this->headerFooter = $pValue; return $this; } /** * Get sheet view. * * @return SheetView */ public function getSheetView() { return $this->sheetView; } /** * Set sheet view. * * @param SheetView $pValue * * @return Worksheet */ public function setSheetView(SheetView $pValue) { $this->sheetView = $pValue; return $this; } /** * Get Protection. * * @return Protection */ public function getProtection() { return $this->protection; } /** * Set Protection. * * @param Protection $pValue * * @return Worksheet */ public function setProtection(Protection $pValue) { $this->protection = $pValue; $this->dirty = true; return $this; } /** * Get highest worksheet column. * * @param string $row Return the data highest column for the specified row, * or the highest column of any row if no row number is passed * * @return string Highest column name */ public function getHighestColumn($row = null) { if ($row == null) { return $this->cachedHighestColumn; } return $this->getHighestDataColumn($row); } /** * Get highest worksheet column that contains data. * * @param string $row Return the highest data column for the specified row, * or the highest data column of any row if no row number is passed * * @return string Highest column name that contains data */ public function getHighestDataColumn($row = null) { return $this->cellCollection->getHighestColumn($row); } /** * Get highest worksheet row. 
* * @param string $column Return the highest data row for the specified column, * or the highest row of any column if no column letter is passed * * @return int Highest row number */ public function getHighestRow($column = null) { if ($column == null) { return $this->cachedHighestRow; } return $this->getHighestDataRow($column); } /** * Get highest worksheet row that contains data. * * @param string $column Return the highest data row for the specified column, * or the highest data row of any column if no column letter is passed * * @return int Highest row number that contains data */ public function getHighestDataRow($column = null) { return $this->cellCollection->getHighestRow($column); } /** * Get highest worksheet column and highest row that have cell records. * * @return array Highest column name and highest row number */ public function getHighestRowAndColumn() { return $this->cellCollection->getHighestRowAndColumn(); } /** * Set a cell value. * * @param string $pCoordinate Coordinate of the cell, eg: 'A1' * @param mixed $pValue Value of the cell * * @return Worksheet */ public function setCellValue($pCoordinate, $pValue) { $this->getCell($pCoordinate)->setValue($pValue); return $this; } /** * Set a cell value by using numeric cell coordinates. * * @param int $columnIndex Numeric column coordinate of the cell * @param int $row Numeric row coordinate of the cell * @param mixed $value Value of the cell * * @return Worksheet */ public function setCellValueByColumnAndRow($columnIndex, $row, $value) { $this->getCellByColumnAndRow($columnIndex, $row)->setValue($value); return $this; } /** * Set a cell value. 
* * @param string $pCoordinate Coordinate of the cell, eg: 'A1' * @param mixed $pValue Value of the cell * @param string $pDataType Explicit data type, see DataType::TYPE_* * * @return Worksheet */ public function setCellValueExplicit($pCoordinate, $pValue, $pDataType) { // Set value $this->getCell($pCoordinate)->setValueExplicit($pValue, $pDataType); return $this; } /** * Set a cell value by using numeric cell coordinates. * * @param int $columnIndex Numeric column coordinate of the cell * @param int $row Numeric row coordinate of the cell * @param mixed $value Value of the cell * @param string $dataType Explicit data type, see DataType::TYPE_* * * @return Worksheet */ public function setCellValueExplicitByColumnAndRow($columnIndex, $row, $value, $dataType) { $this->getCellByColumnAndRow($columnIndex, $row)->setValueExplicit($value, $dataType); return $this; } /** * Get cell at a specific coordinate. * * @param string $pCoordinate Coordinate of the cell, eg: 'A1' * @param bool $createIfNotExists Flag indicating whether a new cell should be created if it doesn't * already exist, or a null should be returned instead * * @throws Exception * * @return null|Cell Cell that was found/created or null */ public function getCell($pCoordinate, $createIfNotExists = true) { // Uppercase coordinate $pCoordinateUpper = strtoupper($pCoordinate); // Check cell collection if ($this->cellCollection->has($pCoordinateUpper)) { return $this->cellCollection->get($pCoordinateUpper); } // Worksheet reference? if (strpos($pCoordinate, '!') !== false) { $worksheetReference = self::extractSheetTitle($pCoordinate, true); return $this->parent->getSheetByName($worksheetReference[0])->getCell(strtoupper($worksheetReference[1]), $createIfNotExists); } // Named range? if ((!preg_match('/^' . Calculation::CALCULATION_REGEXP_CELLREF . '$/i', $pCoordinate, $matches)) && (preg_match('/^' . Calculation::CALCULATION_REGEXP_NAMEDRANGE . 
'$/i', $pCoordinate, $matches))) { $namedRange = NamedRange::resolveRange($pCoordinate, $this); if ($namedRange !== null) { $pCoordinate = $namedRange->getRange(); return $namedRange->getWorksheet()->getCell($pCoordinate, $createIfNotExists); } } if (Coordinate::coordinateIsRange($pCoordinate)) { throw new Exception('Cell coordinate can not be a range of cells.'); } elseif (strpos($pCoordinate, '$') !== false) { throw new Exception('Cell coordinate must not be absolute.'); } // Create new cell object, if required return $createIfNotExists ? $this->createNewCell($pCoordinateUpper) : null; } /** * Get cell at a specific coordinate by using numeric cell coordinates. * * @param int $columnIndex Numeric column coordinate of the cell * @param int $row Numeric row coordinate of the cell * @param bool $createIfNotExists Flag indicating whether a new cell should be created if it doesn't * already exist, or a null should be returned instead * * @return null|Cell Cell that was found/created or null */ public function getCellByColumnAndRow($columnIndex, $row, $createIfNotExists = true) { $columnLetter = Coordinate::stringFromColumnIndex($columnIndex); $coordinate = $columnLetter . $row; if ($this->cellCollection->has($coordinate)) { return $this->cellCollection->get($coordinate); } // Create new cell object, if required return $createIfNotExists ? $this->createNewCell($coordinate) : null; } /** * Create a new cell at the specified coordinate. 
 * @param string $pCoordinate Coordinate of the cell
 *
 * @return Cell Cell that was created
 */
private function createNewCell($pCoordinate)
{
    $cell = new Cell(null, DataType::TYPE_NULL, $this);
    $this->cellCollection->add($pCoordinate, $cell);
    $this->cellCollectionIsSorted = false;

    // Coordinates: keep the cached highest column/row up to date so that
    // getHighestColumn()/getHighestRow() remain cheap lookups.
    $aCoordinates = Coordinate::coordinateFromString($pCoordinate);
    if (Coordinate::columnIndexFromString($this->cachedHighestColumn) < Coordinate::columnIndexFromString($aCoordinates[0])) {
        $this->cachedHighestColumn = $aCoordinates[0];
    }
    if ($aCoordinates[1] > $this->cachedHighestRow) {
        $this->cachedHighestRow = $aCoordinates[1];
    }

    // Cell needs appropriate xfIndex from dimensions records
    // but don't create dimension records if they don't already exist
    $rowDimension = $this->getRowDimension($aCoordinates[1], false);
    $columnDimension = $this->getColumnDimension($aCoordinates[0], false);

    if ($rowDimension !== null && $rowDimension->getXfIndex() > 0) {
        // then there is a row dimension with explicit style, assign it to the cell
        $cell->setXfIndex($rowDimension->getXfIndex());
    } elseif ($columnDimension !== null && $columnDimension->getXfIndex() > 0) {
        // then there is a column dimension, assign it to the cell
        $cell->setXfIndex($columnDimension->getXfIndex());
    }

    return $cell;
}

/**
 * Does the cell at a specific coordinate exist?
 *
 * @param string $pCoordinate Coordinate of the cell eg: 'A1'
 *
 * @throws Exception
 *
 * @return bool
 */
public function cellExists($pCoordinate)
{
    // Worksheet reference? Delegate to the referenced sheet.
    if (strpos($pCoordinate, '!') !== false) {
        $worksheetReference = self::extractSheetTitle($pCoordinate, true);

        return $this->parent->getSheetByName($worksheetReference[0])->cellExists(strtoupper($worksheetReference[1]));
    }

    // Named range? Only treated as such when it is not already a plain cell reference.
    if ((!preg_match('/^' . Calculation::CALCULATION_REGEXP_CELLREF . '$/i', $pCoordinate, $matches)) &&
        (preg_match('/^' . Calculation::CALCULATION_REGEXP_NAMEDRANGE . 
'$/i', $pCoordinate, $matches))) {
        $namedRange = NamedRange::resolveRange($pCoordinate, $this);
        if ($namedRange !== null) {
            $pCoordinate = $namedRange->getRange();
            if ($this->getHashCode() != $namedRange->getWorksheet()->getHashCode()) {
                // The named range lives on another sheet: only reachable when
                // the range is not marked local-only.
                if (!$namedRange->getLocalOnly()) {
                    return $namedRange->getWorksheet()->cellExists($pCoordinate);
                }

                throw new Exception('Named range ' . $namedRange->getName() . ' is not accessible from within sheet ' . $this->getTitle());
            }
        } else {
            return false;
        }
    }

    // Uppercase coordinate
    $pCoordinate = strtoupper($pCoordinate);

    if (Coordinate::coordinateIsRange($pCoordinate)) {
        throw new Exception('Cell coordinate can not be a range of cells.');
    } elseif (strpos($pCoordinate, '$') !== false) {
        throw new Exception('Cell coordinate must not be absolute.');
    }

    // Cell exists?
    return $this->cellCollection->has($pCoordinate);
}

/**
 * Cell at a specific coordinate by using numeric cell coordinates exists?
 *
 * @param int $columnIndex Numeric column coordinate of the cell
 * @param int $row Numeric row coordinate of the cell
 *
 * @return bool
 */
public function cellExistsByColumnAndRow($columnIndex, $row)
{
    return $this->cellExists(Coordinate::stringFromColumnIndex($columnIndex) . $row);
}

/**
 * Get row dimension at a specific row.
 *
 * @param int $pRow Numeric index of the row
 * @param bool $create Whether to create the dimension record when it does not exist yet
 *
 * @return null|RowDimension RowDimension, or null when $create is false and no record exists
 */
public function getRowDimension($pRow, $create = true)
{
    // Lazily create the row dimension on first access, keeping the
    // cached highest row in sync.
    if (!isset($this->rowDimensions[$pRow])) {
        if (!$create) {
            return null;
        }
        $this->rowDimensions[$pRow] = new RowDimension($pRow);

        $this->cachedHighestRow = max($this->cachedHighestRow, $pRow);
    }

    return $this->rowDimensions[$pRow];
}

/**
 * Get column dimension at a specific column.
* * @param string $pColumn String index of the column eg: 'A' * @param bool $create * * @return ColumnDimension */ public function getColumnDimension($pColumn, $create = true) { // Uppercase coordinate $pColumn = strtoupper($pColumn); // Fetch dimensions if (!isset($this->columnDimensions[$pColumn])) { if (!$create) { return null; } $this->columnDimensions[$pColumn] = new ColumnDimension($pColumn); if (Coordinate::columnIndexFromString($this->cachedHighestColumn) < Coordinate::columnIndexFromString($pColumn)) { $this->cachedHighestColumn = $pColumn; } } return $this->columnDimensions[$pColumn]; } /** * Get column dimension at a specific column by using numeric cell coordinates. * * @param int $columnIndex Numeric column coordinate of the cell * * @return ColumnDimension */ public function getColumnDimensionByColumn($columnIndex) { return $this->getColumnDimension(Coordinate::stringFromColumnIndex($columnIndex)); } /** * Get styles. * * @return Style[] */ public function getStyles() { return $this->styles; } /** * Get style for cell. * * @param string $pCellCoordinate Cell coordinate (or range) to get style for, eg: 'A1' * * @throws Exception * * @return Style */ public function getStyle($pCellCoordinate) { // set this sheet as active $this->parent->setActiveSheetIndex($this->parent->getIndex($this)); // set cell coordinate as active $this->setSelectedCells(strtoupper($pCellCoordinate)); return $this->parent->getCellXfSupervisor(); } /** * Get conditional styles for a cell. * * @param string $pCoordinate eg: 'A1' * * @return Conditional[] */ public function getConditionalStyles($pCoordinate) { $pCoordinate = strtoupper($pCoordinate); if (!isset($this->conditionalStylesCollection[$pCoordinate])) { $this->conditionalStylesCollection[$pCoordinate] = []; } return $this->conditionalStylesCollection[$pCoordinate]; } /** * Do conditional styles exist for this cell? 
 * @param string $pCoordinate eg: 'A1'
 *
 * @return bool
 */
public function conditionalStylesExists($pCoordinate)
{
    return isset($this->conditionalStylesCollection[strtoupper($pCoordinate)]);
}

/**
 * Removes conditional styles for a cell.
 *
 * @param string $pCoordinate eg: 'A1'
 *
 * @return Worksheet
 */
public function removeConditionalStyles($pCoordinate)
{
    unset($this->conditionalStylesCollection[strtoupper($pCoordinate)]);

    return $this;
}

/**
 * Get collection of conditional styles.
 *
 * @return array
 */
public function getConditionalStylesCollection()
{
    return $this->conditionalStylesCollection;
}

/**
 * Set conditional styles.
 *
 * @param string $pCoordinate eg: 'A1'
 * @param Conditional[] $pValue
 *
 * @return Worksheet
 */
public function setConditionalStyles($pCoordinate, $pValue)
{
    $this->conditionalStylesCollection[strtoupper($pCoordinate)] = $pValue;

    return $this;
}

/**
 * Get style for cell by using numeric cell coordinates.
 *
 * @param int $columnIndex1 Numeric column coordinate of the cell
 * @param int $row1 Numeric row coordinate of the cell
 * @param null|int $columnIndex2 Numeric column coordinate of the range cell
 * @param null|int $row2 Numeric row coordinate of the range cell
 *
 * @return Style
 */
public function getStyleByColumnAndRow($columnIndex1, $row1, $columnIndex2 = null, $row2 = null)
{
    // When a second coordinate pair is given, build a range; otherwise a single cell.
    if ($columnIndex2 !== null && $row2 !== null) {
        $cellRange = Coordinate::stringFromColumnIndex($columnIndex1) . $row1 . ':' . Coordinate::stringFromColumnIndex($columnIndex2) . $row2;

        return $this->getStyle($cellRange);
    }

    return $this->getStyle(Coordinate::stringFromColumnIndex($columnIndex1) . $row1);
}

/**
 * Duplicate cell style to a range of cells.
 *
 * Please note that this will overwrite existing cell styles for cells in range!
 *
 * @param Style $pCellStyle Cell style to duplicate
 * @param string $pRange Range of cells (i.e. "A1:B10"), or just one cell (i.e.
"A1") * * @throws Exception * * @return Worksheet */ public function duplicateStyle(Style $pCellStyle, $pRange) { // Add the style to the workbook if necessary $workbook = $this->parent; if ($existingStyle = $this->parent->getCellXfByHashCode($pCellStyle->getHashCode())) { // there is already such cell Xf in our collection $xfIndex = $existingStyle->getIndex(); } else { // we don't have such a cell Xf, need to add $workbook->addCellXf($pCellStyle); $xfIndex = $pCellStyle->getIndex(); } // Calculate range outer borders list($rangeStart, $rangeEnd) = Coordinate::rangeBoundaries($pRange . ':' . $pRange); // Make sure we can loop upwards on rows and columns if ($rangeStart[0] > $rangeEnd[0] && $rangeStart[1] > $rangeEnd[1]) { $tmp = $rangeStart; $rangeStart = $rangeEnd; $rangeEnd = $tmp; } // Loop through cells and apply styles for ($col = $rangeStart[0]; $col <= $rangeEnd[0]; ++$col) { for ($row = $rangeStart[1]; $row <= $rangeEnd[1]; ++$row) { $this->getCell(Coordinate::stringFromColumnIndex($col) . $row)->setXfIndex($xfIndex); } } return $this; } /** * Duplicate conditional style to a range of cells. * * Please note that this will overwrite existing cell styles for cells in range! * * @param Conditional[] $pCellStyle Cell style to duplicate * @param string $pRange Range of cells (i.e. "A1:B10"), or just one cell (i.e. "A1") * * @throws Exception * * @return Worksheet */ public function duplicateConditionalStyle(array $pCellStyle, $pRange = '') { foreach ($pCellStyle as $cellStyle) { if (!($cellStyle instanceof Conditional)) { throw new Exception('Style is not a conditional style'); } } // Calculate range outer borders list($rangeStart, $rangeEnd) = Coordinate::rangeBoundaries($pRange . ':' . 
$pRange); // Make sure we can loop upwards on rows and columns if ($rangeStart[0] > $rangeEnd[0] && $rangeStart[1] > $rangeEnd[1]) { $tmp = $rangeStart; $rangeStart = $rangeEnd; $rangeEnd = $tmp; } // Loop through cells and apply styles for ($col = $rangeStart[0]; $col <= $rangeEnd[0]; ++$col) { for ($row = $rangeStart[1]; $row <= $rangeEnd[1]; ++$row) { $this->setConditionalStyles(Coordinate::stringFromColumnIndex($col) . $row, $pCellStyle); } } return $this; } /** * Set break on a cell. * * @param string $pCoordinate Cell coordinate (e.g. A1) * @param int $pBreak Break type (type of Worksheet::BREAK_*) * * @throws Exception * * @return Worksheet */ public function setBreak($pCoordinate, $pBreak) { // Uppercase coordinate $pCoordinate = strtoupper($pCoordinate); if ($pCoordinate != '') { if ($pBreak == self::BREAK_NONE) { if (isset($this->breaks[$pCoordinate])) { unset($this->breaks[$pCoordinate]); } } else { $this->breaks[$pCoordinate] = $pBreak; } } else { throw new Exception('No cell coordinate specified.'); } return $this; } /** * Set break on a cell by using numeric cell coordinates. * * @param int $columnIndex Numeric column coordinate of the cell * @param int $row Numeric row coordinate of the cell * @param int $break Break type (type of Worksheet::BREAK_*) * * @return Worksheet */ public function setBreakByColumnAndRow($columnIndex, $row, $break) { return $this->setBreak(Coordinate::stringFromColumnIndex($columnIndex) . $row, $break); } /** * Get breaks. * * @return array[] */ public function getBreaks() { return $this->breaks; } /** * Set merge on a cell range. * * @param string $pRange Cell range (e.g. 
A1:E1) * * @throws Exception * * @return Worksheet */ public function mergeCells($pRange) { // Uppercase coordinate $pRange = strtoupper($pRange); if (strpos($pRange, ':') !== false) { $this->mergeCells[$pRange] = $pRange; // make sure cells are created // get the cells in the range $aReferences = Coordinate::extractAllCellReferencesInRange($pRange); // create upper left cell if it does not already exist $upperLeft = $aReferences[0]; if (!$this->cellExists($upperLeft)) { $this->getCell($upperLeft)->setValueExplicit(null, DataType::TYPE_NULL); } // Blank out the rest of the cells in the range (if they exist) $count = count($aReferences); for ($i = 1; $i < $count; ++$i) { if ($this->cellExists($aReferences[$i])) { $this->getCell($aReferences[$i])->setValueExplicit(null, DataType::TYPE_NULL); } } } else { throw new Exception('Merge must be set on a range of cells.'); } return $this; } /** * Set merge on a cell range by using numeric cell coordinates. * * @param int $columnIndex1 Numeric column coordinate of the first cell * @param int $row1 Numeric row coordinate of the first cell * @param int $columnIndex2 Numeric column coordinate of the last cell * @param int $row2 Numeric row coordinate of the last cell * * @throws Exception * * @return Worksheet */ public function mergeCellsByColumnAndRow($columnIndex1, $row1, $columnIndex2, $row2) { $cellRange = Coordinate::stringFromColumnIndex($columnIndex1) . $row1 . ':' . Coordinate::stringFromColumnIndex($columnIndex2) . $row2; return $this->mergeCells($cellRange); } /** * Remove merge on a cell range. * * @param string $pRange Cell range (e.g. A1:E1) * * @throws Exception * * @return Worksheet */ public function unmergeCells($pRange) { // Uppercase coordinate $pRange = strtoupper($pRange); if (strpos($pRange, ':') !== false) { if (isset($this->mergeCells[$pRange])) { unset($this->mergeCells[$pRange]); } else { throw new Exception('Cell range ' . $pRange . 
' not known as merged.'); } } else { throw new Exception('Merge can only be removed from a range of cells.'); } return $this; } /** * Remove merge on a cell range by using numeric cell coordinates. * * @param int $columnIndex1 Numeric column coordinate of the first cell * @param int $row1 Numeric row coordinate of the first cell * @param int $columnIndex2 Numeric column coordinate of the last cell * @param int $row2 Numeric row coordinate of the last cell * * @throws Exception * * @return Worksheet */ public function unmergeCellsByColumnAndRow($columnIndex1, $row1, $columnIndex2, $row2) { $cellRange = Coordinate::stringFromColumnIndex($columnIndex1) . $row1 . ':' . Coordinate::stringFromColumnIndex($columnIndex2) . $row2; return $this->unmergeCells($cellRange); } /** * Get merge cells array. * * @return array[] */ public function getMergeCells() { return $this->mergeCells; } /** * Set merge cells array for the entire sheet. Use instead mergeCells() to merge * a single cell range. * * @param array $pValue * * @return Worksheet */ public function setMergeCells(array $pValue) { $this->mergeCells = $pValue; return $this; } /** * Set protection on a cell range. * * @param string $pRange Cell (e.g. A1) or cell range (e.g. A1:E1) * @param string $pPassword Password to unlock the protection * @param bool $pAlreadyHashed If the password has already been hashed, set this to true * * @return Worksheet */ public function protectCells($pRange, $pPassword, $pAlreadyHashed = false) { // Uppercase coordinate $pRange = strtoupper($pRange); if (!$pAlreadyHashed) { $pPassword = Shared\PasswordHasher::hashPassword($pPassword); } $this->protectedCells[$pRange] = $pPassword; return $this; } /** * Set protection on a cell range by using numeric cell coordinates. 
* * @param int $columnIndex1 Numeric column coordinate of the first cell * @param int $row1 Numeric row coordinate of the first cell * @param int $columnIndex2 Numeric column coordinate of the last cell * @param int $row2 Numeric row coordinate of the last cell * @param string $password Password to unlock the protection * @param bool $alreadyHashed If the password has already been hashed, set this to true * * @return Worksheet */ public function protectCellsByColumnAndRow($columnIndex1, $row1, $columnIndex2, $row2, $password, $alreadyHashed = false) { $cellRange = Coordinate::stringFromColumnIndex($columnIndex1) . $row1 . ':' . Coordinate::stringFromColumnIndex($columnIndex2) . $row2; return $this->protectCells($cellRange, $password, $alreadyHashed); } /** * Remove protection on a cell range. * * @param string $pRange Cell (e.g. A1) or cell range (e.g. A1:E1) * * @throws Exception * * @return Worksheet */ public function unprotectCells($pRange) { // Uppercase coordinate $pRange = strtoupper($pRange); if (isset($this->protectedCells[$pRange])) { unset($this->protectedCells[$pRange]); } else { throw new Exception('Cell range ' . $pRange . ' not known as protected.'); } return $this; } /** * Remove protection on a cell range by using numeric cell coordinates. * * @param int $columnIndex1 Numeric column coordinate of the first cell * @param int $row1 Numeric row coordinate of the first cell * @param int $columnIndex2 Numeric column coordinate of the last cell * @param int $row2 Numeric row coordinate of the last cell * * @throws Exception * * @return Worksheet */ public function unprotectCellsByColumnAndRow($columnIndex1, $row1, $columnIndex2, $row2) { $cellRange = Coordinate::stringFromColumnIndex($columnIndex1) . $row1 . ':' . Coordinate::stringFromColumnIndex($columnIndex2) . $row2; return $this->unprotectCells($cellRange); } /** * Get protected cells. * * @return array[] */ public function getProtectedCells() { return $this->protectedCells; } /** * Get Autofilter. 
* * @return AutoFilter */ public function getAutoFilter() { return $this->autoFilter; } /** * Set AutoFilter. * * @param AutoFilter|string $pValue * A simple string containing a Cell range like 'A1:E10' is permitted for backward compatibility * * @throws Exception * * @return Worksheet */ public function setAutoFilter($pValue) { if (is_string($pValue)) { $this->autoFilter->setRange($pValue); } elseif (is_object($pValue) && ($pValue instanceof AutoFilter)) { $this->autoFilter = $pValue; } return $this; } /** * Set Autofilter Range by using numeric cell coordinates. * * @param int $columnIndex1 Numeric column coordinate of the first cell * @param int $row1 Numeric row coordinate of the first cell * @param int $columnIndex2 Numeric column coordinate of the second cell * @param int $row2 Numeric row coordinate of the second cell * * @throws Exception * * @return Worksheet */ public function setAutoFilterByColumnAndRow($columnIndex1, $row1, $columnIndex2, $row2) { return $this->setAutoFilter( Coordinate::stringFromColumnIndex($columnIndex1) . $row1 . ':' . Coordinate::stringFromColumnIndex($columnIndex2) . $row2 ); } /** * Remove autofilter. * * @return Worksheet */ public function removeAutoFilter() { $this->autoFilter->setRange(null); return $this; } /** * Get Freeze Pane. * * @return string */ public function getFreezePane() { return $this->freezePane; } /** * Freeze Pane. 
 *
 * Examples:
 *
 *     - A2 will freeze the rows above cell A2 (i.e row 1)
 *     - B1 will freeze the columns to the left of cell B1 (i.e column A)
 *     - B2 will freeze the rows above and to the left of cell B2 (i.e row 1 and column A)
 *
 * @param null|string $cell Position of the split
 * @param null|string $topLeftCell default position of the right bottom pane
 *
 * @throws Exception
 *
 * @return Worksheet
 */
public function freezePane($cell, $topLeftCell = null)
{
    if (is_string($cell) && Coordinate::coordinateIsRange($cell)) {
        throw new Exception('Freeze pane can not be set on a range of cells.');
    }

    // Default the top-left visible cell of the bottom-right pane to the
    // split position itself when the caller did not specify one.
    if ($cell !== null && $topLeftCell === null) {
        $coordinate = Coordinate::coordinateFromString($cell);
        $topLeftCell = $coordinate[0] . $coordinate[1];
    }

    $this->freezePane = $cell;
    $this->topLeftCell = $topLeftCell;

    return $this;
}

/**
 * Freeze Pane by using numeric cell coordinates.
 *
 * @param int $columnIndex Numeric column coordinate of the cell
 * @param int $row Numeric row coordinate of the cell
 *
 * @return Worksheet
 */
public function freezePaneByColumnAndRow($columnIndex, $row)
{
    return $this->freezePane(Coordinate::stringFromColumnIndex($columnIndex) . $row);
}

/**
 * Unfreeze Pane.
 *
 * @return Worksheet
 */
public function unfreezePane()
{
    return $this->freezePane(null);
}

/**
 * Get the default position of the right bottom pane.
 *
 * @return null|string Cell address (e.g. 'B2') of the top-left visible cell of the
 *                     bottom-right pane, or null when no pane is frozen
 */
public function getTopLeftCell()
{
    return $this->topLeftCell;
}

/**
 * Insert a new row, updating all possible related data.
 *
 * @param int $pBefore Insert before this one
 * @param int $pNumRows Number of rows to insert
 *
 * @throws Exception
 *
 * @return Worksheet
 */
public function insertNewRowBefore($pBefore, $pNumRows = 1)
{
    if ($pBefore >= 1) {
        $objReferenceHelper = ReferenceHelper::getInstance();
        $objReferenceHelper->insertNewBefore('A' . $pBefore, 0, $pNumRows, $this);
    } else {
        throw new Exception('Rows can only be inserted before at least row 1.');
    }

    return $this;
}

/**
 * Insert a new column, updating all possible related data.
* * @param string $pBefore Insert before this one, eg: 'A' * @param int $pNumCols Number of columns to insert * * @throws Exception * * @return Worksheet */ public function insertNewColumnBefore($pBefore, $pNumCols = 1) { if (!is_numeric($pBefore)) { $objReferenceHelper = ReferenceHelper::getInstance(); $objReferenceHelper->insertNewBefore($pBefore . '1', $pNumCols, 0, $this); } else { throw new Exception('Column references should not be numeric.'); } return $this; } /** * Insert a new column, updating all possible related data. * * @param int $beforeColumnIndex Insert before this one (numeric column coordinate of the cell) * @param int $pNumCols Number of columns to insert * * @throws Exception * * @return Worksheet */ public function insertNewColumnBeforeByIndex($beforeColumnIndex, $pNumCols = 1) { if ($beforeColumnIndex >= 1) { return $this->insertNewColumnBefore(Coordinate::stringFromColumnIndex($beforeColumnIndex), $pNumCols); } throw new Exception('Columns can only be inserted before at least column A (1).'); } /** * Delete a row, updating all possible related data. * * @param int $pRow Remove starting with this one * @param int $pNumRows Number of rows to remove * * @throws Exception * * @return Worksheet */ public function removeRow($pRow, $pNumRows = 1) { if ($pRow >= 1) { $highestRow = $this->getHighestDataRow(); $objReferenceHelper = ReferenceHelper::getInstance(); $objReferenceHelper->insertNewBefore('A' . ($pRow + $pNumRows), 0, -$pNumRows, $this); for ($r = 0; $r < $pNumRows; ++$r) { $this->getCellCollection()->removeRow($highestRow); --$highestRow; } } else { throw new Exception('Rows to be deleted should at least start from row 1.'); } return $this; } /** * Remove a column, updating all possible related data. 
* * @param string $pColumn Remove starting with this one, eg: 'A' * @param int $pNumCols Number of columns to remove * * @throws Exception * * @return Worksheet */ public function removeColumn($pColumn, $pNumCols = 1) { if (!is_numeric($pColumn)) { $highestColumn = $this->getHighestDataColumn(); $pColumn = Coordinate::stringFromColumnIndex(Coordinate::columnIndexFromString($pColumn) + $pNumCols); $objReferenceHelper = ReferenceHelper::getInstance(); $objReferenceHelper->insertNewBefore($pColumn . '1', -$pNumCols, 0, $this); for ($c = 0; $c < $pNumCols; ++$c) { $this->getCellCollection()->removeColumn($highestColumn); $highestColumn = Coordinate::stringFromColumnIndex(Coordinate::columnIndexFromString($highestColumn) - 1); } } else { throw new Exception('Column references should not be numeric.'); } return $this; } /** * Remove a column, updating all possible related data. * * @param int $columnIndex Remove starting with this one (numeric column coordinate of the cell) * @param int $numColumns Number of columns to remove * * @throws Exception * * @return Worksheet */ public function removeColumnByIndex($columnIndex, $numColumns = 1) { if ($columnIndex >= 1) { return $this->removeColumn(Coordinate::stringFromColumnIndex($columnIndex), $numColumns); } throw new Exception('Columns to be deleted should at least start from column A (1)'); } /** * Show gridlines? * * @return bool */ public function getShowGridlines() { return $this->showGridlines; } /** * Set show gridlines. * * @param bool $pValue Show gridlines (true/false) * * @return Worksheet */ public function setShowGridlines($pValue) { $this->showGridlines = $pValue; return $this; } /** * Print gridlines? * * @return bool */ public function getPrintGridlines() { return $this->printGridlines; } /** * Set print gridlines. 
* * @param bool $pValue Print gridlines (true/false) * * @return Worksheet */ public function setPrintGridlines($pValue) { $this->printGridlines = $pValue; return $this; } /** * Show row and column headers? * * @return bool */ public function getShowRowColHeaders() { return $this->showRowColHeaders; } /** * Set show row and column headers. * * @param bool $pValue Show row and column headers (true/false) * * @return Worksheet */ public function setShowRowColHeaders($pValue) { $this->showRowColHeaders = $pValue; return $this; } /** * Show summary below? (Row/Column outlining). * * @return bool */ public function getShowSummaryBelow() { return $this->showSummaryBelow; } /** * Set show summary below. * * @param bool $pValue Show summary below (true/false) * * @return Worksheet */ public function setShowSummaryBelow($pValue) { $this->showSummaryBelow = $pValue; return $this; } /** * Show summary right? (Row/Column outlining). * * @return bool */ public function getShowSummaryRight() { return $this->showSummaryRight; } /** * Set show summary right. * * @param bool $pValue Show summary right (true/false) * * @return Worksheet */ public function setShowSummaryRight($pValue) { $this->showSummaryRight = $pValue; return $this; } /** * Get comments. * * @return Comment[] */ public function getComments() { return $this->comments; } /** * Set comments array for the entire sheet. * * @param Comment[] $pValue * * @return Worksheet */ public function setComments(array $pValue) { $this->comments = $pValue; return $this; } /** * Get comment for cell. 
* * @param string $pCellCoordinate Cell coordinate to get comment for, eg: 'A1' * * @throws Exception * * @return Comment */ public function getComment($pCellCoordinate) { // Uppercase coordinate $pCellCoordinate = strtoupper($pCellCoordinate); if (Coordinate::coordinateIsRange($pCellCoordinate)) { throw new Exception('Cell coordinate string can not be a range of cells.'); } elseif (strpos($pCellCoordinate, '$') !== false) { throw new Exception('Cell coordinate string must not be absolute.'); } elseif ($pCellCoordinate == '') { throw new Exception('Cell coordinate can not be zero-length string.'); } // Check if we already have a comment for this cell. if (isset($this->comments[$pCellCoordinate])) { return $this->comments[$pCellCoordinate]; } // If not, create a new comment. $newComment = new Comment(); $this->comments[$pCellCoordinate] = $newComment; return $newComment; } /** * Get comment for cell by using numeric cell coordinates. * * @param int $columnIndex Numeric column coordinate of the cell * @param int $row Numeric row coordinate of the cell * * @return Comment */ public function getCommentByColumnAndRow($columnIndex, $row) { return $this->getComment(Coordinate::stringFromColumnIndex($columnIndex) . $row); } /** * Get active cell. * * @return string Example: 'A1' */ public function getActiveCell() { return $this->activeCell; } /** * Get selected cells. * * @return string */ public function getSelectedCells() { return $this->selectedCells; } /** * Selected cell. * * @param string $pCoordinate Cell (i.e. A1) * * @return Worksheet */ public function setSelectedCell($pCoordinate) { return $this->setSelectedCells($pCoordinate); } /** * Select a range of cells. 
     * @param string $pCoordinate Cell range, examples: 'A1', 'B2:G5', 'A:C', '3:6'
     *
     * @return Worksheet
     */
    public function setSelectedCells($pCoordinate)
    {
        // Uppercase coordinate
        $pCoordinate = strtoupper($pCoordinate);

        // Convert 'A' to 'A:A'
        $pCoordinate = preg_replace('/^([A-Z]+)$/', '${1}:${1}', $pCoordinate);

        // Convert '1' to '1:1'
        $pCoordinate = preg_replace('/^(\d+)$/', '${1}:${1}', $pCoordinate);

        // Convert 'A:C' to 'A1:C1048576'
        $pCoordinate = preg_replace('/^([A-Z]+):([A-Z]+)$/', '${1}1:${2}1048576', $pCoordinate);

        // Convert '1:3' to 'A1:XFD3'
        $pCoordinate = preg_replace('/^(\d+):(\d+)$/', 'A${1}:XFD${2}', $pCoordinate);

        if (Coordinate::coordinateIsRange($pCoordinate)) {
            // For a range, the active cell becomes the first (top-left) cell of the range
            list($first) = Coordinate::splitRange($pCoordinate);
            $this->activeCell = $first[0];
        } else {
            $this->activeCell = $pCoordinate;
        }
        $this->selectedCells = $pCoordinate;

        return $this;
    }

    /**
     * Select a cell by using numeric cell coordinates.
     *
     * @param int $columnIndex Numeric column coordinate of the cell
     * @param int $row Numeric row coordinate of the cell
     *
     * @throws Exception
     *
     * @return Worksheet
     */
    public function setSelectedCellByColumnAndRow($columnIndex, $row)
    {
        return $this->setSelectedCells(Coordinate::stringFromColumnIndex($columnIndex) . $row);
    }

    /**
     * Get right-to-left.
     *
     * @return bool
     */
    public function getRightToLeft()
    {
        return $this->rightToLeft;
    }

    /**
     * Set right-to-left.
     *
     * @param bool $value Right-to-left true/false
     *
     * @return Worksheet
     */
    public function setRightToLeft($value)
    {
        $this->rightToLeft = $value;

        return $this;
    }

    /**
     * Fill worksheet from values in array.
     *
     * @param array $source Source array
     * @param mixed $nullValue Value in source array that stands for blank cell
     * @param string $startCell Insert array starting from this cell address as the top left coordinate
     * @param bool $strictNullComparison Apply strict comparison when testing for null values in the array
     *
     * @throws Exception
     *
     * @return Worksheet
     */
    public function fromArray(array $source, $nullValue = null, $startCell = 'A1', $strictNullComparison = false)
    {
        // Convert a 1-D array to 2-D (for ease of looping)
        if (!is_array(end($source))) {
            $source = [$source];
        }

        // start coordinate
        list($startColumn, $startRow) = Coordinate::coordinateFromString($startCell);

        // Loop through $source; column letters advance via PHP's documented string increment ('Z' -> 'AA')
        foreach ($source as $rowData) {
            $currentColumn = $startColumn;
            foreach ($rowData as $cellValue) {
                if ($strictNullComparison) {
                    if ($cellValue !== $nullValue) {
                        // Set cell value
                        $this->getCell($currentColumn . $startRow)->setValue($cellValue);
                    }
                } else {
                    if ($cellValue != $nullValue) {
                        // Set cell value
                        $this->getCell($currentColumn . $startRow)->setValue($cellValue);
                    }
                }
                ++$currentColumn;
            }
            ++$startRow;
        }

        return $this;
    }

    /**
     * Create array from a range of cells.
     *
     * @param string $pRange Range of cells (i.e. "A1:B10"), or just one cell (i.e. "A1")
     * @param mixed $nullValue Value returned in the array entry if a cell doesn't exist
     * @param bool $calculateFormulas Should formulas be calculated?
     * @param bool $formatData Should formatting be applied to cell values?
     * @param bool $returnCellRef False - Return a simple array of rows and columns indexed by number counting from zero
     *                               True - Return rows and columns indexed by their actual row and column IDs
     *
     * @return array
     */
    public function rangeToArray($pRange, $nullValue = null, $calculateFormulas = true, $formatData = true, $returnCellRef = false)
    {
        // Returnvalue
        $returnValue = [];
        //    Identify the range that we need to extract from the worksheet
        list($rangeStart, $rangeEnd) = Coordinate::rangeBoundaries($pRange);
        $minCol = Coordinate::stringFromColumnIndex($rangeStart[0]);
        $minRow = $rangeStart[1];
        $maxCol = Coordinate::stringFromColumnIndex($rangeEnd[0]);
        $maxRow = $rangeEnd[1];

        // Make $maxCol exclusive: string increment gives the column AFTER the range end,
        // so the inner loop below can use != (<= does not work for multi-letter columns)
        ++$maxCol;
        // Loop through rows
        $r = -1;
        for ($row = $minRow; $row <= $maxRow; ++$row) {
            $rRef = ($returnCellRef) ? $row : ++$r;
            $c = -1;
            // Loop through columns in the current row
            for ($col = $minCol; $col != $maxCol; ++$col) {
                $cRef = ($returnCellRef) ? $col : ++$c;
                //    Using getCell() will create a new cell if it doesn't already exist. We don't want that to happen
                //        so we test and retrieve directly against cellCollection
                if ($this->cellCollection->has($col . $row)) {
                    // Cell exists
                    $cell = $this->cellCollection->get($col . $row);
                    if ($cell->getValue() !== null) {
                        if ($cell->getValue() instanceof RichText) {
                            $returnValue[$rRef][$cRef] = $cell->getValue()->getPlainText();
                        } else {
                            if ($calculateFormulas) {
                                $returnValue[$rRef][$cRef] = $cell->getCalculatedValue();
                            } else {
                                $returnValue[$rRef][$cRef] = $cell->getValue();
                            }
                        }

                        if ($formatData) {
                            $style = $this->parent->getCellXfByIndex($cell->getXfIndex());
                            $returnValue[$rRef][$cRef] = NumberFormat::toFormattedString(
                                $returnValue[$rRef][$cRef],
                                ($style && $style->getNumberFormat()) ? $style->getNumberFormat()->getFormatCode() : NumberFormat::FORMAT_GENERAL
                            );
                        }
                    } else {
                        // Cell holds a NULL
                        $returnValue[$rRef][$cRef] = $nullValue;
                    }
                } else {
                    // Cell doesn't exist
                    $returnValue[$rRef][$cRef] = $nullValue;
                }
            }
        }

        // Return
        return $returnValue;
    }

    /**
     * Create array from a range of cells.
     *
     * @param string $pNamedRange Name of the Named Range
     * @param mixed $nullValue Value returned in the array entry if a cell doesn't exist
     * @param bool $calculateFormulas Should formulas be calculated?
     * @param bool $formatData Should formatting be applied to cell values?
     * @param bool $returnCellRef False - Return a simple array of rows and columns indexed by number counting from zero
     *                                True - Return rows and columns indexed by their actual row and column IDs
     *
     * @throws Exception if the named range does not exist
     *
     * @return array
     */
    public function namedRangeToArray($pNamedRange, $nullValue = null, $calculateFormulas = true, $formatData = true, $returnCellRef = false)
    {
        $namedRange = NamedRange::resolveRange($pNamedRange, $this);
        if ($namedRange !== null) {
            // The named range may live on a different worksheet than $this
            $pWorkSheet = $namedRange->getWorksheet();
            $pCellRange = $namedRange->getRange();

            return $pWorkSheet->rangeToArray($pCellRange, $nullValue, $calculateFormulas, $formatData, $returnCellRef);
        }

        throw new Exception('Named Range ' . $pNamedRange . ' does not exist.');
    }

    /**
     * Create array from worksheet.
     *
     * @param mixed $nullValue Value returned in the array entry if a cell doesn't exist
     * @param bool $calculateFormulas Should formulas be calculated?
     * @param bool $formatData Should formatting be applied to cell values?
     * @param bool $returnCellRef False - Return a simple array of rows and columns indexed by number counting from zero
     *                               True - Return rows and columns indexed by their actual row and column IDs
     *
     * @return array
     */
    public function toArray($nullValue = null, $calculateFormulas = true, $formatData = true, $returnCellRef = false)
    {
        // Garbage collect so the highest row/column reflect only cells that still exist
        $this->garbageCollect();

        //    Identify the range that we need to extract from the worksheet
        $maxCol = $this->getHighestColumn();
        $maxRow = $this->getHighestRow();

        // Return
        return $this->rangeToArray('A1:' . $maxCol . $maxRow, $nullValue, $calculateFormulas, $formatData, $returnCellRef);
    }

    /**
     * Get row iterator.
     *
     * @param int $startRow The row number at which to start iterating
     * @param int $endRow The row number at which to stop iterating
     *
     * @return RowIterator
     */
    public function getRowIterator($startRow = 1, $endRow = null)
    {
        return new RowIterator($this, $startRow, $endRow);
    }

    /**
     * Get column iterator.
     *
     * @param string $startColumn The column address at which to start iterating
     * @param string $endColumn The column address at which to stop iterating
     *
     * @return ColumnIterator
     */
    public function getColumnIterator($startColumn = 'A', $endColumn = null)
    {
        return new ColumnIterator($this, $startColumn, $endColumn);
    }

    /**
     * Run PhpSpreadsheet garbage collector.
     *
     * Recomputes the cached highest row/column from the surviving cells plus
     * any explicit row/column dimensions.
     *
     * @return Worksheet
     */
    public function garbageCollect()
    {
        // Flush cache
        $this->cellCollection->get('A1');

        // Lookup highest column and highest row if cells are cleaned
        $colRow = $this->cellCollection->getHighestRowAndColumn();
        $highestRow = $colRow['row'];
        $highestColumn = Coordinate::columnIndexFromString($colRow['column']);

        // Loop through column dimensions
        foreach ($this->columnDimensions as $dimension) {
            $highestColumn = max($highestColumn, Coordinate::columnIndexFromString($dimension->getColumnIndex()));
        }

        // Loop through row dimensions
        foreach ($this->rowDimensions as $dimension) {
            $highestRow = max($highestRow, $dimension->getRowIndex());
        }

        // Cache values
        if ($highestColumn < 1) {
            $this->cachedHighestColumn = 'A';
        } else {
            $this->cachedHighestColumn = Coordinate::stringFromColumnIndex($highestColumn);
        }
        $this->cachedHighestRow = $highestRow;

        // Return
        return $this;
    }

    /**
     * Get hash code.
     *
     * Recomputed lazily: only when the sheet has been flagged dirty.
     *
     * @return string Hash code
     */
    public function getHashCode()
    {
        if ($this->dirty) {
            $this->hash = md5($this->title . $this->autoFilter . ($this->protection->isProtectionEnabled() ? 't' : 'f') . __CLASS__);
            $this->dirty = false;
        }

        return $this->hash;
    }

    /**
     * Extract worksheet title from range.
     *
     * Example: extractSheetTitle("testSheet!A1") ==> 'A1'
     * Example: extractSheetTitle("'testSheet 1'!A1", true) ==> ['testSheet 1', 'A1'];
     *
     * @param string $pRange Range to extract title from
     * @param bool $returnRange Return range? (see example)
     *
     * @return mixed
     */
    public static function extractSheetTitle($pRange, $returnRange = false)
    {
        // Sheet title included? (search from the right: the sheet name itself may contain '!')
        if (($sep = strrpos($pRange, '!')) === false) {
            return $returnRange ? ['', $pRange] : '';
        }

        if ($returnRange) {
            return [substr($pRange, 0, $sep), substr($pRange, $sep + 1)];
        }

        return substr($pRange, $sep + 1);
    }

    /**
     * Get hyperlink.
     *
     * Creates (and registers) an empty Hyperlink if none exists for the cell yet.
     *
     * @param string $pCellCoordinate Cell coordinate to get hyperlink for, eg: 'A1'
     *
     * @return Hyperlink
     */
    public function getHyperlink($pCellCoordinate)
    {
        // return hyperlink if we already have one
        if (isset($this->hyperlinkCollection[$pCellCoordinate])) {
            return $this->hyperlinkCollection[$pCellCoordinate];
        }

        // else create hyperlink
        $this->hyperlinkCollection[$pCellCoordinate] = new Hyperlink();

        return $this->hyperlinkCollection[$pCellCoordinate];
    }

    /**
     * Set hyperlink.
     *
     * Passing null removes any hyperlink registered for the cell.
     *
     * @param string $pCellCoordinate Cell coordinate to insert hyperlink, eg: 'A1'
     * @param null|Hyperlink $pHyperlink
     *
     * @return Worksheet
     */
    public function setHyperlink($pCellCoordinate, Hyperlink $pHyperlink = null)
    {
        if ($pHyperlink === null) {
            unset($this->hyperlinkCollection[$pCellCoordinate]);
        } else {
            $this->hyperlinkCollection[$pCellCoordinate] = $pHyperlink;
        }

        return $this;
    }

    /**
     * Hyperlink at a specific coordinate exists?
     *
     * @param string $pCoordinate eg: 'A1'
     *
     * @return bool
     */
    public function hyperlinkExists($pCoordinate)
    {
        return isset($this->hyperlinkCollection[$pCoordinate]);
    }

    /**
     * Get collection of hyperlinks.
     *
     * @return Hyperlink[]
     */
    public function getHyperlinkCollection()
    {
        return $this->hyperlinkCollection;
    }

    /**
     * Get data validation.
     *
     * Creates (and registers) an empty DataValidation if none exists for the cell yet.
     *
     * @param string $pCellCoordinate Cell coordinate to get data validation for, eg: 'A1'
     *
     * @return DataValidation
     */
    public function getDataValidation($pCellCoordinate)
    {
        // return data validation if we already have one
        if (isset($this->dataValidationCollection[$pCellCoordinate])) {
            return $this->dataValidationCollection[$pCellCoordinate];
        }

        // else create data validation
        $this->dataValidationCollection[$pCellCoordinate] = new DataValidation();

        return $this->dataValidationCollection[$pCellCoordinate];
    }

    /**
     * Set data validation.
     *
     * Passing null removes any data validation registered for the cell.
     *
     * @param string $pCellCoordinate Cell coordinate to insert data validation, eg: 'A1'
     * @param null|DataValidation $pDataValidation
     *
     * @return Worksheet
     */
    public function setDataValidation($pCellCoordinate, DataValidation $pDataValidation = null)
    {
        if ($pDataValidation === null) {
            unset($this->dataValidationCollection[$pCellCoordinate]);
        } else {
            $this->dataValidationCollection[$pCellCoordinate] = $pDataValidation;
        }

        return $this;
    }

    /**
     * Data validation at a specific coordinate exists?
     *
     * @param string $pCoordinate eg: 'A1'
     *
     * @return bool
     */
    public function dataValidationExists($pCoordinate)
    {
        return isset($this->dataValidationCollection[$pCoordinate]);
    }

    /**
     * Get collection of data validations.
     *
     * @return DataValidation[]
     */
    public function getDataValidationCollection()
    {
        return $this->dataValidationCollection;
    }

    /**
     * Accepts a range, returning it as a range that falls within the current highest row and column of the worksheet.
     *
     * @param string $range Space-separated list of range blocks, each clamped independently
     *
     * @return string Adjusted range value
     */
    public function shrinkRangeToFit($range)
    {
        $maxCol = $this->getHighestColumn();
        $maxRow = $this->getHighestRow();
        $maxCol = Coordinate::columnIndexFromString($maxCol);

        $rangeBlocks = explode(' ', $range);
        foreach ($rangeBlocks as &$rangeSet) {
            $rangeBoundaries = Coordinate::getRangeBoundaries($rangeSet);

            // Clamp both corners of the block to the sheet's highest column/row
            if (Coordinate::columnIndexFromString($rangeBoundaries[0][0]) > $maxCol) {
                $rangeBoundaries[0][0] = Coordinate::stringFromColumnIndex($maxCol);
            }
            if ($rangeBoundaries[0][1] > $maxRow) {
                $rangeBoundaries[0][1] = $maxRow;
            }
            if (Coordinate::columnIndexFromString($rangeBoundaries[1][0]) > $maxCol) {
                $rangeBoundaries[1][0] = Coordinate::stringFromColumnIndex($maxCol);
            }
            if ($rangeBoundaries[1][1] > $maxRow) {
                $rangeBoundaries[1][1] = $maxRow;
            }
            $rangeSet = $rangeBoundaries[0][0] . $rangeBoundaries[0][1] . ':' . $rangeBoundaries[1][0] . $rangeBoundaries[1][1];
        }
        // Break the by-reference binding left over from the foreach above
        unset($rangeSet);
        $stRange = implode(' ', $rangeBlocks);

        return $stRange;
    }

    /**
     * Get tab color.
     *
     * Lazily instantiates a Color on first access.
     *
     * @return Color
     */
    public function getTabColor()
    {
        if ($this->tabColor === null) {
            $this->tabColor = new Color();
        }

        return $this->tabColor;
    }

    /**
     * Reset tab color.
     *
     * @return Worksheet
     */
    public function resetTabColor()
    {
        $this->tabColor = null;
        // NOTE(review): unset() also removes the declared property from the object,
        // not just its value — confirm this is intentional rather than `= null` alone
        unset($this->tabColor);

        return $this;
    }

    /**
     * Tab color set?
     *
     * @return bool
     */
    public function isTabColorSet()
    {
        return $this->tabColor !== null;
    }

    /**
     * Copy worksheet (!= clone!).
     *
     * @return Worksheet
     */
    public function copy()
    {
        $copied = clone $this;

        return $copied;
    }

    /**
     * Implement PHP __clone to create a deep clone, not just a shallow copy.
     */
    public function __clone()
    {
        foreach ($this as $key => $val) {
            // Never clone the parent spreadsheet — keep the back-reference shared
            if ($key == 'parent') {
                continue;
            }

            if (is_object($val) || (is_array($val))) {
                if ($key == 'cellCollection') {
                    $newCollection = $this->cellCollection->cloneCellCollection($this);
                    $this->cellCollection = $newCollection;
                } elseif ($key == 'drawingCollection') {
                    $currentCollection = $this->drawingCollection;
                    $this->drawingCollection = new ArrayObject();
                    foreach ($currentCollection as $item) {
                        if (is_object($item)) {
                            $newDrawing = clone $item;
                            // NOTE(review): the clone is never appended to drawingCollection here;
                            // presumably setWorksheet() registers it as a side effect — verify
                            $newDrawing->setWorksheet($this);
                        }
                    }
                } elseif (($key == 'autoFilter') && ($this->autoFilter instanceof AutoFilter)) {
                    $newAutoFilter = clone $this->autoFilter;
                    $this->autoFilter = $newAutoFilter;
                    $this->autoFilter->setParent($this);
                } else {
                    // Generic deep copy for everything else that is an object or array
                    $this->{$key} = unserialize(serialize($val));
                }
            }
        }
    }

    /**
     * Define the code name of the sheet.
     *
     * @param string $pValue Same rule as Title minus space not allowed (but, like Excel, change
     *                       silently space to underscore)
     * @param bool $validate False to skip validation of new title. WARNING: This should only be set
     *                       at parse time (by Readers), where titles can be assumed to be valid.
     *
     * @throws Exception
     *
     * @return Worksheet
     */
    public function setCodeName($pValue, $validate = true)
    {
        // Is this a 'rename' or not?
        if ($this->getCodeName() == $pValue) {
            return $this;
        }

        if ($validate) {
            $pValue = str_replace(' ', '_', $pValue); //Excel does this automatically without flinching, we are doing the same

            // Syntax check
            // throw an exception if not valid
            self::checkSheetCodeName($pValue);

            // We use the same code that setTitle to find a valid codeName else not using a space (Excel don't like) but a '_'

            if ($this->getParent()) {
                // Is there already such sheet name?
                if ($this->getParent()->sheetCodeNameExists($pValue)) {
                    // Use name, but append with lowest possible integer
                    // (pre-truncate so that name + '_' + suffix stays within Excel's 31-char limit)

                    if (Shared\StringHelper::countCharacters($pValue) > 29) {
                        $pValue = Shared\StringHelper::substring($pValue, 0, 29);
                    }
                    $i = 1;
                    while ($this->getParent()->sheetCodeNameExists($pValue . '_' . $i)) {
                        ++$i;
                        if ($i == 10) {
                            // Suffix grew to two digits — shorten the base name by one more char
                            if (Shared\StringHelper::countCharacters($pValue) > 28) {
                                $pValue = Shared\StringHelper::substring($pValue, 0, 28);
                            }
                        } elseif ($i == 100) {
                            // Suffix grew to three digits
                            if (Shared\StringHelper::countCharacters($pValue) > 27) {
                                $pValue = Shared\StringHelper::substring($pValue, 0, 27);
                            }
                        }
                    }

                    $pValue = $pValue . '_' . $i; // ok, we have a valid name
                }
            }
        }

        $this->codeName = $pValue;

        return $this;
    }

    /**
     * Return the code name of the sheet.
     *
     * @return null|string
     */
    public function getCodeName()
    {
        return $this->codeName;
    }

    /**
     * Sheet has a code name ?
     *
     * @return bool
     */
    public function hasCodeName()
    {
        return $this->codeName !== null;
    }
}
keithbox/AngularJS-CRUD-PHP
vendor/phpoffice/phpspreadsheet/src/PhpSpreadsheet/Worksheet/Worksheet.php
PHP
mit
87,851
import {addClass, hasClass, empty} from './../helpers/dom/element';
import {eventManager as eventManagerObject} from './../eventManager';
import {getRenderer, registerRenderer} from './../renderers';
import {WalkontableCellCoords} from './../3rdparty/walkontable/src/cell/coords';

// Template nodes cloned once per rendered cell; cloneNode() on a prepared
// template is cheaper than building the elements from scratch each render.
var clonableWRAPPER = document.createElement('DIV');
clonableWRAPPER.className = 'htAutocompleteWrapper';

var clonableARROW = document.createElement('DIV');
clonableARROW.className = 'htAutocompleteArrow';
// workaround for https://github.com/handsontable/handsontable/issues/1946
// this is faster than innerHTML. See: https://github.com/handsontable/handsontable/wiki/JavaScript-&-DOM-performance-tips
clonableARROW.appendChild(document.createTextNode(String.fromCharCode(9660)));

// Moves the TD's current markup inside WRAPPER, then re-attaches WRAPPER as
// the TD's only child.
var wrapTdContentWithWrapper = function(TD, WRAPPER) {
  WRAPPER.innerHTML = TD.innerHTML;
  empty(TD);
  TD.appendChild(WRAPPER);
};

/**
 * Autocomplete renderer
 *
 * Delegates text rendering to the 'text' renderer, then decorates the cell
 * with the dropdown arrow and wires up (once per instance) the mousedown
 * handler that opens the editor when the arrow is clicked.
 *
 * @private
 * @renderer AutocompleteRenderer
 * @param {Object} instance Handsontable instance
 * @param {Element} TD Table cell where to render
 * @param {Number} row
 * @param {Number} col
 * @param {String|Number} prop Row object property name
 * @param value Value to render (remember to escape unsafe HTML before inserting to DOM!)
 * @param {Object} cellProperties Cell properites (shared by cell renderer and editor)
 */
function autocompleteRenderer(instance, TD, row, col, prop, value, cellProperties) {
  var WRAPPER = clonableWRAPPER.cloneNode(true); //this is faster than createElement
  var ARROW = clonableARROW.cloneNode(true); //this is faster than createElement

  getRenderer('text')(instance, TD, row, col, prop, value, cellProperties);

  TD.appendChild(ARROW);
  addClass(TD, 'htAutocomplete');

  if (!TD.firstChild) { //http://jsperf.com/empty-node-if-needed
    //otherwise empty fields appear borderless in demo/renderers.html (IE)
    TD.appendChild(document.createTextNode(String.fromCharCode(160))); // workaround for https://github.com/handsontable/handsontable/issues/1946
    //this is faster than innerHTML. See: https://github.com/handsontable/handsontable/wiki/JavaScript-&-DOM-performance-tips
  }

  if (!instance.acArrowListener) {
    var eventManager = eventManagerObject(instance);

    //not very elegant but easy and fast
    // NOTE(review): this listener is registered only once per instance, so the
    // closure permanently captures row/col/TD from whichever cell happened to
    // render first — confirm onCellDblClick is meant to receive those coords
    instance.acArrowListener = function(event) {
      if (hasClass(event.target, 'htAutocompleteArrow')) {
        instance.view.wt.getSetting('onCellDblClick', null, new WalkontableCellCoords(row, col), TD);
      }
    };

    eventManager.addEventListener(instance.rootElement, 'mousedown', instance.acArrowListener);

    //We need to unbind the listener after the table has been destroyed
    instance.addHookOnce('afterDestroy', function() {
      eventManager.destroy();
    });
  }
}

export {autocompleteRenderer};

registerRenderer('autocomplete', autocompleteRenderer);
pingyuanChen/handsontable
src/renderers/autocompleteRenderer.js
JavaScript
mit
2,882
//----------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation.  All rights reserved.
//----------------------------------------------------------------------------

namespace System.ServiceModel.Channels
{
    using System.Collections.Generic;
    using System.Runtime;
    using System.ServiceModel;
    using System.ServiceModel.Description;
    using System.ServiceModel.Security;
    using SR = System.ServiceModel.SR;

    // Layered channel factory that wraps each channel created by the inner
    // factory in a transaction-flow-aware channel of the matching shape.
    sealed class TransactionChannelFactory<TChannel>
        : LayeredChannelFactory<TChannel>, ITransactionChannelManager
    {
        TransactionFlowOption flowIssuedTokens;
        SecurityStandardsManager standardsManager;
        // Per (direction, action) transaction-flow policy table.
        Dictionary<DirectionalAction, TransactionFlowOption> dictionary;
        TransactionProtocol transactionProtocol;
        // When true, a wildcard-action entry may satisfy lookups that miss.
        bool allowWildcardAction;

        public TransactionChannelFactory(
            TransactionProtocol transactionProtocol,
            BindingContext context,
            Dictionary<DirectionalAction, TransactionFlowOption> dictionary,
            bool allowWildcardAction)
            : base(context.Binding, context.BuildInnerChannelFactory<TChannel>())
        {
            this.dictionary = dictionary;
            this.TransactionProtocol = transactionProtocol;
            this.allowWildcardAction = allowWildcardAction;
            this.standardsManager = SecurityStandardsHelper.CreateStandardsManager(this.TransactionProtocol);
        }

        public TransactionProtocol TransactionProtocol
        {
            get
            {
                return this.transactionProtocol;
            }
            set
            {
                // Reject values outside the known protocol set.
                if (!TransactionProtocol.IsDefined(value))
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
                        new ArgumentException(SR.GetString(SR.SFxBadTransactionProtocols)));

                this.transactionProtocol = value;
            }
        }

        public TransactionFlowOption FlowIssuedTokens
        {
            get { return this.flowIssuedTokens; }
            set { this.flowIssuedTokens = value; }
        }

        public SecurityStandardsManager StandardsManager
        {
            get
            {
                return this.standardsManager;
            }
            set
            {
                // Setting null falls back to the protocol-derived default manager.
                this.standardsManager = (value != null ? value : SecurityStandardsHelper.CreateStandardsManager(this.transactionProtocol));
            }
        }

        public IDictionary<DirectionalAction, TransactionFlowOption> Dictionary
        {
            get { return this.dictionary; }
        }

        // Looks up the transaction-flow option for a (direction, action) pair;
        // falls back to the wildcard action when permitted, else NotAllowed.
        public TransactionFlowOption GetTransaction(MessageDirection direction, string action)
        {
            TransactionFlowOption txOption;
            if (!dictionary.TryGetValue(new DirectionalAction(direction, action), out txOption))
            {
                // Fixing this for clients that opted in for lesser validation before flowing out a transaction
                if (this.allowWildcardAction && dictionary.TryGetValue(new DirectionalAction(direction, MessageHeaders.WildcardAction), out txOption))
                {
                    return txOption;
                }
                else
                    return TransactionFlowOption.NotAllowed;
            }
            else
                return txOption;
        }

        protected override TChannel OnCreateChannel(EndpointAddress remoteAddress, Uri via)
        {
            TChannel innerChannel = ((IChannelFactory<TChannel>)InnerChannelFactory).CreateChannel(remoteAddress, via);
            return CreateTransactionChannel(innerChannel);
        }

        // Wraps the inner channel in the transaction channel matching TChannel's
        // shape; throws for unsupported channel shapes.
        TChannel CreateTransactionChannel(TChannel innerChannel)
        {
            if (typeof(TChannel) == typeof(IDuplexSessionChannel))
            {
                return (TChannel)(object)new TransactionDuplexSessionChannel(this, (IDuplexSessionChannel)(object)innerChannel);
            }
            else if (typeof(TChannel) == typeof(IRequestSessionChannel))
            {
                return (TChannel)(object)new TransactionRequestSessionChannel(this, (IRequestSessionChannel)(object)innerChannel);
            }
            else if (typeof(TChannel) == typeof(IOutputSessionChannel))
            {
                return (TChannel)(object)new TransactionOutputSessionChannel(this, (IOutputSessionChannel)(object)innerChannel);
            }
            else if (typeof(TChannel) == typeof(IOutputChannel))
            {
                return (TChannel)(object)new TransactionOutputChannel(this, (IOutputChannel)(object)innerChannel);
            }
            else if (typeof(TChannel) == typeof(IRequestChannel))
            {
                return (TChannel)(object)new TransactionRequestChannel(this, (IRequestChannel)(object)innerChannel);
            }
            else if (typeof(TChannel) == typeof(IDuplexChannel))
            {
                return (TChannel)(object)new TransactionDuplexChannel(this, (IDuplexChannel)(object)innerChannel);
            }
            else
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateChannelTypeNotSupportedException(typeof(TChannel)));
            }
        }

        //===========================================================
        //         Transaction Output Channel classes
        //===========================================================

        sealed class TransactionOutputChannel : TransactionOutputChannelGeneric<IOutputChannel>
        {
            public TransactionOutputChannel(ChannelManagerBase channelManager, IOutputChannel innerChannel)
                : base(channelManager, innerChannel)
            {
            }
        }

        sealed class TransactionRequestChannel : TransactionRequestChannelGeneric<IRequestChannel>
        {
            public TransactionRequestChannel(ChannelManagerBase channelManager, IRequestChannel innerChannel)
                : base(channelManager, innerChannel)
            {
            }
        }

        sealed class TransactionDuplexChannel : TransactionOutputDuplexChannelGeneric<IDuplexChannel>
        {
            public TransactionDuplexChannel(ChannelManagerBase channelManager, IDuplexChannel innerChannel)
                : base(channelManager, innerChannel)
            {
            }
        }

        sealed class TransactionOutputSessionChannel
            : TransactionOutputChannelGeneric<IOutputSessionChannel>, IOutputSessionChannel
        {
            public TransactionOutputSessionChannel(ChannelManagerBase channelManager, IOutputSessionChannel innerChannel)
                : base(channelManager, innerChannel)
            {
            }

            // Session is forwarded untouched from the inner channel.
            public IOutputSession Session
            {
                get { return InnerChannel.Session; }
            }
        }

        sealed class TransactionRequestSessionChannel
            : TransactionRequestChannelGeneric<IRequestSessionChannel>, IRequestSessionChannel
        {
            public TransactionRequestSessionChannel(ChannelManagerBase channelManager, IRequestSessionChannel innerChannel)
                : base(channelManager, innerChannel)
            {
            }

            public IOutputSession Session
            {
                get { return InnerChannel.Session; }
            }
        }

        sealed class TransactionDuplexSessionChannel
            : TransactionOutputDuplexChannelGeneric<IDuplexSessionChannel>, IDuplexSessionChannel
        {
            public TransactionDuplexSessionChannel(ChannelManagerBase channelManager, IDuplexSessionChannel innerChannel)
                : base(channelManager, innerChannel)
            {
            }

            public IDuplexSession Session
            {
                get { return InnerChannel.Session; }
            }
        }
    }

    // Maps a TransactionProtocol to the SecurityStandardsManager that matches
    // its WS-* message security versions.
    static class SecurityStandardsHelper
    {
        static SecurityStandardsManager SecurityStandardsManager2007 =
            CreateStandardsManager(MessageSecurityVersion.WSSecurity11WSTrust13WSSecureConversation13WSSecurityPolicy12);

        static SecurityStandardsManager CreateStandardsManager(MessageSecurityVersion securityVersion)
        {
            return new SecurityStandardsManager(
                securityVersion,
                new WSSecurityTokenSerializer(securityVersion.SecurityVersion,
                securityVersion.TrustVersion,
                securityVersion.SecureConversationVersion,
                false, null, null, null));
        }

        public static SecurityStandardsManager CreateStandardsManager(TransactionProtocol transactionProtocol)
        {
            // Older protocols use the default (2005-era) standards; everything
            // else uses the 2007 WS-Trust 1.3 based manager.
            if (transactionProtocol == TransactionProtocol.WSAtomicTransactionOctober2004 ||
                transactionProtocol == TransactionProtocol.OleTransactions)
            {
                return SecurityStandardsManager.DefaultInstance;
            }
            else
            {
                return SecurityStandardsHelper.SecurityStandardsManager2007;
            }
        }
    }

    //==============================================================
    //                Transaction channel base generic classes
    //==============================================================

    class TransactionOutputChannelGeneric<TChannel> : TransactionChannel<TChannel>, IOutputChannel
        where TChannel : class, IOutputChannel
    {
        public TransactionOutputChannelGeneric(ChannelManagerBase channelManager, TChannel innerChannel)
            : base(channelManager, innerChannel)
        {
        }

        public EndpointAddress RemoteAddress
        {
            get { return InnerChannel.RemoteAddress; }
        }

        public Uri Via
        {
            get { return InnerChannel.Via; }
        }

        public IAsyncResult BeginSend(Message message, AsyncCallback callback, object state)
        {
            return this.BeginSend(message, this.DefaultSendTimeout, callback, state);
        }

        public IAsyncResult BeginSend(Message message, TimeSpan timeout, AsyncCallback asyncCallback, object state)
        {
            TimeoutHelper timeoutHelper = new TimeoutHelper(timeout);
            // Stamp the outgoing message with transaction data before handing
            // it to the inner channel.
            WriteTransactionDataToMessage(message, MessageDirection.Input);
            return InnerChannel.BeginSend(message, timeoutHelper.RemainingTime(), asyncCallback, state);
        }

        public void EndSend(IAsyncResult result)
        {
            InnerChannel.EndSend(result);
        }

        public void Send(Message message)
        {
            this.Send(message, this.DefaultSendTimeout);
        }

        public void Send(Message message, TimeSpan timeout)
        {
            TimeoutHelper timeoutHelper = new TimeoutHelper(timeout);
            WriteTransactionDataToMessage(message, MessageDirection.Input);
            InnerChannel.Send(message, timeoutHelper.RemainingTime());
        }
    }

    class TransactionRequestChannelGeneric<TChannel> : TransactionChannel<TChannel>, IRequestChannel
        where TChannel : class, IRequestChannel
    {
        public TransactionRequestChannelGeneric(ChannelManagerBase channelManager, TChannel innerChannel)
            : base(channelManager, innerChannel)
        {
        }

        public EndpointAddress RemoteAddress
        {
            get { return InnerChannel.RemoteAddress; }
        }

        public Uri Via
        {
            get { return InnerChannel.Via; }
        }

        public IAsyncResult BeginRequest(Message message, AsyncCallback callback, object state)
        {
            return this.BeginRequest(message, this.DefaultSendTimeout, callback, state);
        }

        public IAsyncResult BeginRequest(Message message, TimeSpan timeout, AsyncCallback asyncCallback, object state)
        {
            TimeoutHelper timeoutHelper = new TimeoutHelper(timeout);
            WriteTransactionDataToMessage(message, MessageDirection.Input);
            return InnerChannel.BeginRequest(message, timeoutHelper.RemainingTime(), asyncCallback, state);
        }

        public Message EndRequest(IAsyncResult result)
        {
            Message reply = InnerChannel.EndRequest(result);
            // Harvest any issued tokens carried back on the reply.
            if (reply != null)
                this.ReadIssuedTokens(reply, MessageDirection.Output);
            return reply;
        }

        public Message Request(Message message)
        {
            return this.Request(message, this.DefaultSendTimeout);
        }

        public Message Request(Message message, TimeSpan timeout)
        {
            TimeoutHelper timeoutHelper = new TimeoutHelper(timeout);
            WriteTransactionDataToMessage(message, MessageDirection.Input);
            Message reply = InnerChannel.Request(message, timeoutHelper.RemainingTime());
            if (reply != null)
                this.ReadIssuedTokens(reply, MessageDirection.Output);
            return reply;
        }
    }

    class TransactionOutputDuplexChannelGeneric<TChannel> : TransactionDuplexChannelGeneric<TChannel>
        where TChannel : class, IDuplexChannel
    {
        public TransactionOutputDuplexChannelGeneric(ChannelManagerBase channelManager, TChannel innerChannel)
            : base(channelManager, innerChannel, MessageDirection.Output)
        {
        }
    }
}
sekcheong/referencesource
System.ServiceModel/System/ServiceModel/Channels/TransactionChannelFactory.cs
C#
mit
13,242
module Jekyll
  # A generated page listing all posts that carry a single tag.
  # Rendered with the `tag_index` layout at `<dir>/index.html`.
  class TagIndex < Page
    def initialize(site, base, dir, tag)
      @site = site
      @base = base
      @dir  = dir
      @name = 'index.html'

      self.process(@name)
      self.read_yaml(File.join(base, '_layouts'), 'tag_index.html')
      self.data['tag']   = tag
      self.data['title'] = "Posts Tagged &ldquo;" + tag + "&rdquo;"
    end
  end

  # Generator that emits one TagIndex page per tag used on the site.
  # Does nothing unless a `tag_index` layout is defined.
  class TagGenerator < Generator
    safe true

    def generate(site)
      return unless site.layouts.key?('tag_index')

      site.tags.keys.each do |tag|
        write_tag_index(site, File.join('tags', tag), tag)
      end
    end

    # Builds, renders, and writes the index page for one tag, then registers
    # it with the site so Jekyll tracks it like any other page.
    def write_tag_index(site, dir, tag)
      page = TagIndex.new(site, site.source, dir, tag)
      page.render(site.layouts, site.site_payload)
      page.write(site.dest)
      site.pages << page
    end
  end
end
johnDorian/johnDorian.github.io
_plugins/tag_gen.rb
Ruby
mit
853
// The MIT License (MIT) // Copyright (c) 2013-2016 Rapptz, ThePhD and contributors // Permission is hereby granted, free of charge, to any person obtaining a copy of // this software and associated documentation files (the "Software"), to deal in // the Software without restriction, including without limitation the rights to // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of // the Software, and to permit persons to whom the Software is furnished to do so, // subject to the following conditions: // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS // FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR // COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER // IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN // CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
#ifndef SOL_DEMANGLE_HPP
#define SOL_DEMANGLE_HPP

#include <string>
#include <array>
#include <cctype>

namespace sol {
namespace detail {
#ifdef _MSC_VER
// Derives a readable name for T by parsing the textual signature in
// __FUNCSIG__ (MSVC offers no portable demangling API). The parsing is
// entirely string surgery and depends on MSVC's exact signature layout.
template <typename T>
inline std::string ctti_get_type_name() {
    // Decorations MSVC embeds into signatures that we strip from the result.
    const static std::array<std::string, 7> removals = { { "public:", "private:", "protected:", "struct ", "class ", "`anonymous-namespace'", "`anonymous namespace'" } };
    std::string name = __FUNCSIG__;
    // The template argument appears after "get_type_name<"; 13 is the
    // length of "get_type_name".
    std::size_t start = name.find("get_type_name");
    if (start == std::string::npos)
        start = 0;
    else
        start += 13;
    // Skip the '<' that follows the function name.
    if (start < name.size() - 1)
        start += 1;
    // The argument list ends at the last '>' of the signature.
    std::size_t end = name.find_last_of('>');
    if (end == std::string::npos)
        end = name.size();
    name = name.substr(start, end - start);
    // Drop a leading "struct"/"class" keyword, if present.
    if (name.find("struct", 0) == 0)
        name.replace(0, 6, "", 0);
    if (name.find("class", 0) == 0)
        name.replace(0, 5, "", 0);
    // Trim surrounding blanks.
    while (!name.empty() && std::isblank(name.front()))
        name.erase(name.begin());
    while (!name.empty() && std::isblank(name.back()))
        name.pop_back();
    // Erase every occurrence of each decoration substring.
    for (std::size_t r = 0; r < removals.size(); ++r) {
        auto found = name.find(removals[r]);
        while (found != std::string::npos) {
            name.erase(found, removals[r].size());
            found = name.find(removals[r]);
        }
    }
    return name;
}
#elif defined(__GNUC__) || defined(__clang__)
// GCC/Clang variant: parses __PRETTY_FUNCTION__, which renders template
// arguments as "[with T = <name>; seperator_mark = int]". The extra
// (intentionally unused) "seperator_mark" parameter exists purely as a
// textual landmark so the tail of the argument list can be cut off.
template <typename T, class seperator_mark = int>
inline std::string ctti_get_type_name() {
    const static std::array<std::string, 2> removals = { { "{anonymous}", "(anonymous namespace)" } };
    std::string name = __PRETTY_FUNCTION__;
    // The type name sits between the '=' after '[' and the closing ']'.
    std::size_t start = name.find_first_of('[');
    start = name.find_first_of('=', start);
    std::size_t end = name.find_last_of(']');
    if (end == std::string::npos)
        end = name.size();
    if (start == std::string::npos)
        start = 0;
    if (start < name.size() - 1)
        start += 1;
    name = name.substr(start, end - start);
    // Cut everything from the "seperator_mark" landmark on (including the
    // "; " that precedes it — hence the "- 2").
    start = name.rfind("seperator_mark");
    if (start != std::string::npos) {
        name.erase(start - 2, name.length());
    }
    // Trim surrounding blanks.
    while (!name.empty() && std::isblank(name.front()))
        name.erase(name.begin());
    while (!name.empty() && std::isblank(name.back()))
        name.pop_back();
    // Erase anonymous-namespace markers.
    for (std::size_t r = 0; r < removals.size(); ++r) {
        auto found = name.find(removals[r]);
        while (found != std::string::npos) {
            name.erase(found, removals[r].size());
            found = name.find(removals[r]);
        }
    }
    return name;
}
#else
#error Compiler not supported for demangling
#endif // compilers

// Computes the full (namespace-qualified) name of T. No caching.
template <typename T>
inline std::string demangle_once() {
    std::string realname = ctti_get_type_name<T>();
    return realname;
}

// Computes the unqualified name of T by stripping the leading
// "namespace::..." qualification, while being careful not to cut inside
// template argument lists or inside operator names that contain '<'/'>'.
template <typename T>
inline std::string short_demangle_once() {
    std::string realname = ctti_get_type_name<T>();
    // This isn't the most complete but it'll do for now...?
    // Operators whose spelling contains angle brackets; these must not be
    // mistaken for template-argument delimiters during the scan below.
    static const std::array<std::string, 10> ops = { { "operator<", "operator<<", "operator<<=", "operator<=", "operator>", "operator>>", "operator>>=", "operator>=", "operator->", "operator->*" } };
    // Scan right-to-left; 'level' tracks template-bracket nesting depth.
    // A ':' seen at depth 0 marks the end of the qualification prefix.
    int level = 0;
    std::ptrdiff_t idx = 0;
    for (idx = static_cast<std::ptrdiff_t>(realname.empty() ? 0 : realname.size() - 1); idx > 0; --idx) {
        if (level == 0 && realname[idx] == ':') {
            break;
        }
        bool isleft = realname[idx] == '<';
        bool isright = realname[idx] == '>';
        if (!isleft && !isright)
            continue;
        // If this bracket is actually part of an operator name, skip the
        // whole operator token instead of adjusting the nesting level.
        bool earlybreak = false;
        for (const auto& op : ops) {
            std::size_t nisop = realname.rfind(op, idx);
            if (nisop == std::string::npos)
                continue;
            std::size_t nisopidx = idx - op.size() + 1;
            if (nisop == nisopidx) {
                idx = static_cast<std::ptrdiff_t>(nisopidx);
                earlybreak = true;
            }
            break;
        }
        if (earlybreak) {
            continue;
        }
        // Scanning backwards, '<' closes a level and '>' opens one.
        level += isleft ? -1 : 1;
    }
    // Drop everything up to and including the final "::" separator.
    if (idx > 0) {
        realname.erase(0, realname.length() < static_cast<std::size_t>(idx) ? realname.length() : idx + 1);
    }
    return realname;
}

// Cached full name of T (computed once per type, thread-safe statics).
template <typename T>
inline const std::string& demangle() {
    static const std::string d = demangle_once<T>();
    return d;
}

// Cached short (unqualified) name of T.
template <typename T>
inline const std::string& short_demangle() {
    static const std::string d = short_demangle_once<T>();
    return d;
}
} // detail
} // sol

#endif // SOL_DEMANGLE_HPP
devxkh/FrankE
src/ThirdParty/sol/sol/demangle.hpp
C++
mit
5,420
// Minimal loader test fixture: declares a single global object whose name
// matches the file, so the test harness can verify the script was fetched
// and evaluated.
var one = { name: 'one' };
inodient/summer-mvc
node_modules/dojo/tests/functional/_base/loader/requirejs/urlfetch/one.js
JavaScript
mit
28
/* * Copyright (c) 2003, 2008, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package sun.java2d.opengl; import java.awt.AlphaComposite; import java.awt.GraphicsEnvironment; import java.awt.Rectangle; import java.awt.Transparency; import java.awt.image.ColorModel; import java.awt.image.Raster; import sun.awt.SunHints; import sun.awt.image.PixelConverter; import sun.java2d.pipe.hw.AccelSurface; import sun.java2d.SunGraphics2D; import sun.java2d.SurfaceData; import sun.java2d.SurfaceDataProxy; import sun.java2d.loops.CompositeType; import sun.java2d.loops.GraphicsPrimitive; import sun.java2d.loops.MaskFill; import sun.java2d.loops.SurfaceType; import sun.java2d.pipe.ParallelogramPipe; import sun.java2d.pipe.PixelToParallelogramConverter; import sun.java2d.pipe.RenderBuffer; import sun.java2d.pipe.TextPipe; import static sun.java2d.pipe.BufferedOpCodes.*; import static sun.java2d.opengl.OGLContext.OGLContextCaps.*; /** * This class describes an OpenGL "surface", that is, a region of pixels * managed via OpenGL. An OGLSurfaceData can be tagged with one of three * different SurfaceType objects for the purpose of registering loops, etc. * This diagram shows the hierarchy of OGL SurfaceTypes: * * Any * / \ * OpenGLSurface OpenGLTexture * | * OpenGLSurfaceRTT * * OpenGLSurface * This kind of surface can be rendered to using OpenGL APIs. It is also * possible to copy an OpenGLSurface to another OpenGLSurface (or to itself). * This is typically accomplished by calling MakeContextCurrent(dstSD, srcSD) * and then calling glCopyPixels() (although there are other techniques to * achieve the same goal). * * OpenGLTexture * This kind of surface cannot be rendered to using OpenGL (in the same sense * as in OpenGLSurface). However, it is possible to upload a region of pixels * to an OpenGLTexture object via glTexSubImage2D(). One can also copy a * surface of type OpenGLTexture to an OpenGLSurface by binding the texture * to a quad and then rendering it to the destination surface (this process * is known as "texture mapping"). 
 * OpenGLSurfaceRTT
 * This kind of surface can be thought of as a sort of hybrid between
 * OpenGLSurface and OpenGLTexture, in that one can render to this kind of
 * surface as if it were of type OpenGLSurface, but the process of copying
 * this kind of surface to another is more like an OpenGLTexture.  (Note that
 * "RTT" stands for "render-to-texture".)
 *
 * In addition to these SurfaceType variants, we have also defined some
 * constants that describe in more detail the type of underlying OpenGL
 * surface.  This table helps explain the relationships between those
 * "type" constants and their corresponding SurfaceType:
 *
 * OGL Type          Corresponding SurfaceType
 * --------          -------------------------
 * WINDOW            OpenGLSurface
 * PBUFFER           OpenGLSurface
 * TEXTURE           OpenGLTexture
 * FLIP_BACKBUFFER   OpenGLSurface
 * FBOBJECT          OpenGLSurfaceRTT
 */
public abstract class OGLSurfaceData extends SurfaceData
    implements AccelSurface
{

    /**
     * OGL-specific surface types
     *
     * @see sun.java2d.pipe.hw.AccelSurface
     */
    public static final int PBUFFER  = RT_PLAIN;
    public static final int FBOBJECT = RT_TEXTURE;

    /**
     * Pixel formats
     */
    public static final int PF_INT_ARGB        = 0;
    public static final int PF_INT_ARGB_PRE    = 1;
    public static final int PF_INT_RGB         = 2;
    public static final int PF_INT_RGBX        = 3;
    public static final int PF_INT_BGR         = 4;
    public static final int PF_INT_BGRX        = 5;
    public static final int PF_USHORT_565_RGB  = 6;
    public static final int PF_USHORT_555_RGB  = 7;
    public static final int PF_USHORT_555_RGBX = 8;
    public static final int PF_BYTE_GRAY       = 9;
    public static final int PF_USHORT_GRAY     = 10;
    public static final int PF_3BYTE_BGR       = 11;

    /**
     * SurfaceTypes
     */
    private static final String DESC_OPENGL_SURFACE     = "OpenGL Surface";
    private static final String DESC_OPENGL_SURFACE_RTT =
        "OpenGL Surface (render-to-texture)";
    private static final String DESC_OPENGL_TEXTURE     = "OpenGL Texture";

    // SurfaceType hierarchy used for registering rendering loops; see the
    // class comment for the relationship between these three types.
    static final SurfaceType OpenGLSurface =
        SurfaceType.Any.deriveSubType(DESC_OPENGL_SURFACE,
                                      PixelConverter.ArgbPre.instance);
    static final SurfaceType OpenGLSurfaceRTT =
        OpenGLSurface.deriveSubType(DESC_OPENGL_SURFACE_RTT);
    static final SurfaceType OpenGLTexture =
        SurfaceType.Any.deriveSubType(DESC_OPENGL_TEXTURE);

    /** This will be true if the fbobject system property has been enabled. */
    private static boolean isFBObjectEnabled;

    /** This will be true if the lcdshader system property has been enabled.*/
    private static boolean isLCDShaderEnabled;

    /** This will be true if the biopshader system property has been enabled.*/
    private static boolean isBIOpShaderEnabled;

    /** This will be true if the gradshader system property has been enabled.*/
    private static boolean isGradShaderEnabled;

    // GraphicsConfig this surface was created for; set once in the ctor.
    private OGLGraphicsConfig graphicsConfig;

    // One of the surface type constants (WINDOW/PBUFFER/TEXTURE/...).
    protected int type;

    // these fields are set from the native code when the surface is
    // initialized
    private int nativeWidth, nativeHeight;

    // Shared rendering pipes, installed once by the static initializer below.
    protected static OGLRenderer oglRenderPipe;
    protected static PixelToParallelogramConverter oglTxRenderPipe;
    protected static ParallelogramPipe oglAAPgramPipe;
    protected static OGLTextRenderer oglTextPipe;
    protected static OGLDrawImage oglImagePipe;

    protected native boolean initTexture(long pData,
                                         boolean isOpaque, boolean texNonPow2,
                                         boolean texRect,
                                         int width, int height);
    protected native boolean initFBObject(long pData,
                                          boolean isOpaque, boolean texNonPow2,
                                          boolean texRect,
                                          int width, int height);
    protected native boolean initFlipBackbuffer(long pData);
    protected abstract boolean initPbuffer(long pData, long pConfigInfo,
                                           boolean isOpaque,
                                           int width, int height);

    private native int getTextureTarget(long pData);
    private native int getTextureID(long pData);

    static {
        if (!GraphicsEnvironment.isHeadless()) {
            // fbobject currently enabled by default; use "false" to disable
            String fbo = (String)java.security.AccessController.doPrivileged(
                new sun.security.action.GetPropertyAction(
                    "sun.java2d.opengl.fbobject"));
            isFBObjectEnabled = !"false".equals(fbo);

            // lcdshader currently enabled by default; use "false" to disable
            String lcd = (String)java.security.AccessController.doPrivileged(
                new sun.security.action.GetPropertyAction(
                    "sun.java2d.opengl.lcdshader"));
            isLCDShaderEnabled = !"false".equals(lcd);

            // biopshader currently enabled by default; use "false" to disable
            String biop = (String)java.security.AccessController.doPrivileged(
                new sun.security.action.GetPropertyAction(
                    "sun.java2d.opengl.biopshader"));
            isBIOpShaderEnabled = !"false".equals(biop);

            // gradshader currently enabled by default; use "false" to disable
            String grad = (String)java.security.AccessController.doPrivileged(
                new sun.security.action.GetPropertyAction(
                    "sun.java2d.opengl.gradshader"));
            isGradShaderEnabled = !"false".equals(grad);

            // Build and register the shared OGL rendering pipes.
            OGLRenderQueue rq = OGLRenderQueue.getInstance();
            oglImagePipe = new OGLDrawImage();
            oglTextPipe = new OGLTextRenderer(rq);
            oglRenderPipe = new OGLRenderer(rq);
            if (GraphicsPrimitive.tracingEnabled()) {
                oglTextPipe = oglTextPipe.traceWrap();
                //The wrapped oglRenderPipe will wrap the AA pipe as well...
                //oglAAPgramPipe = oglRenderPipe.traceWrap();
            }
            oglAAPgramPipe = oglRenderPipe.getAAParallelogramPipe();
            oglTxRenderPipe =
                new PixelToParallelogramConverter(oglRenderPipe,
                                                  oglRenderPipe,
                                                  1.0, 0.25, true);

            OGLBlitLoops.register();
            OGLMaskFill.register();
            OGLMaskBlit.register();
        }
    }

    protected OGLSurfaceData(OGLGraphicsConfig gc,
                             ColorModel cm, int type)
    {
        super(getCustomSurfaceType(type), cm);
        this.graphicsConfig = gc;
        this.type = type;
        setBlitProxyKey(gc.getProxyKey());
    }

    @Override
    public SurfaceDataProxy makeProxyFor(SurfaceData srcData) {
        return OGLSurfaceDataProxy.createProxy(srcData, graphicsConfig);
    }

    /**
     * Returns the appropriate SurfaceType corresponding to the given OpenGL
     * surface type constant (e.g. TEXTURE -> OpenGLTexture).
     */
    private static SurfaceType getCustomSurfaceType(int oglType) {
        switch (oglType) {
        case TEXTURE:
            return OpenGLTexture;
        case FBOBJECT:
            return OpenGLSurfaceRTT;
        case PBUFFER:
        default:
            return OpenGLSurface;
        }
    }

    /**
     * Note: This should only be called from the QFT under the AWT lock.
     * This method is kept separate from the initSurface() method below just
     * to keep the code a bit cleaner.
     *
     * Dispatches to the native init routine matching this surface's type and
     * throws OutOfMemoryError if native surface creation fails.
     */
    private void initSurfaceNow(int width, int height) {
        boolean isOpaque = (getTransparency() == Transparency.OPAQUE);
        boolean success = false;

        switch (type) {
        case PBUFFER:
            success = initPbuffer(getNativeOps(),
                                  graphicsConfig.getNativeConfigInfo(),
                                  isOpaque,
                                  width, height);
            break;

        case TEXTURE:
            success = initTexture(getNativeOps(),
                                  isOpaque, isTexNonPow2Available(),
                                  isTexRectAvailable(),
                                  width, height);
            break;

        case FBOBJECT:
            success = initFBObject(getNativeOps(),
                                   isOpaque, isTexNonPow2Available(),
                                   isTexRectAvailable(),
                                   width, height);
            break;

        case FLIP_BACKBUFFER:
            success = initFlipBackbuffer(getNativeOps());
            break;

        default:
            break;
        }

        if (!success) {
            throw new OutOfMemoryError("can't create offscreen surface");
        }
    }

    /**
     * Initializes the appropriate OpenGL offscreen surface based on the value
     * of the type parameter.  If the surface creation fails for any reason,
     * an OutOfMemoryError will be thrown.
     */
    protected void initSurface(final int width, final int height) {
        OGLRenderQueue rq = OGLRenderQueue.getInstance();
        rq.lock();
        try {
            switch (type) {
            case TEXTURE:
            case PBUFFER:
            case FBOBJECT:
                // need to make sure the context is current before
                // creating the texture (or pbuffer, or fbobject)
                OGLContext.setScratchSurface(graphicsConfig);
                break;
            default:
                break;
            }
            // Run the actual init synchronously on the rendering thread.
            rq.flushAndInvokeNow(new Runnable() {
                public void run() {
                    initSurfaceNow(width, height);
                }
            });
        } finally {
            rq.unlock();
        }
    }

    /**
     * Returns the OGLContext for the GraphicsConfig associated with this
     * surface.
     */
    public final OGLContext getContext() {
        return graphicsConfig.getContext();
    }

    /**
     * Returns the OGLGraphicsConfig associated with this surface.
     */
    final OGLGraphicsConfig getOGLGraphicsConfig() {
        return graphicsConfig;
    }

    /**
     * Returns one of the surface type constants defined above.
     */
    public final int getType() {
        return type;
    }

    /**
     * If this surface is backed by a texture object, returns the target
     * for that texture (either GL_TEXTURE_2D or GL_TEXTURE_RECTANGLE_ARB).
     * Otherwise, this method will return zero.
     */
    public final int getTextureTarget() {
        return getTextureTarget(getNativeOps());
    }

    /**
     * If this surface is backed by a texture object, returns the texture ID
     * for that texture.
     * Otherwise, this method will return zero.
     */
    public final int getTextureID() {
        return getTextureID(getNativeOps());
    }

    /**
     * Returns native resource of specified {@code resType} associated with
     * this surface.
     *
     * Specifically, for {@code OGLSurfaceData} this method returns the
     * the following:
     * <pre>
     * TEXTURE - texture id
     * </pre>
     *
     * Note: the resource returned by this method is only valid on the rendering
     * thread.
     *
     * @return native resource of specified type or 0L if
     * such resource doesn't exist or can not be retrieved.
     * @see sun.java2d.pipe.hw.AccelSurface#getNativeResource
     */
    public long getNativeResource(int resType) {
        if (resType == TEXTURE) {
            return getTextureID();
        }
        return 0L;
    }

    // Readback of OGL surface pixels is not supported through this path.
    public Raster getRaster(int x, int y, int w, int h) {
        throw new InternalError("not implemented yet");
    }

    /**
     * For now, we can only render LCD text if:
     *   - the fragment shader extension is available, and
     *   - blending is disabled, and
     *   - the source color is opaque
     *   - and the destination is opaque
     *
     * Eventually, we could enhance the native OGL text rendering code
     * and remove the above restrictions, but that would require significantly
     * more code just to support a few uncommon cases.
     */
    public boolean canRenderLCDText(SunGraphics2D sg2d) {
        return
            graphicsConfig.isCapPresent(CAPS_EXT_LCD_SHADER) &&
            sg2d.compositeState <= SunGraphics2D.COMP_ISCOPY &&
            sg2d.paintState <= SunGraphics2D.PAINT_OPAQUECOLOR &&
            sg2d.surfaceData.getTransparency() == Transparency.OPAQUE;
    }

    /**
     * Installs the draw/fill/shape/text/image pipes on sg2d appropriate for
     * the current composite, paint, antialiasing and transform state.
     */
    public void validatePipe(SunGraphics2D sg2d) {
        TextPipe textpipe;
        boolean validated = false;

        // OGLTextRenderer handles both AA and non-AA text, but
        // only works with the following modes:
        // (Note: For LCD text we only enter this code path if
        // canRenderLCDText() has already validated that the mode is
        // CompositeType.SrcNoEa (opaque color), which will be subsumed
        // by the CompositeType.SrcNoEa (any color) test below.)
        if (/* CompositeType.SrcNoEa (any color) */
            (sg2d.compositeState <= sg2d.COMP_ISCOPY &&
             sg2d.paintState <= sg2d.PAINT_ALPHACOLOR)         ||

            /* CompositeType.SrcOver (any color) */
            (sg2d.compositeState == sg2d.COMP_ALPHA    &&
             sg2d.paintState <= sg2d.PAINT_ALPHACOLOR  &&
             (((AlphaComposite)sg2d.composite).getRule() ==
              AlphaComposite.SRC_OVER))                        ||

            /* CompositeType.Xor (any color) */
            (sg2d.compositeState == sg2d.COMP_XOR &&
             sg2d.paintState <= sg2d.PAINT_ALPHACOLOR))
        {
            textpipe = oglTextPipe;
        } else {
            // do this to initialize textpipe correctly; we will attempt
            // to override the non-text pipes below
            super.validatePipe(sg2d);
            textpipe = sg2d.textpipe;
            validated = true;
        }

        PixelToParallelogramConverter txPipe = null;
        OGLRenderer nonTxPipe = null;

        if (sg2d.antialiasHint != SunHints.INTVAL_ANTIALIAS_ON) {
            if (sg2d.paintState <= sg2d.PAINT_ALPHACOLOR) {
                if (sg2d.compositeState <= sg2d.COMP_XOR) {
                    txPipe = oglTxRenderPipe;
                    nonTxPipe = oglRenderPipe;
                }
            } else if (sg2d.compositeState <= sg2d.COMP_ALPHA) {
                if (OGLPaints.isValid(sg2d)) {
                    txPipe = oglTxRenderPipe;
                    nonTxPipe = oglRenderPipe;
                }
                // custom paints handled by super.validatePipe() below
            }
        } else {
            if (sg2d.paintState <= sg2d.PAINT_ALPHACOLOR) {
                // AA solid-color rendering can go through the shader-based
                // parallelogram pipe when the hardware supports it.
                if (graphicsConfig.isCapPresent(CAPS_PS30) &&
                    (sg2d.imageComp == CompositeType.SrcOverNoEa ||
                     sg2d.imageComp == CompositeType.SrcOver))
                {
                    if (!validated) {
                        super.validatePipe(sg2d);
                        validated = true;
                    }
                    PixelToParallelogramConverter aaConverter =
                        new PixelToParallelogramConverter(sg2d.shapepipe,
                                                          oglAAPgramPipe,
                                                          1.0/8.0, 0.499,
                                                          false);
                    sg2d.drawpipe = aaConverter;
                    sg2d.fillpipe = aaConverter;
                    sg2d.shapepipe = aaConverter;
                } else if (sg2d.compositeState == sg2d.COMP_XOR) {
                    // install the solid pipes when AA and XOR are both enabled
                    txPipe = oglTxRenderPipe;
                    nonTxPipe = oglRenderPipe;
                }
            }
            // other cases handled by super.validatePipe() below
        }

        if (txPipe != null) {
            if (sg2d.transformState >= sg2d.TRANSFORM_TRANSLATESCALE) {
                sg2d.drawpipe = txPipe;
                sg2d.fillpipe = txPipe;
            } else if (sg2d.strokeState != sg2d.STROKE_THIN) {
                sg2d.drawpipe = txPipe;
                sg2d.fillpipe = nonTxPipe;
            } else {
                sg2d.drawpipe = nonTxPipe;
                sg2d.fillpipe = nonTxPipe;
            }
            // Note that we use the transforming pipe here because it
            // will examine the shape and possibly perform an optimized
            // operation if it can be simplified.  The simplifications
            // will be valid for all STROKE and TRANSFORM types.
            sg2d.shapepipe = txPipe;
        } else {
            if (!validated) {
                super.validatePipe(sg2d);
            }
        }

        // install the text pipe based on our earlier decision
        sg2d.textpipe = textpipe;

        // always override the image pipe with the specialized OGL pipe
        sg2d.imagepipe = oglImagePipe;
    }

    @Override
    protected MaskFill getMaskFill(SunGraphics2D sg2d) {
        if (sg2d.paintState > sg2d.PAINT_ALPHACOLOR) {
            /*
             * We can only accelerate non-Color MaskFill operations if
             * all of the following conditions hold true:
             *   - there is an implementation for the given paintState
             *   - the current Paint can be accelerated for this destination
             *   - multitexturing is available (since we need to modulate
             *     the alpha mask texture with the paint texture)
             *
             * In all other cases, we return null, in which case the
             * validation code will choose a more general software-based loop.
             */
            if (!OGLPaints.isValid(sg2d) ||
                !graphicsConfig.isCapPresent(CAPS_MULTITEXTURE))
            {
                return null;
            }
        }
        return super.getMaskFill(sg2d);
    }

    /**
     * Accelerated copyArea: only used for simple transforms and
     * non-XOR composites; otherwise falls back (returns false).
     */
    public boolean copyArea(SunGraphics2D sg2d,
                            int x, int y, int w, int h, int dx, int dy)
    {
        if (sg2d.transformState < sg2d.TRANSFORM_TRANSLATESCALE &&
            sg2d.compositeState < sg2d.COMP_XOR)
        {
            x += sg2d.transX;
            y += sg2d.transY;

            oglRenderPipe.copyArea(sg2d, x, y, w, h, dx, dy);

            return true;
        }
        return false;
    }

    // Invalidates this surface and synchronously releases its native
    // resources on the rendering thread.
    public void flush() {
        invalidate();
        OGLRenderQueue rq = OGLRenderQueue.getInstance();
        rq.lock();
        try {
            // make sure we have a current context before
            // disposing the native resources (e.g. texture object)
            OGLContext.setScratchSurface(graphicsConfig);

            RenderBuffer buf = rq.getBuffer();
            rq.ensureCapacityAndAlignment(12, 4);
            buf.putInt(FLUSH_SURFACE);
            buf.putLong(getNativeOps());

            // this call is expected to complete synchronously, so flush now
            rq.flushNow();
        } finally {
            rq.unlock();
        }
    }

    /**
     * Disposes the native resources associated with the given OGLSurfaceData
     * (referenced by the pData parameter).  This method is invoked from
     * the native Dispose() method from the Disposer thread when the
     * Java-level OGLSurfaceData object is about to go away.  Note that we
     * also pass a reference to the native GLX/WGLGraphicsConfigInfo
     * (pConfigInfo) for the purposes of making a context current.
     */
    static void dispose(long pData, long pConfigInfo) {
        OGLRenderQueue rq = OGLRenderQueue.getInstance();
        rq.lock();
        try {
            // make sure we have a current context before
            // disposing the native resources (e.g. texture object)
            OGLContext.setScratchSurface(pConfigInfo);

            RenderBuffer buf = rq.getBuffer();
            rq.ensureCapacityAndAlignment(12, 4);
            buf.putInt(DISPOSE_SURFACE);
            buf.putLong(pData);

            // this call is expected to complete synchronously, so flush now
            rq.flushNow();
        } finally {
            rq.unlock();
        }
    }

    // Queues a buffer swap for the given native window and flushes.
    static void swapBuffers(long window) {
        OGLRenderQueue rq = OGLRenderQueue.getInstance();
        rq.lock();
        try {
            RenderBuffer buf = rq.getBuffer();
            rq.ensureCapacityAndAlignment(12, 4);
            buf.putInt(SWAP_BUFFERS);
            buf.putLong(window);
            rq.flushNow();
        } finally {
            rq.unlock();
        }
    }

    /**
     * Returns true if OpenGL textures can have non-power-of-two dimensions
     * when using the basic GL_TEXTURE_2D target.
     */
    boolean isTexNonPow2Available() {
        return graphicsConfig.isCapPresent(CAPS_TEXNONPOW2);
    }

    /**
     * Returns true if OpenGL textures can have non-power-of-two dimensions
     * when using the GL_ARB_texture_rectangle target (only available when the
     * GL_ARB_texture_rectangle extension is present).
     */
    boolean isTexRectAvailable() {
        return graphicsConfig.isCapPresent(CAPS_EXT_TEXRECT);
    }

    // Returns the native surface dimensions (set by native init code);
    // taken under the queue lock for a consistent snapshot.
    public Rectangle getNativeBounds() {
        OGLRenderQueue rq = OGLRenderQueue.getInstance();
        rq.lock();
        try {
            return new Rectangle(nativeWidth, nativeHeight);
        } finally {
            rq.unlock();
        }
    }
}
rokn/Count_Words_2015
testing/openjdk/jdk/src/share/classes/sun/java2d/opengl/OGLSurfaceData.java
Java
mit
24,976
<?php
/*
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * This software consists of voluntary contributions made by many individuals
 * and is licensed under the MIT license. For more information, see
 * <http://www.doctrine-project.org>.
 */

namespace Doctrine\MongoDB\Event;

use Doctrine\Common\EventArgs as BaseEventArgs;

/**
 * Event args.
 *
 * Immutable value object carrying the object that triggered an event and an
 * optional data payload to event listeners.
 *
 * @license http://www.opensource.org/licenses/mit-license.php MIT
 * @link www.doctrine-project.com
 * @since 1.0
 * @author Jonathan H. Wage <jonwage@gmail.com>
 */
class EventArgs extends BaseEventArgs
{
    // The object that dispatched the event.
    private $invoker;

    // Arbitrary payload supplied by the dispatcher.
    private $data;

    /**
     * @param object $invoker The object dispatching the event.
     * @param mixed  $data    Optional event payload. Note: although declared
     *                        by reference, the value is assigned by value
     *                        below, so later changes to the caller's variable
     *                        are NOT reflected in getData() — presumably a
     *                        historical signature kept for BC; verify before
     *                        relying on reference semantics.
     */
    public function __construct($invoker, &$data = null)
    {
        $this->invoker = $invoker;
        $this->data = $data;
    }

    /**
     * @return object The object that dispatched the event.
     */
    public function getInvoker()
    {
        return $this->invoker;
    }

    /**
     * @return mixed The payload captured at construction time.
     */
    public function getData()
    {
        return $this->data;
    }
}
rafaelcalleja/EmptySymfony2Project
vendor/doctrine/mongodb/lib/Doctrine/MongoDB/Event/EventArgs.php
PHP
mit
1,647
var crypto = require('crypto'); var scmp = require('scmp'); var utils = require('keystone-utils'); // The DISABLE_CSRF environment variable is available to automatically pass // CSRF validation. This is useful in development scenarios where you want to // restart the node process and aren't using a persistent session store, but // should NEVER be set in production environments!! var DISABLE_CSRF = process.env.DISABLE_CSRF === 'true'; exports.TOKEN_KEY = '_csrf'; exports.LOCAL_KEY = 'csrf_token_key'; exports.LOCAL_VALUE = 'csrf_token_value'; exports.SECRET_KEY = exports.TOKEN_KEY + '_secret'; exports.SECRET_LENGTH = 10; exports.CSRF_HEADER_KEY = 'x-csrf-token'; exports.XSRF_HEADER_KEY = 'x-xsrf-token'; exports.XSRF_COOKIE_KEY = 'XSRF-TOKEN'; function tokenize (salt, secret) { return salt + crypto.createHash('sha1').update(salt + secret).digest('hex'); } exports.createSecret = function () { return crypto.pseudoRandomBytes(exports.SECRET_LENGTH).toString('base64'); }; exports.getSecret = function (req) { return req.session[exports.SECRET_KEY] || (req.session[exports.SECRET_KEY] = exports.createSecret()); }; exports.createToken = function (req) { return tokenize(utils.randomString(exports.SECRET_LENGTH), exports.getSecret(req)); }; exports.getToken = function (req, res) { res.locals[exports.LOCAL_VALUE] = res.locals[exports.LOCAL_VALUE] || exports.createToken(req); res.cookie(exports.XSRF_COOKIE_KEY, res.locals[exports.LOCAL_VALUE]); return res.locals[exports.LOCAL_VALUE]; }; exports.requestToken = function (req) { if (req.body && req.body[exports.TOKEN_KEY]) { return req.body[exports.TOKEN_KEY]; } else if (req.query && req.query[exports.TOKEN_KEY]) { return req.query[exports.TOKEN_KEY]; } else if (req.headers && req.headers[exports.XSRF_HEADER_KEY]) { return req.headers[exports.XSRF_HEADER_KEY]; } else if (req.headers && req.headers[exports.CSRF_HEADER_KEY]) { return req.headers[exports.CSRF_HEADER_KEY]; } return ''; }; exports.validate = function (req, token) { // 
Allow environment variable to disable check if (DISABLE_CSRF) return true; if (arguments.length === 1) { token = exports.requestToken(req); } if (typeof token !== 'string') { return false; } return scmp(token, tokenize(token.slice(0, exports.SECRET_LENGTH), req.session[exports.SECRET_KEY])); }; exports.middleware = { init: function (req, res, next) { res.locals[exports.LOCAL_KEY] = exports.LOCAL_VALUE; exports.getToken(req, res); next(); }, validate: function (req, res, next) { // Allow environment variable to disable check if (DISABLE_CSRF) return next(); // Bail on safe methods if (req.method === 'GET' || req.method === 'HEAD' || req.method === 'OPTIONS') { return next(); } // Validate token if (exports.validate(req)) { next(); } else { res.statusCode = 403; next(new Error('CSRF token mismatch')); } }, };
alobodig/keystone
lib/security/csrf.js
JavaScript
mit
2,898
<?php
/**
 * File containing the ezcDocumentDocbook class
 *
 * @package Document
 * @version //autogen//
 * @copyright Copyright (C) 2005-2010 eZ Systems AS. All rights reserved.
 * @license http://ez.no/licenses/new_bsd New BSD License
 */

/**
 * The document handler for the docbook document markup.
 *
 * Docbook is the central intermediate format: every other handler converts
 * to and from it, so this class is mostly a thin pass-through plus schema
 * validation helpers.
 *
 * @package Document
 * @version //autogen//
 */
class ezcDocumentDocbook extends ezcDocumentXmlBase
{
    /**
     * Construct document xml base.
     *
     * @ignore
     * @param ezcDocumentDocbookOptions $options
     * @return void
     */
    public function __construct( ezcDocumentDocbookOptions $options = null )
    {
        parent::__construct( $options === null ?
            new ezcDocumentDocbookOptions() :
            $options );
    }

    /**
     * Return document compiled to the docbook format
     *
     * The internal document structure is compiled to the docbook format and
     * the resulting docbook document is returned.
     *
     * This method is required for all formats to have one central format, so
     * that each format can be compiled into each other format using docbook as
     * an intermediate format.
     *
     * This document already is docbook, so it returns itself unchanged.
     *
     * @return ezcDocumentDocbook
     */
    public function getAsDocbook()
    {
        return $this;
    }

    /**
     * Create document from docbook document
     *
     * A document of the docbook format is provided and the internal document
     * structure should be created out of this.
     *
     * This method is required for all formats to have one central format, so
     * that each format can be compiled into each other format using docbook as
     * an intermediate format.
     *
     * Since the source already is docbook, just adopt its path and DOM tree.
     *
     * @param ezcDocumentDocbook $document
     * @return void
     */
    public function createFromDocbook( ezcDocumentDocbook $document )
    {
        $this->path     = $document->getPath();
        $this->document = $document->getDomDocument();
    }

    /**
     * Return document as string
     *
     * Serialize the document to a string an return it.
     *
     * @return string
     */
    public function save()
    {
        return $this->document->saveXml();
    }

    /**
     * Validate the input file
     *
     * Validate the input file against the specification of the current
     * document format.
     *
     * Returns true, if the validation succeded, and an array with
     * ezcDocumentValidationError objects otherwise.
     *
     * @param string $file
     * @return mixed
     */
    public function validateFile( $file )
    {
        // Capture libxml errors locally instead of emitting warnings.
        $oldSetting = libxml_use_internal_errors( true );
        libxml_clear_errors();

        $document = new DOMDocument();
        $document->load( $file );

        return $this->validateAgainstSchema( $document, $oldSetting );
    }

    /**
     * Validate the input string
     *
     * Validate the input string against the specification of the current
     * document format.
     *
     * Returns true, if the validation succeded, and an array with
     * ezcDocumentValidationError objects otherwise.
     *
     * @param string $string
     * @return mixed
     */
    public function validateString( $string )
    {
        // Capture libxml errors locally instead of emitting warnings.
        $oldSetting = libxml_use_internal_errors( true );
        libxml_clear_errors();

        $document = new DOMDocument();
        $document->loadXml( $string );

        return $this->validateAgainstSchema( $document, $oldSetting );
    }

    /**
     * Schema-validate an already loaded document and collect libxml errors.
     *
     * Shared tail of validateFile() / validateString(): runs the schema
     * validation, converts all collected libxml errors (from loading and
     * validating) into ezcDocumentValidationError objects, and restores the
     * previous libxml error handling mode.
     *
     * @param DOMDocument $document Document loaded while internal errors were
     *        enabled by the caller.
     * @param bool $oldSetting Previous libxml_use_internal_errors() setting.
     * @return mixed True on success, or array(ezcDocumentValidationError).
     */
    private function validateAgainstSchema( DOMDocument $document, $oldSetting )
    {
        $document->schemaValidate( $this->options->schema );

        // Get all errors
        $xmlErrors = libxml_get_errors();
        $errors = array();
        foreach ( $xmlErrors as $error )
        {
            $errors[] = ezcDocumentValidationError::createFromLibXmlError( $error );
        }
        libxml_clear_errors();
        libxml_use_internal_errors( $oldSetting );

        return ( count( $errors ) ? $errors : true );
    }
}
?>
jim835/qscene_ez
lib/ezc/Document/src/document/xml/docbook.php
PHP
gpl-2.0
4,312
<?php

/**
 * Validation rule that sanitizes a field value with preg_replace().
 *
 * Properties are declared explicitly: the original relied on dynamic
 * properties, which are deprecated as of PHP 8.2. They were implicitly
 * public before, so declaring them public preserves external access.
 */
class Redux_Validation_preg_replace {

    /** @var array Field config; expects $field['preg']['pattern'] and $field['preg']['replacement']. */
    public $field;

    /** @var string|null The field value; replaced in place by validate(). */
    public $value;

    /** @var mixed The currently stored value (unused by this rule). */
    public $current;

    /**
     * Field Constructor.
     *
     * Required - must call the parent constructor, then assign field and value
     * to vars, and obviously call the render field function
     *
     * @param array  $field   Field configuration (must contain the 'preg' spec).
     * @param string $value   Raw submitted value to sanitize.
     * @param mixed  $current Currently stored value (kept for API parity).
     *
     * @since Redux_Options 1.0.0
     */
    function __construct($field, $value, $current) {
        $this->field = $field;
        $this->value = $value;
        $this->current = $current;
        $this->validate();
    }

    /**
     * Field Render Function.
     *
     * Takes the vars and validates them: applies the configured pattern /
     * replacement pair to the value via preg_replace().
     *
     * @since Redux_Options 1.0.0
     */
    function validate() {
        $this->value = preg_replace($this->field['preg']['pattern'], $this->field['preg']['replacement'], $this->value);
    }
}
joelcoxokc/joelcoxio
wp-content/themes/vcard/options/options/validation/preg_replace/validation_preg_replace.php
PHP
gpl-2.0
659
<?php
/**
 * Zend Framework
 *
 * LICENSE
 *
 * This source file is subject to the new BSD license that is bundled
 * with this package in the file LICENSE.txt.
 * It is also available through the world-wide-web at this URL:
 * http://framework.zend.com/license/new-bsd
 * If you did not receive a copy of the license and are unable to
 * obtain it through the world-wide-web, please send an email
 * to license@zend.com so we can send you a copy immediately.
 *
 * @category   Zend
 * @package    Zend_Http
 * @subpackage UserAgent
 * @copyright  Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 */

/**
 * @see Zend_Browser_Exception
 */
#require_once 'Zend/Http/UserAgent/Exception.php';

/**
 * Marker exception for the UserAgent "Features" subcomponent.
 *
 * Carries no behaviour of its own; it only narrows the exception type so
 * callers can catch feature-detection failures separately from other
 * Zend_Http_UserAgent_Exception cases.
 *
 * @category   Zend
 * @package    Zend_Http
 * @subpackage UserAgent
 * @copyright  Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 */
class Zend_Http_UserAgent_Features_Exception extends Zend_Http_UserAgent_Exception
{
}
Eristoff47/P2
src/public/lib/Zend/Http/UserAgent/Features/Exception.php
PHP
gpl-2.0
1,124
/* * Copyright (C) 2008-2012 TrinityCore <http://www.trinitycore.org/> * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the * Free Software Foundation; either version 2 of the License, or (at your * option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program. If not, see <http://www.gnu.org/licenses/>. */ /* ScriptData SDName: Trial Of the Champion SD%Complete: SDComment: SDCategory: trial_of_the_champion EndScriptData */ /* ContentData npc_announcer_toc5 EndContentData */ #include "ScriptPCH.h" #include "trial_of_the_champion.h" #include "Vehicle.h" #define GOSSIP_START_EVENT1 "I'm ready to start challenge." #define GOSSIP_START_EVENT2 "I'm ready for the next challenge." 
// Final facing of the announcer after walking back to its post.
#define ORIENTATION 4.714f

/*######
## npc_announcer_toc5
######*/

// Spawn point shared by all grand champions, their lesser adds and the
// argent challenge trash (arena entrance).
const Position SpawnPosition = {746.261f, 657.401f, 411.681f, 4.65f};

// Arena announcer: drives the whole Trial of the Champion event flow via
// gossip -> StartEncounter() -> phased summons (grand champions, argent
// champion, black knight).
class npc_announcer_toc5 : public CreatureScript
{
public:
    npc_announcer_toc5() : CreatureScript("npc_announcer_toc5") { }

    struct npc_announcer_toc5AI : public ScriptedAI
    {
        npc_announcer_toc5AI(Creature* creature) : ScriptedAI(creature)
        {
            instance = creature->GetInstanceScript();

            uiSummonTimes = 0;
            uiPosition = 0;
            uiLesserChampions = 0;
            uiFirstBoss = 0;
            uiSecondBoss = 0;
            uiThirdBoss = 0;
            uiArgentChampion = 0;
            uiPhase = 0;
            uiTimer = 0;
            uiVehicle1GUID = 0;
            uiVehicle2GUID = 0;
            uiVehicle3GUID = 0;

            Champion1List.clear();
            Champion2List.clear();
            Champion3List.clear();

            // The announcer itself never fights; it only talks and summons.
            me->SetReactState(REACT_PASSIVE);
            me->SetFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NON_ATTACKABLE);
            me->SetFlag(UNIT_NPC_FLAGS, UNIT_NPC_FLAG_GOSSIP);

            // Roll the random boss line-up once per spawn.
            SetGrandChampionsForEncounter();
            SetArgentChampion();
        }

        InstanceScript* instance;

        uint8 uiSummonTimes;        // how many grand champion mounts have been summoned (1..3)
        uint8 uiPosition;
        uint8 uiLesserChampions;    // lesser champions killed so far (event advances at 3/6/9)
        uint32 uiArgentChampion;    // NPC_EADRIC or NPC_PALETRESS, rolled in SetArgentChampion()
        uint32 uiFirstBoss;         // indices 0..4 into the faction champion table
        uint32 uiSecondBoss;
        uint32 uiThirdBoss;
        uint32 uiPhase;             // current step of the timed intro sequence (see UpdateAI)
        uint32 uiTimer;             // countdown to the next uiPhase step
        uint64 uiVehicle1GUID;
        uint64 uiVehicle2GUID;
        uint64 uiVehicle3GUID;
        // NOTE(review): never initialized and shadowed by a local of the same
        // name in DoSummonGrandChampion(); appears to be dead — confirm before
        // relying on it.
        uint64 uiGrandChampionBoss1;

        std::list<uint64> Champion1List;
        std::list<uint64> Champion2List;
        std::list<uint64> Champion3List;

        // Advance (or jump) the intro state machine; uiTimerStep is the delay
        // before UpdateAI executes the next phase.
        void NextStep(uint32 uiTimerStep, bool bNextStep = true, uint8 uiPhaseStep = 0)
        {
            uiTimer = uiTimerStep;
            if (bNextStep)
                ++uiPhase;
            else
                uiPhase = uiPhaseStep;
        }

        // Entry points used by the instance script / gossip to drive the event.
        void SetData(uint32 uiType, uint32 /*uiData*/)
        {
            switch (uiType)
            {
                case DATA_START:
                    DoSummonGrandChampion(uiFirstBoss);
                    NextStep(10000, false, 1);
                    break;
                case DATA_IN_POSITION: //movement done.
                    // Walk back to the announcer post and close the main gate.
                    me->GetMotionMaster()->MovePoint(1, 735.81f, 661.92f, 412.39f);
                    if (GameObject* go = GameObject::GetGameObject(*me, instance->GetData64(DATA_MAIN_GATE)))
                        instance->HandleGameObject(go->GetGUID(), false);
                    NextStep(10000, false, 3);
                    break;
                case DATA_LESSER_CHAMPIONS_DEFEATED:
                {
                    // Each wave has 3 lesser champions; after 3 and 6 kills the
                    // next wave is sent in, after 9 the grand champions attack.
                    ++uiLesserChampions;
                    std::list<uint64> TempList;
                    if (uiLesserChampions == 3 || uiLesserChampions == 6)
                    {
                        switch (uiLesserChampions)
                        {
                            case 3:
                                TempList = Champion2List;
                                break;
                            case 6:
                                TempList = Champion3List;
                                break;
                        }

                        for (std::list<uint64>::const_iterator itr = TempList.begin(); itr != TempList.end(); ++itr)
                            if (Creature* summon = Unit::GetCreature(*me, *itr))
                                AggroAllPlayers(summon);
                    }
                    else if (uiLesserChampions == 9)
                        StartGrandChampionsAttack();
                    break;
                }
            }
        }

        // Send all three mounted grand champions against the players at once.
        void StartGrandChampionsAttack()
        {
            Creature* pGrandChampion1 = Unit::GetCreature(*me, uiVehicle1GUID);
            Creature* pGrandChampion2 = Unit::GetCreature(*me, uiVehicle2GUID);
            Creature* pGrandChampion3 = Unit::GetCreature(*me, uiVehicle3GUID);

            if (pGrandChampion1 && pGrandChampion2 && pGrandChampion3)
            {
                AggroAllPlayers(pGrandChampion1);
                AggroAllPlayers(pGrandChampion2);
                AggroAllPlayers(pGrandChampion3);
            }
        }

        // Face the arena again once the walk-back point (id 1) is reached.
        void MovementInform(uint32 uiType, uint32 uiPointId)
        {
            if (uiType != POINT_MOTION_TYPE)
                return;

            if (uiPointId == 1)
            {
                me->SetOrientation(ORIENTATION);
                me->SendMovementFlagUpdate();
            }
        }

        // Summon one grand champion mount (boss vehicle) plus its three lesser
        // champion adds; uiBoss selects the faction pairing (0..4).
        void DoSummonGrandChampion(uint32 uiBoss)
        {
            ++uiSummonTimes;
            uint32 VEHICLE_TO_SUMMON1 = 0;   // grand champion's mount
            uint32 VEHICLE_TO_SUMMON2 = 0;   // matching lesser champion mount
            switch (uiBoss)
            {
                case 0:
                    VEHICLE_TO_SUMMON1 = VEHICLE_MOKRA_SKILLCRUSHER_MOUNT;
                    VEHICLE_TO_SUMMON2 = VEHICLE_ORGRIMMAR_WOLF;
                    break;
                case 1:
                    VEHICLE_TO_SUMMON1 = VEHICLE_ERESSEA_DAWNSINGER_MOUNT;
                    VEHICLE_TO_SUMMON2 = VEHICLE_SILVERMOON_HAWKSTRIDER;
                    break;
                case 2:
                    VEHICLE_TO_SUMMON1 = VEHICLE_RUNOK_WILDMANE_MOUNT;
                    VEHICLE_TO_SUMMON2 = VEHICLE_THUNDER_BLUFF_KODO;
                    break;
                case 3:
                    VEHICLE_TO_SUMMON1 = VEHICLE_ZUL_TORE_MOUNT;
                    VEHICLE_TO_SUMMON2 = VEHICLE_DARKSPEAR_RAPTOR;
                    break;
                case 4:
                    VEHICLE_TO_SUMMON1 = VEHICLE_DEATHSTALKER_VESCERI_MOUNT;
                    VEHICLE_TO_SUMMON2 = VEHICLE_FORSAKE_WARHORSE;
                    break;
                default:
                    return;
            }

            if (Creature* pBoss = me->SummonCreature(VEHICLE_TO_SUMMON1, SpawnPosition))
            {
                // Record the vehicle/rider GUIDs in the instance script under
                // the slot matching this summon (1st/2nd/3rd champion).
                switch (uiSummonTimes)
                {
                    case 1:
                    {
                        uiVehicle1GUID = pBoss->GetGUID();
                        // NOTE(review): this local shadows the member of the
                        // same name declared above.
                        uint64 uiGrandChampionBoss1 = 0;
                        if (Vehicle* pVehicle = pBoss->GetVehicleKit())
                            if (Unit* unit = pVehicle->GetPassenger(0))
                                uiGrandChampionBoss1 = unit->GetGUID();
                        if (instance)
                        {
                            instance->SetData64(DATA_GRAND_CHAMPION_VEHICLE_1, uiVehicle1GUID);
                            instance->SetData64(DATA_GRAND_CHAMPION_1, uiGrandChampionBoss1);
                        }
                        pBoss->AI()->SetData(1, 0);
                        break;
                    }
                    case 2:
                    {
                        uiVehicle2GUID = pBoss->GetGUID();
                        uint64 uiGrandChampionBoss2 = 0;
                        if (Vehicle* pVehicle = pBoss->GetVehicleKit())
                            if (Unit* unit = pVehicle->GetPassenger(0))
                                uiGrandChampionBoss2 = unit->GetGUID();
                        if (instance)
                        {
                            instance->SetData64(DATA_GRAND_CHAMPION_VEHICLE_2, uiVehicle2GUID);
                            instance->SetData64(DATA_GRAND_CHAMPION_2, uiGrandChampionBoss2);
                        }
                        pBoss->AI()->SetData(2, 0);
                        break;
                    }
                    case 3:
                    {
                        uiVehicle3GUID = pBoss->GetGUID();
                        uint64 uiGrandChampionBoss3 = 0;
                        if (Vehicle* pVehicle = pBoss->GetVehicleKit())
                            if (Unit* unit = pVehicle->GetPassenger(0))
                                uiGrandChampionBoss3 = unit->GetGUID();
                        if (instance)
                        {
                            instance->SetData64(DATA_GRAND_CHAMPION_VEHICLE_3, uiVehicle3GUID);
                            instance->SetData64(DATA_GRAND_CHAMPION_3, uiGrandChampionBoss3);
                        }
                        pBoss->AI()->SetData(3, 0);
                        break;
                    }
                    default:
                        return;
                }

                // Three lesser champions trail the boss in a wedge formation.
                for (uint8 i = 0; i < 3; ++i)
                {
                    if (Creature* pAdd = me->SummonCreature(VEHICLE_TO_SUMMON2, SpawnPosition, TEMPSUMMON_CORPSE_DESPAWN))
                    {
                        switch (uiSummonTimes)
                        {
                            case 1:
                                Champion1List.push_back(pAdd->GetGUID());
                                break;
                            case 2:
                                Champion2List.push_back(pAdd->GetGUID());
                                break;
                            case 3:
                                Champion3List.push_back(pAdd->GetGUID());
                                break;
                        }

                        switch (i)
                        {
                            case 0:
                                pAdd->GetMotionMaster()->MoveFollow(pBoss, 2.0f, M_PI);
                                break;
                            case 1:
                                pAdd->GetMotionMaster()->MoveFollow(pBoss, 2.0f, M_PI / 2);
                                break;
                            case 2:
                                pAdd->GetMotionMaster()->MoveFollow(pBoss, 2.0f, M_PI / 2 + M_PI);
                                break;
                        }
                    }
                }
            }
        }

        // Spawn the rolled argent champion (Eadric or Paletress) with three
        // waves of lightwielder/monk/priestess trash.
        void DoStartArgentChampionEncounter()
        {
            me->GetMotionMaster()->MovePoint(1, 735.81f, 661.92f, 412.39f);

            if (me->SummonCreature(uiArgentChampion, SpawnPosition))
            {
                for (uint8 i = 0; i < 3; ++i)
                {
                    if (Creature* pTrash = me->SummonCreature(NPC_ARGENT_LIGHWIELDER, SpawnPosition))
                        pTrash->AI()->SetData(i, 0);
                    if (Creature* pTrash = me->SummonCreature(NPC_ARGENT_MONK, SpawnPosition))
                        pTrash->AI()->SetData(i, 0);
                    if (Creature* pTrash = me->SummonCreature(NPC_PRIESTESS, SpawnPosition))
                        pTrash->AI()->SetData(i, 0);
                }
            }
        }

        // Pick three distinct grand champion indices in [0, 4].
        void SetGrandChampionsForEncounter()
        {
            uiFirstBoss = urand(0, 4);

            // Re-roll both until all three indices differ.
            while (uiSecondBoss == uiFirstBoss || uiThirdBoss == uiFirstBoss || uiThirdBoss == uiSecondBoss)
            {
                uiSecondBoss = urand(0, 4);
                uiThirdBoss = urand(0, 4);
            }
        }

        // Coin-flip between the two possible argent champions.
        void SetArgentChampion()
        {
            uint8 uiTempBoss = urand(0, 1);

            switch (uiTempBoss)
            {
                case 0:
                    uiArgentChampion = NPC_EADRIC;
                    break;
                case 1:
                    uiArgentChampion = NPC_PALETRESS;
                    break;
            }
        }

        // Gossip entry point: starts whichever encounter is next based on the
        // instance's saved boss states.
        void StartEncounter()
        {
            if (!instance)
                return;

            me->RemoveFlag(UNIT_NPC_FLAGS, UNIT_NPC_FLAG_GOSSIP);

            if (instance->GetData(BOSS_BLACK_KNIGHT) == NOT_STARTED)
            {
                if (instance->GetData(BOSS_ARGENT_CHALLENGE_E) == NOT_STARTED &&
                    instance->GetData(BOSS_ARGENT_CHALLENGE_P) == NOT_STARTED)
                {
                    if (instance->GetData(BOSS_GRAND_CHAMPIONS) == NOT_STARTED)
                        me->AI()->SetData(DATA_START, 0);

                    if (instance->GetData(BOSS_GRAND_CHAMPIONS) == DONE)
                        DoStartArgentChampionEncounter();
                }

                if ((instance->GetData(BOSS_GRAND_CHAMPIONS) == DONE &&
                    instance->GetData(BOSS_ARGENT_CHALLENGE_E) == DONE) ||
                    instance->GetData(BOSS_ARGENT_CHALLENGE_P) == DONE)
                    me->SummonCreature(VEHICLE_BLACK_KNIGHT, 769.834f, 651.915f, 447.035f, 0);
            }
        }

        // Flip a summoned creature from passive showpiece to hostile and put
        // every living, non-GM player on its threat list.
        void AggroAllPlayers(Creature* temp)
        {
            Map::PlayerList const &PlList = me->GetMap()->GetPlayers();

            if (PlList.isEmpty())
                return;

            for (Map::PlayerList::const_iterator i = PlList.begin(); i != PlList.end(); ++i)
            {
                if (Player* player = i->getSource())
                {
                    if (player->isGameMaster())
                        continue;

                    if (player->isAlive())
                    {
                        temp->SetHomePosition(me->GetPositionX(), me->GetPositionY(), me->GetPositionZ(), me->GetOrientation());
                        temp->RemoveFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NON_ATTACKABLE);
                        temp->SetReactState(REACT_AGGRESSIVE);
                        temp->SetInCombatWith(player);
                        player->SetInCombatWith(temp);
                        temp->AddThreat(player, 0.0f);
                    }
                }
            }
        }

        // Timed intro state machine: phase 1/2 summon the 2nd/3rd grand
        // champion, phase 3 sends the first wave of lesser champions.
        void UpdateAI(const uint32 uiDiff)
        {
            ScriptedAI::UpdateAI(uiDiff);

            if (uiTimer <= uiDiff)
            {
                switch (uiPhase)
                {
                    case 1:
                        DoSummonGrandChampion(uiSecondBoss);
                        NextStep(10000, true);
                        break;
                    case 2:
                        DoSummonGrandChampion(uiThirdBoss);
                        NextStep(0, false);
                        break;
                    case 3:
                        if (!Champion1List.empty())
                        {
                            for (std::list<uint64>::const_iterator itr = Champion1List.begin(); itr != Champion1List.end(); ++itr)
                                if (Creature* summon = Unit::GetCreature(*me, *itr))
                                    AggroAllPlayers(summon);
                            NextStep(0, false);
                        }
                        break;
                }
            }
            else
                uiTimer -= uiDiff;

            if (!UpdateVictim())
                return;
        }

        // Until the grand champion fight begins, new summons stand passive.
        void JustSummoned(Creature* summon)
        {
            if (instance && instance->GetData(BOSS_GRAND_CHAMPIONS) == NOT_STARTED)
            {
                summon->SetFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NON_ATTACKABLE);
                summon->SetReactState(REACT_PASSIVE);
            }
        }

        // A despawning lesser-champion mount counts as one defeat for the
        // 3/6/9 wave progression in SetData().
        void SummonedCreatureDespawn(Creature* summon)
        {
            switch (summon->GetEntry())
            {
                case VEHICLE_DARNASSIA_NIGHTSABER:
                case VEHICLE_EXODAR_ELEKK:
                case VEHICLE_STORMWIND_STEED:
                case VEHICLE_GNOMEREGAN_MECHANOSTRIDER:
                case VEHICLE_IRONFORGE_RAM:
                case VEHICLE_FORSAKE_WARHORSE:
                case VEHICLE_THUNDER_BLUFF_KODO:
                case VEHICLE_ORGRIMMAR_WOLF:
                case VEHICLE_SILVERMOON_HAWKSTRIDER:
                case VEHICLE_DARKSPEAR_RAPTOR:
                    me->AI()->SetData(DATA_LESSER_CHAMPIONS_DEFEATED, 0);
                    break;
            }
        }
    };

    CreatureAI* GetAI(Creature* creature) const
    {
        return new npc_announcer_toc5AI(creature);
    }

    // Offer the start/continue gossip option unless the whole event is done.
    bool OnGossipHello(Player* player, Creature* creature)
    {
        InstanceScript* instance = creature->GetInstanceScript();
        if (instance &&
            ((instance->GetData(BOSS_GRAND_CHAMPIONS) == DONE &&
            instance->GetData(BOSS_BLACK_KNIGHT) == DONE &&
            instance->GetData(BOSS_ARGENT_CHALLENGE_E) == DONE) ||
            instance->GetData(BOSS_ARGENT_CHALLENGE_P) == DONE))
            return false;

        if (instance &&
            instance->GetData(BOSS_GRAND_CHAMPIONS) == NOT_STARTED &&
            instance->GetData(BOSS_ARGENT_CHALLENGE_E) == NOT_STARTED &&
            instance->GetData(BOSS_ARGENT_CHALLENGE_P) == NOT_STARTED &&
            instance->GetData(BOSS_BLACK_KNIGHT) == NOT_STARTED)
            player->ADD_GOSSIP_ITEM(GOSSIP_ICON_CHAT, GOSSIP_START_EVENT1, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_INFO_DEF+1);
        else if (instance)
            player->ADD_GOSSIP_ITEM(GOSSIP_ICON_CHAT, GOSSIP_START_EVENT2, GOSSIP_SENDER_MAIN, GOSSIP_ACTION_INFO_DEF+1);

        player->SEND_GOSSIP_MENU(player->GetGossipTextId(creature), creature->GetGUID());
        return true;
    }

    bool OnGossipSelect(Player* player, Creature* creature, uint32 /*sender*/, uint32 action)
    {
        player->PlayerTalkClass->ClearMenus();
        if (action == GOSSIP_ACTION_INFO_DEF+1)
        {
            player->CLOSE_GOSSIP_MENU();
            CAST_AI(npc_announcer_toc5::npc_announcer_toc5AI, creature->AI())->StartEncounter();
        }
        return true;
    }
};

void AddSC_trial_of_the_champion()
{
    new npc_announcer_toc5();
}
pablo93/TrinityCore
src/server/scripts/Northrend/CrusadersColiseum/TrialOfTheChampion/trial_of_the_champion.cpp
C++
gpl-2.0
18,304
<?php
/**
 * Zend Framework
 *
 * LICENSE
 *
 * This source file is subject to the new BSD license that is bundled
 * with this package in the file LICENSE.txt.
 * It is also available through the world-wide-web at this URL:
 * http://framework.zend.com/license/new-bsd
 * If you did not receive a copy of the license and are unable to
 * obtain it through the world-wide-web, please send an email
 * to license@zend.com so we can send you a copy immediately.
 *
 * @category   Zend
 * @package    Zend_Service
 * @subpackage DeveloperGarden
 * @copyright  Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 * @version    $Id: SearchParameters.php 24593 2012-01-05 20:35:02Z matthew $
 */

/**
 * @category   Zend
 * @package    Zend_Service
 * @subpackage DeveloperGarden
 * @copyright  Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com)
 * @author     Marco Kaiser
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 */
class Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
{
    /**
     * possible search parameters, incl. default values
     *
     * @var array
     */
    private $_parameters = array(
        'what'             => null,
        'dymwhat'          => null,
        'dymrelated'       => null,
        'hits'             => null,
        'collapse'         => null,
        'where'            => null,
        // BUGFIX: key was misspelled 'dywhere'; all setters use 'dymwhere'.
        'dymwhere'         => null,
        'radius'           => null,
        'lx'               => null,
        'ly'               => null,
        'rx'               => null,
        'ry'               => null,
        'transformgeocode' => null,
        'sort'             => null,
        'spatial'          => null,
        'sepcomm'          => null,
        'filter'           => null, // can be ONLINER or OFFLINER
        'openingtime'      => null, // can be now or HH::MM
        'kategorie'        => null, // @see http://www.suchen.de/kategorie-katalog
        'site'             => null,
        'typ'              => null,
        'name'             => null,
        'page'             => null,
        'city'             => null,
        'plz'              => null,
        'strasse'          => null,
        'bundesland'       => null,
    );

    /**
     * possible collapse values
     *
     * @var array
     */
    private $_possibleCollapseValues = array(
        true,
        false,
        'ADDRESS_COMPANY',
        'DOMAIN'
    );

    /**
     * sets a new search word
     * alias for setWhat
     *
     * @param string $searchValue
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setSearchValue($searchValue)
    {
        return $this->setWhat($searchValue);
    }

    /**
     * sets a new search word
     *
     * @param string $searchValue
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setWhat($searchValue)
    {
        $this->_parameters['what'] = $searchValue;
        return $this;
    }

    /**
     * enable the did you mean what feature
     *
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function enableDidYouMeanWhat()
    {
        $this->_parameters['dymwhat'] = 'true';
        return $this;
    }

    /**
     * disable the did you mean what feature
     *
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function disableDidYouMeanWhat()
    {
        $this->_parameters['dymwhat'] = 'false';
        return $this;
    }

    /**
     * enable the did you mean where feature
     *
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function enableDidYouMeanWhere()
    {
        $this->_parameters['dymwhere'] = 'true';
        return $this;
    }

    /**
     * disable the did you mean where feature
     *
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function disableDidYouMeanWhere()
    {
        $this->_parameters['dymwhere'] = 'false';
        return $this;
    }

    /**
     * enable did you mean related, if true Kihno will be corrected to Kino
     *
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function enableDidYouMeanRelated()
    {
        $this->_parameters['dymrelated'] = 'true';
        return $this;
    }

    /**
     * disable did you mean related, if false Kihno will not be corrected to Kino
     *
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function disableDidYouMeanRelated()
    {
        // BUGFIX: previously set 'true', which (re-)enabled the feature
        // instead of disabling it.
        $this->_parameters['dymrelated'] = 'false';
        return $this;
    }

    /**
     * set the max result hits for this search
     *
     * @param integer $hits must be between 0 and 1000
     * @throws Zend_Service_DeveloperGarden_LocalSearch_Exception on invalid value
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setHits($hits = 10)
    {
        #require_once 'Zend/Validate/Between.php';
        $validator = new Zend_Validate_Between(0, 1000);
        if (!$validator->isValid($hits)) {
            $message = $validator->getMessages();
            #require_once 'Zend/Service/DeveloperGarden/LocalSearch/Exception.php';
            throw new Zend_Service_DeveloperGarden_LocalSearch_Exception(current($message));
        }
        $this->_parameters['hits'] = $hits;
        return $this;
    }

    /**
     * If true, addresses will be collapsed for a single domain, common values
     * are:
     * ADDRESS_COMPANY – to collapse by address
     * DOMAIN – to collapse by domain (same like collapse=true)
     * false
     *
     * @param mixed $value one of $_possibleCollapseValues (strict comparison)
     * @throws Zend_Service_DeveloperGarden_LocalSearch_Exception on invalid value
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setCollapse($value)
    {
        if (!in_array($value, $this->_possibleCollapseValues, true)) {
            #require_once 'Zend/Service/DeveloperGarden/LocalSearch/Exception.php';
            throw new Zend_Service_DeveloperGarden_LocalSearch_Exception('Not a valid value provided.');
        }
        $this->_parameters['collapse'] = $value;
        return $this;
    }

    /**
     * set a specific search location
     * examples:
     * +47°54’53.10”, 11° 10’ 56.76”
     * 47°54’53.10;11°10’56.76”
     * 47.914750,11.182533
     * +47.914750 ; +11.1824
     * Darmstadt
     * Berlin
     *
     * @param string $where
     * @throws Zend_Service_DeveloperGarden_LocalSearch_Exception on empty value
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setWhere($where)
    {
        #require_once 'Zend/Validate/NotEmpty.php';
        $validator = new Zend_Validate_NotEmpty();
        if (!$validator->isValid($where)) {
            $message = $validator->getMessages();
            #require_once 'Zend/Service/DeveloperGarden/LocalSearch/Exception.php';
            throw new Zend_Service_DeveloperGarden_LocalSearch_Exception(current($message));
        }
        $this->_parameters['where'] = $where;
        return $this;
    }

    /**
     * returns the defined search location (ie city, country)
     *
     * @return string
     */
    public function getWhere()
    {
        return $this->_parameters['where'];
    }

    /**
     * enable the spatial search feature
     *
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function enableSpatial()
    {
        $this->_parameters['spatial'] = 'true';
        return $this;
    }

    /**
     * disable the spatial search feature
     *
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function disableSpatial()
    {
        $this->_parameters['spatial'] = 'false';
        return $this;
    }

    /**
     * sets spatial and the given radius for a circle search
     *
     * @param integer $radius
     * @throws Zend_Service_DeveloperGarden_LocalSearch_Exception on non-integer value
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setRadius($radius)
    {
        #require_once 'Zend/Validate/Int.php';
        $validator = new Zend_Validate_Int();
        if (!$validator->isValid($radius)) {
            $message = $validator->getMessages();
            #require_once 'Zend/Service/DeveloperGarden/LocalSearch/Exception.php';
            throw new Zend_Service_DeveloperGarden_LocalSearch_Exception(current($message));
        }
        $this->_parameters['radius'] = $radius;
        // radius search and geocode transformation are mutually exclusive
        $this->_parameters['transformgeocode'] = 'false';
        return $this;
    }

    /**
     * sets the values for a rectangle search
     * lx = longitude left top
     * ly = latitude left top
     * rx = longitude right bottom
     * ry = latitude right bottom
     *
     * @param float $lx
     * @param float $ly
     * @param float $rx
     * @param float $ry
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setRectangle($lx, $ly, $rx, $ry)
    {
        $this->_parameters['lx'] = $lx;
        $this->_parameters['ly'] = $ly;
        $this->_parameters['rx'] = $rx;
        $this->_parameters['ry'] = $ry;
        return $this;
    }

    /**
     * if set, the service returns the zipcode for the result
     *
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setTransformGeoCode()
    {
        $this->_parameters['transformgeocode'] = 'true';
        // geocode transformation and radius search are mutually exclusive
        $this->_parameters['radius'] = null;
        return $this;
    }

    /**
     * sets the sort value
     * possible values are: 'relevance' and 'distance' (only with spatial enabled)
     *
     * @param string $sort
     * @throws Zend_Service_DeveloperGarden_LocalSearch_Exception on invalid value
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setSort($sort)
    {
        if (!in_array($sort, array('relevance', 'distance'))) {
            #require_once 'Zend/Service/DeveloperGarden/LocalSearch/Exception.php';
            throw new Zend_Service_DeveloperGarden_LocalSearch_Exception('Not a valid sort value provided.');
        }
        $this->_parameters['sort'] = $sort;
        return $this;
    }

    /**
     * enable the separation of phone numbers
     *
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function enablePhoneSeparation()
    {
        $this->_parameters['sepcomm'] = 'true';
        return $this;
    }

    /**
     * disable the separation of phone numbers
     *
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function disablePhoneSeparation()
    {
        // BUGFIX: previously set 'true', which (re-)enabled the separation
        // instead of disabling it.
        $this->_parameters['sepcomm'] = 'false';
        return $this;
    }

    /**
     * if this filter is set, only results with a website are returned
     *
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setFilterOnliner()
    {
        $this->_parameters['filter'] = 'ONLINER';
        return $this;
    }

    /**
     * if this filter is set, only results without a website are returned
     *
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setFilterOffliner()
    {
        $this->_parameters['filter'] = 'OFFLINER';
        return $this;
    }

    /**
     * removes the filter value
     *
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function disableFilter()
    {
        $this->_parameters['filter'] = null;
        return $this;
    }

    /**
     * set a filter to get just results who are open at the given time
     * possible values:
     * now = open right now
     * HH:MM = at the given time (ie 20:00)
     *
     * @param string $time
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setOpeningTime($time = null)
    {
        $this->_parameters['openingtime'] = $time;
        return $this;
    }

    /**
     * sets a category filter
     *
     * @see http://www.suchen.de/kategorie-katalog
     * @param string $category
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setCategory($category = null)
    {
        $this->_parameters['kategorie'] = $category;
        return $this;
    }

    /**
     * sets the site filter
     * ie: www.developergarden.com
     *
     * @param string $site
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setSite($site)
    {
        $this->_parameters['site'] = $site;
        return $this;
    }

    /**
     * sets a filter to the given document type
     * ie: pdf, html
     *
     * @param string $type
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setDocumentType($type)
    {
        $this->_parameters['typ'] = $type;
        return $this;
    }

    /**
     * sets a filter for the company name
     * ie: Deutsche Telekom
     *
     * @param string $name
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setName($name)
    {
        $this->_parameters['name'] = $name;
        return $this;
    }

    /**
     * sets a filter for the zip code
     *
     * @param string $zip
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setZipCode($zip)
    {
        $this->_parameters['plz'] = $zip;
        return $this;
    }

    /**
     * sets a filter for the street
     *
     * @param string $street
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setStreet($street)
    {
        $this->_parameters['strasse'] = $street;
        return $this;
    }

    /**
     * sets a filter for the county
     *
     * @param string $county
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setCounty($county)
    {
        $this->_parameters['bundesland'] = $county;
        return $this;
    }

    /**
     * sets a raw parameter with the value
     *
     * @param string $key
     * @param mixed $value
     * @return Zend_Service_DeveloperGarden_LocalSearch_SearchParameters
     */
    public function setRawParameter($key, $value)
    {
        $this->_parameters[$key] = $value;
        return $this;
    }

    /**
     * returns the parameters as an array, skipping every
     * parameter that is still null
     *
     * @return array
     */
    public function getSearchParameters()
    {
        $retVal = array();
        foreach ($this->_parameters as $key => $value) {
            if ($value === null) {
                continue;
            }
            $param = array(
                'parameter' => $key,
                'value'     => $value
            );
            $retVal[] = $param;
        }
        return $retVal;
    }
}
chemissi/P2
src/public/lib/Zend/Service/DeveloperGarden/LocalSearch/SearchParameters.php
PHP
gpl-2.0
14,788
/*
 * Copyright (C) 2008-2016 TrinityCore <http://www.trinitycore.org/>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include "ScriptMgr.h"
#include "ScriptedCreature.h"
#include "magisters_terrace.h"

enum Says
{
    SAY_AGGRO                   = 0,
    SAY_ENERGY                  = 1,
    SAY_EMPOWERED               = 2,
    SAY_KILL                    = 3,
    SAY_DEATH                   = 4,
    EMOTE_CRYSTAL               = 5
};

enum Spells
{
    // Crystal effect spells
    SPELL_FEL_CRYSTAL_DUMMY     = 44329,
    // Triggers 44321, which changes scale and regenerates mana.
    // Requires an entry in spell_script_target.
    SPELL_MANA_RAGE             = 44320,

    // Selin's spells
    SPELL_DRAIN_LIFE            = 44294,
    SPELL_FEL_EXPLOSION         = 44314,
    SPELL_DRAIN_MANA            = 46153 // Heroic only
};

enum Phases
{
    PHASE_NORMAL                = 1,
    PHASE_DRAIN                 = 2
};

enum Events
{
    EVENT_FEL_EXPLOSION         = 1,
    EVENT_DRAIN_CRYSTAL,
    EVENT_DRAIN_MANA,
    EVENT_DRAIN_LIFE,
    EVENT_EMPOWER
};

enum Misc
{
    ACTION_SWITCH_PHASE         = 1
};

// Selin Fireheart: fights normally until low on mana, then walks to a fel
// crystal and drains it (PHASE_DRAIN) to refill; killing the crystal forces
// him back into PHASE_NORMAL via ACTION_SWITCH_PHASE.
class boss_selin_fireheart : public CreatureScript
{
    public:
        boss_selin_fireheart() : CreatureScript("boss_selin_fireheart") { }

        struct boss_selin_fireheartAI : public BossAI
        {
            boss_selin_fireheartAI(Creature* creature) : BossAI(creature, DATA_SELIN)
            {
                _scheduledEvents = false;
            }

            // Re-collect (and respawn if needed) all fel crystals in the room
            // and make them unselectable until drained.
            void Reset() override
            {
                Crystals.clear();
                me->GetCreatureListWithEntryInGrid(Crystals, NPC_FEL_CRYSTAL, 250.0f);

                for (Creature* creature : Crystals)
                {
                    if (!creature->IsAlive())
                        creature->Respawn();

                    creature->SetFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE);
                }

                _Reset();
                CrystalGUID.Clear();
                _scheduledEvents = false;
            }

            // Called by npc_fel_crystal when a crystal dies: back to normal combat.
            void DoAction(int32 action) override
            {
                switch (action)
                {
                    case ACTION_SWITCH_PHASE:
                        events.SetPhase(PHASE_NORMAL);
                        events.ScheduleEvent(EVENT_FEL_EXPLOSION, 2000, 0, PHASE_NORMAL);
                        AttackStart(me->GetVictim());
                        me->GetMotionMaster()->MoveChase(me->GetVictim());
                        break;
                    default:
                        break;
                }
            }

            // Pick the closest remaining crystal, mark it as the drain target
            // and walk over to it (enters PHASE_DRAIN).
            void SelectNearestCrystal()
            {
                if (Crystals.empty())
                    return;

                Crystals.sort(Trinity::ObjectDistanceOrderPred(me));
                if (Creature* CrystalChosen = Crystals.front())
                {
                    Talk(SAY_ENERGY);
                    Talk(EMOTE_CRYSTAL);

                    DoCast(CrystalChosen, SPELL_FEL_CRYSTAL_DUMMY);
                    CrystalGUID = CrystalChosen->GetGUID();
                    // Each crystal is used at most once.
                    Crystals.remove(CrystalChosen);

                    float x, y, z;
                    CrystalChosen->GetClosePoint(x, y, z, me->GetObjectSize(), CONTACT_DISTANCE);

                    events.SetPhase(PHASE_DRAIN);
                    me->SetWalk(false);
                    me->GetMotionMaster()->MovePoint(1, x, y, z);
                }
            }

            // On death, destroy whatever crystals are left standing.
            void ShatterRemainingCrystals()
            {
                if (Crystals.empty())
                    return;

                for (Creature* crystal : Crystals)
                {
                    if (crystal && crystal->IsAlive())
                        crystal->KillSelf();
                }
            }

            void EnterCombat(Unit* /*who*/) override
            {
                Talk(SAY_AGGRO);
                _EnterCombat();

                events.SetPhase(PHASE_NORMAL);
                events.ScheduleEvent(EVENT_FEL_EXPLOSION, 2100, 0, PHASE_NORMAL);
            }

            void KilledUnit(Unit* victim) override
            {
                if (victim->GetTypeId() == TYPEID_PLAYER)
                    Talk(SAY_KILL);
            }

            // Arrived at the chosen crystal: make it attackable and start the
            // mana-rage channel; empowerment resolves 10s later.
            void MovementInform(uint32 type, uint32 id) override
            {
                if (type == POINT_MOTION_TYPE && id == 1)
                {
                    Unit* CrystalChosen = ObjectAccessor::GetUnit(*me, CrystalGUID);
                    if (CrystalChosen && CrystalChosen->IsAlive())
                    {
                        CrystalChosen->RemoveFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE);
                        CrystalChosen->CastSpell(me, SPELL_MANA_RAGE, true);
                        // NOTE(review): third argument is the event *group*
                        // slot in this overload; PHASE_DRAIN here may have
                        // been intended as the phase mask — confirm against
                        // EventMap::ScheduleEvent.
                        events.ScheduleEvent(EVENT_EMPOWER, 10000, PHASE_DRAIN);
                    }
                }
            }

            void JustDied(Unit* /*killer*/) override
            {
                Talk(SAY_DEATH);
                _JustDied();

                ShatterRemainingCrystals();
            }

            void UpdateAI(uint32 diff) override
            {
                if (!UpdateVictim())
                    return;

                events.Update(diff);

                if (me->HasUnitState(UNIT_STATE_CASTING))
                    return;

                while (uint32 eventId = events.ExecuteEvent())
                {
                    switch (eventId)
                    {
                        case EVENT_FEL_EXPLOSION:
                            DoCastAOE(SPELL_FEL_EXPLOSION);
                            events.ScheduleEvent(EVENT_FEL_EXPLOSION, 2000, 0, PHASE_NORMAL);
                            break;
                        case EVENT_DRAIN_CRYSTAL:
                            SelectNearestCrystal();
                            _scheduledEvents = false;
                            break;
                        case EVENT_DRAIN_MANA:
                            if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, 45.0f, true))
                                DoCast(target, SPELL_DRAIN_MANA);
                            events.ScheduleEvent(EVENT_DRAIN_MANA, 10000, 0, PHASE_NORMAL);
                            break;
                        case EVENT_DRAIN_LIFE:
                            if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, 20.0f, true))
                                DoCast(target, SPELL_DRAIN_LIFE);
                            events.ScheduleEvent(EVENT_DRAIN_LIFE, 10000, 0, PHASE_NORMAL);
                            break;
                        case EVENT_EMPOWER:
                        {
                            // Drain finished: kill the used crystal and resume chasing.
                            Talk(SAY_EMPOWERED);

                            Creature* CrystalChosen = ObjectAccessor::GetCreature(*me, CrystalGUID);
                            if (CrystalChosen && CrystalChosen->IsAlive())
                                CrystalChosen->KillSelf();
                            CrystalGUID.Clear();

                            me->GetMotionMaster()->Clear();
                            me->GetMotionMaster()->MoveChase(me->GetVictim());
                            break;
                        }
                        default:
                            break;
                    }
                }

                // Below 10% mana: queue the drain (and heroic-only mana drain)
                // once per normal phase.
                if (me->GetPowerPct(POWER_MANA) < 10.f)
                {
                    if (events.IsInPhase(PHASE_NORMAL) && !_scheduledEvents)
                    {
                        _scheduledEvents = true;
                        uint32 timer = urand(3000, 7000);
                        events.ScheduleEvent(EVENT_DRAIN_LIFE, timer, 0, PHASE_NORMAL);

                        if (IsHeroic())
                        {
                            events.ScheduleEvent(EVENT_DRAIN_CRYSTAL, urand(10000, 15000), 0, PHASE_NORMAL);
                            events.ScheduleEvent(EVENT_DRAIN_MANA, timer + 5000, 0, PHASE_NORMAL);
                        }
                        else
                            events.ScheduleEvent(EVENT_DRAIN_CRYSTAL, urand(20000, 25000), 0, PHASE_NORMAL);
                    }
                }

                DoMeleeAttackIfReady();
            }

            private:
                std::list<Creature*> Crystals;   // remaining (unused) fel crystals
                ObjectGuid CrystalGUID;          // crystal currently being drained
                bool _scheduledEvents;           // low-mana events already queued this phase
        };

        CreatureAI* GetAI(Creature* creature) const override
        {
            return GetInstanceAI<boss_selin_fireheartAI>(creature);
        };
};

// Fel crystal helper: when a crystal dies, tell Selin to switch back to the
// normal combat phase.
class npc_fel_crystal : public CreatureScript
{
    public:
        npc_fel_crystal() : CreatureScript("npc_fel_crystal") { }

        struct npc_fel_crystalAI : public ScriptedAI
        {
            npc_fel_crystalAI(Creature* creature) : ScriptedAI(creature) { }

            void JustDied(Unit* /*killer*/) override
            {
                if (InstanceScript* instance = me->GetInstanceScript())
                {
                    Creature* Selin = ObjectAccessor::GetCreature(*me, instance->GetGuidData(DATA_SELIN));
                    if (Selin && Selin->IsAlive())
                        Selin->AI()->DoAction(ACTION_SWITCH_PHASE);
                }
            }
        };

        CreatureAI* GetAI(Creature* creature) const override
        {
            return GetInstanceAI<npc_fel_crystalAI>(creature);
        };
};

void AddSC_boss_selin_fireheart()
{
    new boss_selin_fireheart();
    new npc_fel_crystal();
}
AriDEV/TrinityCore
src/server/scripts/EasternKingdoms/MagistersTerrace/boss_selin_fireheart.cpp
C++
gpl-2.0
10,108
/*
Copyright_License {

  XCSoar Glide Computer - http://www.xcsoar.org/
  Copyright (C) 2000-2016 The XCSoar Project
  A detailed list of copyright holders can be found in the file "AUTHORS".

  This program is free software; you can redistribute it and/or
  modify it under the terms of the GNU General Public License
  as published by the Free Software Foundation; either version 2
  of the License, or (at your option) any later version.

  This program is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program; if not, write to the Free Software
  Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
}
*/

#ifndef XCSOAR_FIXED_WINDOW_WIDGET_HPP
#define XCSOAR_FIXED_WINDOW_WIDGET_HPP

#include "WindowWidget.hpp"
#include "Screen/Window.hpp"

/**
 * A WindowWidget that reports the wrapped Window's current size as its
 * minimum size, i.e. the widget cannot be laid out smaller than the
 * window it owns.
 */
class FixedWindowWidget : public WindowWidget {
public:
  FixedWindowWidget() = default;

  /** Wrap an existing #Window (ownership semantics as in WindowWidget). */
  FixedWindowWidget(Window *window):WindowWidget(window) {}

  /* virtual methods from class Widget */
  PixelSize GetMinimumSize() const override {
    return GetWindow().GetSize();
  }
};

#endif
Exadios/XCSoar-the-library
src/Widget/FixedWindowWidget.hpp
C++
gpl-2.0
1,282
/* Copyright_License {

  XCSoar Glide Computer - http://www.xcsoar.org/
  Copyright (C) 2000-2016 The XCSoar Project
  A detailed list of copyright holders can be found in the file "AUTHORS".

  This program is free software; you can redistribute it and/or
  modify it under the terms of the GNU General Public License
  as published by the Free Software Foundation; either version 2
  of the License, or (at your option) any later version.

  This program is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program; if not, write to the Free Software
  Foundation, Inc., 59 Temple Place - Suite 330,
  Boston, MA  02111-1307, USA.
}
*/

#include "Init.hpp"

#include <curl/curl.h>

/**
 * Global initialisation of the HTTP layer: sets up libcurl.
 * NOTE(review): per libcurl documentation, curl_global_init() is not
 * thread-safe — presumably this is called once at startup before any
 * worker threads exist; verify against the caller.
 */
void
Net::Initialise()
{
  curl_global_init(CURL_GLOBAL_WIN32);
}

/**
 * Counterpart to Initialise(): releases libcurl's global resources.
 * Must not be called while any libcurl handle is still in use.
 */
void
Net::Deinitialise()
{
  curl_global_cleanup();
}
Exadios/XCSoar-the-library
src/Net/HTTP/Init.cpp
C++
gpl-2.0
1,066
///////////////////////////////////////////////////////////////////////////// // Name: src/msw/mediactrl_qt.cpp // Purpose: QuickTime Media Backend for Windows // Author: Ryan Norton <wxprojects@comcast.net> // Modified by: Robin Dunn (moved QT code from mediactrl.cpp) // // Created: 11/07/04 // RCS-ID: $Id: mediactrl_qt.cpp 45498 2007-04-16 13:03:05Z VZ $ // Copyright: (c) Ryan Norton // Licence: wxWindows licence ///////////////////////////////////////////////////////////////////////////// //=========================================================================== // DECLARATIONS //=========================================================================== //--------------------------------------------------------------------------- // Pre-compiled header stuff //--------------------------------------------------------------------------- // For compilers that support precompilation, includes "wx.h". #include "wx/wxprec.h" #ifdef __BORLANDC__ #pragma hdrstop #endif #if wxUSE_MEDIACTRL #include "wx/mediactrl.h" #ifndef WX_PRECOMP #include "wx/log.h" #include "wx/dcclient.h" #include "wx/timer.h" #include "wx/math.h" // log10 & pow #endif #include "wx/msw/private.h" // user info and wndproc setting/getting #include "wx/dynlib.h" //--------------------------------------------------------------------------- // Externals (somewhere in src/msw/app.cpp and src/msw/window.cpp) //--------------------------------------------------------------------------- extern "C" WXDLLIMPEXP_BASE HINSTANCE wxGetInstance(void); #ifdef __WXWINCE__ extern WXDLLIMPEXP_CORE wxChar *wxCanvasClassName; #else extern WXDLLIMPEXP_CORE const wxChar *wxCanvasClassName; #endif LRESULT WXDLLIMPEXP_CORE APIENTRY _EXPORT wxWndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam); //--------------------------------------------------------------------------- // Killed MSVC warnings //--------------------------------------------------------------------------- //disable "cast truncates constant 
value" for VARIANT_BOOL values //passed as parameters in VC5 and up #ifdef _MSC_VER #pragma warning (disable:4310) #endif //--------------------------------------------------------------------------- // wxQTMediaBackend // // We don't include Quicktime headers here and define all the types // ourselves because looking for the quicktime libaries etc. would // be tricky to do and making this a dependency for the MSVC projects // would be unrealistic. // // Thanks to Robert Roebling for the wxDL macro/library idea //--------------------------------------------------------------------------- //--------------------------------------------------------------------------- // QT Includes //--------------------------------------------------------------------------- //#include <qtml.h> // Windoze QT include //#include <QuickTimeComponents.h> // Standard QT stuff #include "wx/dynlib.h" //--------------------------------------------------------------------------- // QT Types //--------------------------------------------------------------------------- typedef struct MovieRecord* Movie; typedef wxInt16 OSErr; typedef wxInt32 OSStatus; #define noErr 0 #define fsRdPerm 1 typedef unsigned char Str255[256]; #define StringPtr unsigned char* #define newMovieActive 1 #define newMovieAsyncOK (1 << 8) #define Ptr char* #define Handle Ptr* #define Fixed long #define OSType unsigned long #define CGrafPtr struct GrafPort * #define TimeScale long #define TimeBase struct TimeBaseRecord * typedef struct ComponentInstanceRecord * ComponentInstance; #define kMovieLoadStatePlayable 10000 #define Boolean int #define MovieController ComponentInstance #ifndef URLDataHandlerSubType #if defined(__WATCOMC__) || defined(__MINGW32__) // use magic numbers for compilers which complain about multicharacter integers const OSType URLDataHandlerSubType = 1970433056; const OSType VisualMediaCharacteristic = 1702454643; #else const OSType URLDataHandlerSubType = 'url '; const OSType VisualMediaCharacteristic = 
'eyes'; #endif #endif struct FSSpec { short vRefNum; long parID; Str255 name; // Str63 on mac, Str255 on msw }; struct Rect { short top; short left; short bottom; short right; }; struct wide { wxInt32 hi; wxUint32 lo; }; struct TimeRecord { wide value; // units TimeScale scale; // units per second TimeBase base; }; struct Point { short v; short h; }; struct EventRecord { wxUint16 what; wxUint32 message; wxUint32 when; Point where; wxUint16 modifiers; }; enum { mcTopLeftMovie = 1, mcScaleMovieToFit = 2, mcWithBadge = 4, mcNotVisible = 8, mcWithFrame = 16 }; //--------------------------------------------------------------------------- // QT Library //--------------------------------------------------------------------------- #define wxDL_METHOD_DEFINE( rettype, name, args, shortargs, defret ) \ typedef rettype (* name ## Type) args ; \ name ## Type pfn_ ## name; \ rettype name args \ { if (m_ok) return pfn_ ## name shortargs ; return defret; } #define wxDL_VOIDMETHOD_DEFINE( name, args, shortargs ) \ typedef void (* name ## Type) args ; \ name ## Type pfn_ ## name; \ void name args \ { if (m_ok) pfn_ ## name shortargs ; } #define wxDL_METHOD_LOAD( lib, name, success ) \ pfn_ ## name = (name ## Type) lib.GetSymbol( wxT(#name), &success ); \ if (!success) return false class WXDLLIMPEXP_MEDIA wxQuickTimeLibrary { public: ~wxQuickTimeLibrary() { if (m_dll.IsLoaded()) m_dll.Unload(); } bool Initialize(); bool IsOk() const {return m_ok;} protected: wxDynamicLibrary m_dll; bool m_ok; public: wxDL_VOIDMETHOD_DEFINE( StartMovie, (Movie m), (m) ) wxDL_VOIDMETHOD_DEFINE( StopMovie, (Movie m), (m) ) wxDL_METHOD_DEFINE( bool, IsMovieDone, (Movie m), (m), false) wxDL_VOIDMETHOD_DEFINE( GoToBeginningOfMovie, (Movie m), (m) ) wxDL_METHOD_DEFINE( OSErr, GetMoviesError, (), (), -1) wxDL_METHOD_DEFINE( OSErr, EnterMovies, (), (), -1) wxDL_VOIDMETHOD_DEFINE( ExitMovies, (), () ) wxDL_METHOD_DEFINE( OSErr, InitializeQTML, (long flags), (flags), -1) wxDL_VOIDMETHOD_DEFINE( TerminateQTML, 
(), () ) wxDL_METHOD_DEFINE( OSErr, NativePathNameToFSSpec, (char* inName, FSSpec* outFile, long flags), (inName, outFile, flags), -1) wxDL_METHOD_DEFINE( OSErr, OpenMovieFile, (const FSSpec * fileSpec, short * resRefNum, wxInt8 permission), (fileSpec, resRefNum, permission), -1 ) wxDL_METHOD_DEFINE( OSErr, CloseMovieFile, (short resRefNum), (resRefNum), -1) wxDL_METHOD_DEFINE( OSErr, NewMovieFromFile, (Movie * theMovie, short resRefNum, short * resId, StringPtr resName, short newMovieFlags, bool * dataRefWasChanged), (theMovie, resRefNum, resId, resName, newMovieFlags, dataRefWasChanged), -1) wxDL_VOIDMETHOD_DEFINE( SetMovieRate, (Movie m, Fixed rate), (m, rate) ) wxDL_METHOD_DEFINE( Fixed, GetMovieRate, (Movie m), (m), 0) wxDL_VOIDMETHOD_DEFINE( MoviesTask, (Movie m, long maxms), (m, maxms) ) wxDL_VOIDMETHOD_DEFINE( BlockMove, (const char* p1, const char* p2, long s), (p1,p2,s) ) wxDL_METHOD_DEFINE( Handle, NewHandleClear, (long s), (s), NULL ) wxDL_METHOD_DEFINE( OSErr, NewMovieFromDataRef, (Movie * m, short flags, short * id, Handle dataRef, OSType dataRefType), (m,flags,id,dataRef,dataRefType), -1 ) wxDL_VOIDMETHOD_DEFINE( DisposeHandle, (Handle h), (h) ) wxDL_VOIDMETHOD_DEFINE( GetMovieNaturalBoundsRect, (Movie m, Rect* r), (m,r) ) wxDL_METHOD_DEFINE( void*, GetMovieIndTrackType, (Movie m, long index, OSType type, long flags), (m,index,type,flags), NULL ) wxDL_VOIDMETHOD_DEFINE( CreatePortAssociation, (void* hWnd, void* junk, long morejunk), (hWnd, junk, morejunk) ) wxDL_METHOD_DEFINE(void*, GetNativeWindowPort, (void* hWnd), (hWnd), NULL) wxDL_VOIDMETHOD_DEFINE(SetMovieGWorld, (Movie m, CGrafPtr port, void* whatever), (m, port, whatever) ) wxDL_VOIDMETHOD_DEFINE(DisposeMovie, (Movie m), (m) ) wxDL_VOIDMETHOD_DEFINE(SetMovieBox, (Movie m, Rect* r), (m,r)) wxDL_VOIDMETHOD_DEFINE(SetMovieTimeScale, (Movie m, long s), (m,s)) wxDL_METHOD_DEFINE(long, GetMovieDuration, (Movie m), (m), 0) wxDL_METHOD_DEFINE(TimeBase, GetMovieTimeBase, (Movie m), (m), 0) 
wxDL_METHOD_DEFINE(TimeScale, GetMovieTimeScale, (Movie m), (m), 0) wxDL_METHOD_DEFINE(long, GetMovieTime, (Movie m, void* cruft), (m,cruft), 0) wxDL_VOIDMETHOD_DEFINE(SetMovieTime, (Movie m, TimeRecord* tr), (m,tr) ) wxDL_METHOD_DEFINE(short, GetMovieVolume, (Movie m), (m), 0) wxDL_VOIDMETHOD_DEFINE(SetMovieVolume, (Movie m, short sVolume), (m,sVolume) ) wxDL_VOIDMETHOD_DEFINE(SetMovieTimeValue, (Movie m, long s), (m,s)) wxDL_METHOD_DEFINE(ComponentInstance, NewMovieController, (Movie m, const Rect* mr, long fl), (m,mr,fl), 0) wxDL_VOIDMETHOD_DEFINE(DisposeMovieController, (ComponentInstance ci), (ci)) wxDL_METHOD_DEFINE(int, MCSetVisible, (ComponentInstance m, int b), (m, b), 0) wxDL_VOIDMETHOD_DEFINE(PrePrerollMovie, (Movie m, long t, Fixed r, WXFARPROC p1, void* p2), (m,t,r,p1,p2) ) wxDL_VOIDMETHOD_DEFINE(PrerollMovie, (Movie m, long t, Fixed r), (m,t,r) ) wxDL_METHOD_DEFINE(Fixed, GetMoviePreferredRate, (Movie m), (m), 0) wxDL_METHOD_DEFINE(long, GetMovieLoadState, (Movie m), (m), 0) wxDL_METHOD_DEFINE(void*, NewRoutineDescriptor, (WXFARPROC f, int l, void* junk), (f, l, junk), 0) wxDL_VOIDMETHOD_DEFINE(DisposeRoutineDescriptor, (void* f), (f)) wxDL_METHOD_DEFINE(void*, GetCurrentArchitecture, (), (), 0) wxDL_METHOD_DEFINE(int, MCDoAction, (ComponentInstance ci, long f, void* p), (ci,f,p), 0) wxDL_VOIDMETHOD_DEFINE(MCSetControllerBoundsRect, (ComponentInstance ci, Rect* r), (ci,r)) wxDL_VOIDMETHOD_DEFINE(DestroyPortAssociation, (CGrafPtr g), (g)) wxDL_VOIDMETHOD_DEFINE(NativeEventToMacEvent, (MSG* p1, EventRecord* p2), (p1,p2)) wxDL_VOIDMETHOD_DEFINE(MCIsPlayerEvent, (ComponentInstance ci, EventRecord* p2), (ci, p2)) wxDL_METHOD_DEFINE(int, MCSetMovie, (ComponentInstance ci, Movie m, void* p1, Point w), (ci,m,p1,w),0) wxDL_VOIDMETHOD_DEFINE(MCPositionController, (ComponentInstance ci, Rect* r, void* junk, void* morejunk), (ci,r,junk,morejunk)) wxDL_VOIDMETHOD_DEFINE(MCSetActionFilterWithRefCon, (ComponentInstance ci, WXFARPROC cb, void* ref), (ci,cb,ref)) 
wxDL_VOIDMETHOD_DEFINE(MCGetControllerInfo, (MovieController mc, long* flags), (mc,flags)) wxDL_VOIDMETHOD_DEFINE(BeginUpdate, (CGrafPtr port), (port)) wxDL_VOIDMETHOD_DEFINE(UpdateMovie, (Movie m), (m)) wxDL_VOIDMETHOD_DEFINE(EndUpdate, (CGrafPtr port), (port)) wxDL_METHOD_DEFINE( OSErr, GetMoviesStickyError, (), (), -1) }; bool wxQuickTimeLibrary::Initialize() { m_ok = false; // Turn off the wxDynamicLibrary logging as we're prepared to handle the // errors wxLogNull nolog; if (!m_dll.Load(wxT("qtmlClient.dll"))) { return false; } wxDL_METHOD_LOAD( m_dll, StartMovie, m_ok ); wxDL_METHOD_LOAD( m_dll, StopMovie, m_ok ); wxDL_METHOD_LOAD( m_dll, IsMovieDone, m_ok ); wxDL_METHOD_LOAD( m_dll, GoToBeginningOfMovie, m_ok ); wxDL_METHOD_LOAD( m_dll, GetMoviesError, m_ok ); wxDL_METHOD_LOAD( m_dll, EnterMovies, m_ok ); wxDL_METHOD_LOAD( m_dll, ExitMovies, m_ok ); wxDL_METHOD_LOAD( m_dll, InitializeQTML, m_ok ); wxDL_METHOD_LOAD( m_dll, TerminateQTML, m_ok ); wxDL_METHOD_LOAD( m_dll, NativePathNameToFSSpec, m_ok ); wxDL_METHOD_LOAD( m_dll, OpenMovieFile, m_ok ); wxDL_METHOD_LOAD( m_dll, CloseMovieFile, m_ok ); wxDL_METHOD_LOAD( m_dll, NewMovieFromFile, m_ok ); wxDL_METHOD_LOAD( m_dll, GetMovieRate, m_ok ); wxDL_METHOD_LOAD( m_dll, SetMovieRate, m_ok ); wxDL_METHOD_LOAD( m_dll, MoviesTask, m_ok ); wxDL_METHOD_LOAD( m_dll, BlockMove, m_ok ); wxDL_METHOD_LOAD( m_dll, NewHandleClear, m_ok ); wxDL_METHOD_LOAD( m_dll, NewMovieFromDataRef, m_ok ); wxDL_METHOD_LOAD( m_dll, DisposeHandle, m_ok ); wxDL_METHOD_LOAD( m_dll, GetMovieNaturalBoundsRect, m_ok ); wxDL_METHOD_LOAD( m_dll, GetMovieIndTrackType, m_ok ); wxDL_METHOD_LOAD( m_dll, CreatePortAssociation, m_ok ); wxDL_METHOD_LOAD( m_dll, DestroyPortAssociation, m_ok ); wxDL_METHOD_LOAD( m_dll, GetNativeWindowPort, m_ok ); wxDL_METHOD_LOAD( m_dll, SetMovieGWorld, m_ok ); wxDL_METHOD_LOAD( m_dll, DisposeMovie, m_ok ); wxDL_METHOD_LOAD( m_dll, SetMovieBox, m_ok ); wxDL_METHOD_LOAD( m_dll, SetMovieTimeScale, m_ok ); wxDL_METHOD_LOAD( 
m_dll, GetMovieDuration, m_ok ); wxDL_METHOD_LOAD( m_dll, GetMovieTimeBase, m_ok ); wxDL_METHOD_LOAD( m_dll, GetMovieTimeScale, m_ok ); wxDL_METHOD_LOAD( m_dll, GetMovieTime, m_ok ); wxDL_METHOD_LOAD( m_dll, SetMovieTime, m_ok ); wxDL_METHOD_LOAD( m_dll, GetMovieVolume, m_ok ); wxDL_METHOD_LOAD( m_dll, SetMovieVolume, m_ok ); wxDL_METHOD_LOAD( m_dll, SetMovieTimeValue, m_ok ); wxDL_METHOD_LOAD( m_dll, NewMovieController, m_ok ); wxDL_METHOD_LOAD( m_dll, DisposeMovieController, m_ok ); wxDL_METHOD_LOAD( m_dll, MCSetVisible, m_ok ); wxDL_METHOD_LOAD( m_dll, PrePrerollMovie, m_ok ); wxDL_METHOD_LOAD( m_dll, PrerollMovie, m_ok ); wxDL_METHOD_LOAD( m_dll, GetMoviePreferredRate, m_ok ); wxDL_METHOD_LOAD( m_dll, GetMovieLoadState, m_ok ); wxDL_METHOD_LOAD( m_dll, MCDoAction, m_ok ); wxDL_METHOD_LOAD( m_dll, MCSetControllerBoundsRect, m_ok ); wxDL_METHOD_LOAD( m_dll, NativeEventToMacEvent, m_ok ); wxDL_METHOD_LOAD( m_dll, MCIsPlayerEvent, m_ok ); wxDL_METHOD_LOAD( m_dll, MCSetMovie, m_ok ); wxDL_METHOD_LOAD( m_dll, MCSetActionFilterWithRefCon, m_ok ); wxDL_METHOD_LOAD( m_dll, MCGetControllerInfo, m_ok ); wxDL_METHOD_LOAD( m_dll, BeginUpdate, m_ok ); wxDL_METHOD_LOAD( m_dll, UpdateMovie, m_ok ); wxDL_METHOD_LOAD( m_dll, EndUpdate, m_ok ); wxDL_METHOD_LOAD( m_dll, GetMoviesStickyError, m_ok ); m_ok = true; return true; } class WXDLLIMPEXP_MEDIA wxQTMediaBackend : public wxMediaBackendCommonBase { public: wxQTMediaBackend(); virtual ~wxQTMediaBackend(); virtual bool CreateControl(wxControl* ctrl, wxWindow* parent, wxWindowID id, const wxPoint& pos, const wxSize& size, long style, const wxValidator& validator, const wxString& name); virtual bool Play(); virtual bool Pause(); virtual bool Stop(); virtual bool Load(const wxURI& location, const wxURI& proxy) { return wxMediaBackend::Load(location, proxy); } virtual bool Load(const wxString& fileName); virtual bool Load(const wxURI& location); virtual wxMediaState GetState(); virtual bool SetPosition(wxLongLong where); virtual 
wxLongLong GetPosition(); virtual wxLongLong GetDuration(); virtual void Move(int x, int y, int w, int h); wxSize GetVideoSize() const; virtual double GetPlaybackRate(); virtual bool SetPlaybackRate(double dRate); virtual double GetVolume(); virtual bool SetVolume(double); void Cleanup(); void FinishLoad(); static void PPRMProc (Movie theMovie, OSErr theErr, void* theRefCon); // TODO: Last param actually long - does this work on 64bit machines? static Boolean MCFilterProc(MovieController theController, short action, void *params, LONG_PTR refCon); static LRESULT CALLBACK QTWndProc(HWND, UINT, WPARAM, LPARAM); virtual bool ShowPlayerControls(wxMediaCtrlPlayerControls flags); wxSize m_bestSize; // Original movie size Movie m_movie; // QT Movie handle/instance bool m_bVideo; // Whether or not we have video bool m_bPlaying; // Whether or not movie is playing wxTimer* m_timer; // Load or Play timer wxQuickTimeLibrary m_lib; // DLL to load functions from ComponentInstance m_pMC; // Movie Controller friend class wxQTMediaEvtHandler; DECLARE_DYNAMIC_CLASS(wxQTMediaBackend) }; // helper to hijack background erasing for the QT window class WXDLLIMPEXP_MEDIA wxQTMediaEvtHandler : public wxEvtHandler { public: wxQTMediaEvtHandler(wxQTMediaBackend *qtb, WXHWND hwnd) { m_qtb = qtb; m_hwnd = hwnd; m_qtb->m_ctrl->Connect(m_qtb->m_ctrl->GetId(), wxEVT_ERASE_BACKGROUND, wxEraseEventHandler(wxQTMediaEvtHandler::OnEraseBackground), NULL, this); } void OnEraseBackground(wxEraseEvent& event); private: wxQTMediaBackend *m_qtb; WXHWND m_hwnd; DECLARE_NO_COPY_CLASS(wxQTMediaEvtHandler) }; //=========================================================================== // IMPLEMENTATION //=========================================================================== //--------------------------------------------------------------------------- // wxQTMediaBackend // // TODO: Use a less kludgy way to pause/get state/set state // FIXME: Greg Hazel reports that sometimes files that cannot be played 
// with this backend are treated as playable anyway - not verified though. //--------------------------------------------------------------------------- IMPLEMENT_DYNAMIC_CLASS(wxQTMediaBackend, wxMediaBackend) // Time between timer calls - this is the Apple recommendation to the TCL // team I believe #define MOVIE_DELAY 20 //--------------------------------------------------------------------------- // wxQTLoadTimer // // QT, esp. QT for Windows is very picky about how you go about // async loading. If you were to go through a Windows message loop // or a MoviesTask or both and then check the movie load state // it would still return 1000 (loading)... even (pre)prerolling doesn't // help. However, making a load timer like this works //--------------------------------------------------------------------------- class wxQTLoadTimer : public wxTimer { public: wxQTLoadTimer(Movie movie, wxQTMediaBackend* parent, wxQuickTimeLibrary* pLib) : m_movie(movie), m_parent(parent), m_pLib(pLib) {} void Notify() { m_pLib->MoviesTask(m_movie, 0); // kMovieLoadStatePlayable if (m_pLib->GetMovieLoadState(m_movie) >= 10000) { m_parent->FinishLoad(); delete this; } } protected: Movie m_movie; //Our movie instance wxQTMediaBackend* m_parent; //Backend pointer wxQuickTimeLibrary* m_pLib; //Interfaces }; // -------------------------------------------------------------------------- // wxQTPlayTimer - Handle Asyncronous Playing // // 1) Checks to see if the movie is done, and if not continues // streaming the movie // 2) Sends the wxEVT_MEDIA_STOP event if we have reached the end of // the movie. 
// -------------------------------------------------------------------------- class wxQTPlayTimer : public wxTimer { public: wxQTPlayTimer(Movie movie, wxQTMediaBackend* parent, wxQuickTimeLibrary* pLib) : m_movie(movie), m_parent(parent), m_pLib(pLib) {} void Notify() { // // OK, a little explaining - basically originally // we only called MoviesTask if the movie was actually // playing (not paused or stopped)... this was before // we realized MoviesTask actually handles repainting // of the current frame - so if you were to resize // or something it would previously not redraw that // portion of the movie. // // So now we call MoviesTask always so that it repaints // correctly. // m_pLib->MoviesTask(m_movie, 0); // // Handle the stop event - if the movie has reached // the end, notify our handler // // m_bPlaying == !(Stopped | Paused) // if (m_parent->m_bPlaying) { if (m_pLib->IsMovieDone(m_movie)) { if ( m_parent->SendStopEvent() ) { m_parent->Stop(); wxASSERT(m_pLib->GetMoviesError() == noErr); m_parent->QueueFinishEvent(); } } } } protected: Movie m_movie; // Our movie instance wxQTMediaBackend* m_parent; //Backend pointer wxQuickTimeLibrary* m_pLib; //Interfaces }; //--------------------------------------------------------------------------- // wxQTMediaBackend::QTWndProc // // Forwards events to the Movie Controller so that it can // redraw itself/process messages etc.. 
//--------------------------------------------------------------------------- LRESULT CALLBACK wxQTMediaBackend::QTWndProc(HWND hWnd, UINT nMsg, WPARAM wParam, LPARAM lParam) { wxQTMediaBackend* pThis = (wxQTMediaBackend*)wxGetWindowUserData(hWnd); MSG msg; msg.hwnd = hWnd; msg.message = nMsg; msg.wParam = wParam; msg.lParam = lParam; msg.time = 0; msg.pt.x = 0; msg.pt.y = 0; EventRecord theEvent; pThis->m_lib.NativeEventToMacEvent(&msg, &theEvent); pThis->m_lib.MCIsPlayerEvent(pThis->m_pMC, &theEvent); return pThis->m_ctrl->MSWWindowProc(nMsg, wParam, lParam); } //--------------------------------------------------------------------------- // wxQTMediaBackend Destructor // // Sets m_timer to NULL signifying we havn't loaded anything yet //--------------------------------------------------------------------------- wxQTMediaBackend::wxQTMediaBackend() : m_movie(NULL), m_bPlaying(false), m_timer(NULL), m_pMC(NULL) { } //--------------------------------------------------------------------------- // wxQTMediaBackend Destructor // // 1) Cleans up the QuickTime movie instance // 2) Decrements the QuickTime reference counter - if this reaches // 0, QuickTime shuts down // 3) Decrements the QuickTime Windows Media Layer reference counter - // if this reaches 0, QuickTime shuts down the Windows Media Layer //--------------------------------------------------------------------------- wxQTMediaBackend::~wxQTMediaBackend() { if (m_movie) Cleanup(); if (m_lib.IsOk()) { if (m_pMC) { m_lib.DisposeMovieController(m_pMC); // m_pMC = NULL; } // destroy wxQTMediaEvtHandler we pushed on it m_ctrl->PopEventHandler(true); m_lib.DestroyPortAssociation( (CGrafPtr)m_lib.GetNativeWindowPort(m_ctrl->GetHWND())); //Note that ExitMovies() is not necessary, but //the docs are fuzzy on whether or not TerminateQTML is m_lib.ExitMovies(); m_lib.TerminateQTML(); } } //--------------------------------------------------------------------------- // wxQTMediaBackend::CreateControl // // 1) Intializes 
QuickTime // 2) Creates the control window //--------------------------------------------------------------------------- bool wxQTMediaBackend::CreateControl(wxControl* ctrl, wxWindow* parent, wxWindowID id, const wxPoint& pos, const wxSize& size, long style, const wxValidator& validator, const wxString& name) { if (!m_lib.Initialize()) return false; int nError = m_lib.InitializeQTML(0); if (nError != noErr) //-2093 no dll { wxFAIL_MSG(wxString::Format(wxT("Couldn't Initialize Quicktime-%i"), nError)); return false; } m_lib.EnterMovies(); // Create window // By default wxWindow(s) is created with a border - // so we need to get rid of those // // Since we don't have a child window like most other // backends, we don't need wxCLIP_CHILDREN if ( !ctrl->wxControl::Create(parent, id, pos, size, (style & ~wxBORDER_MASK) | wxBORDER_NONE, validator, name) ) { return false; } m_ctrl = wxStaticCast(ctrl, wxMediaCtrl); // Create a port association for our window so we // can use it as a WindowRef m_lib.CreatePortAssociation(m_ctrl->GetHWND(), NULL, 0L); // Part of a suggestion from Greg Hazel // to repaint movie when idle m_ctrl->PushEventHandler(new wxQTMediaEvtHandler(this, m_ctrl->GetHWND())); // done return true; } //--------------------------------------------------------------------------- // wxQTMediaBackend::Load (file version) // // 1) Get an FSSpec from the Windows path name // 2) Open the movie // 3) Obtain the movie instance from the movie resource // 4) Close the movie resource // 5) Finish loading //--------------------------------------------------------------------------- bool wxQTMediaBackend::Load(const wxString& fileName) { if (m_movie) Cleanup(); bool result = true; OSErr err = noErr; short movieResFile = 0; //= 0 because of annoying VC6 warning FSSpec sfFile; err = m_lib.NativePathNameToFSSpec( (char*) (const char*) fileName.mb_str(), &sfFile, 0); result = (err == noErr); if (result) { err = m_lib.OpenMovieFile(&sfFile, &movieResFile, fsRdPerm); result = 
(err == noErr); } if (result) { short movieResID = 0; Str255 movieName; err = m_lib.NewMovieFromFile( &m_movie, movieResFile, &movieResID, movieName, newMovieActive, NULL ); // wasChanged result = (err == noErr /*&& m_lib.GetMoviesStickyError() == noErr*/); // check m_lib.GetMoviesStickyError() because it may not find the // proper codec and play black video and other strange effects, // not to mention mess up the dynamic backend loading scheme // of wxMediaCtrl - so it just does what the QuickTime player does if (result) { m_lib.CloseMovieFile(movieResFile); FinishLoad(); } } return result; } //--------------------------------------------------------------------------- // wxQTMediaBackend::PPRMProc (static) // // Called when done PrePrerolling the movie. // Note that in 99% of the cases this does nothing... // Anyway we set up the loading timer here to tell us when the movie is done //--------------------------------------------------------------------------- void wxQTMediaBackend::PPRMProc (Movie theMovie, OSErr WXUNUSED_UNLESS_DEBUG(theErr), void* theRefCon) { wxASSERT( theMovie ); wxASSERT( theRefCon ); wxASSERT( theErr == noErr ); wxQTMediaBackend* pBE = (wxQTMediaBackend*) theRefCon; long lTime = pBE->m_lib.GetMovieTime(theMovie,NULL); Fixed rate = pBE->m_lib.GetMoviePreferredRate(theMovie); pBE->m_lib.PrerollMovie(theMovie, lTime, rate); pBE->m_timer = new wxQTLoadTimer(pBE->m_movie, pBE, &pBE->m_lib); pBE->m_timer->Start(MOVIE_DELAY); } //--------------------------------------------------------------------------- // wxQTMediaBackend::Load (URL Version) // // 1) Build an escaped URI from location // 2) Create a handle to store the URI string // 3) Put the URI string inside the handle // 4) Make a QuickTime URL data ref from the handle with the URI in it // 5) Clean up the URI string handle // 6) Do some prerolling // 7) Finish Loading //--------------------------------------------------------------------------- bool wxQTMediaBackend::Load(const wxURI& 
location) { if (m_movie) Cleanup(); wxString theURI = location.BuildURI(); Handle theHandle = m_lib.NewHandleClear(theURI.length() + 1); wxASSERT(theHandle); m_lib.BlockMove(theURI.mb_str(), *theHandle, theURI.length() + 1); // create the movie from the handle that refers to the URI OSErr err = m_lib.NewMovieFromDataRef(&m_movie, newMovieActive | newMovieAsyncOK /* | newMovieIdleImportOK */, NULL, theHandle, URLDataHandlerSubType); m_lib.DisposeHandle(theHandle); if (err == noErr) { long timeNow; Fixed playRate; timeNow = m_lib.GetMovieTime(m_movie, NULL); wxASSERT(m_lib.GetMoviesError() == noErr); playRate = m_lib.GetMoviePreferredRate(m_movie); wxASSERT(m_lib.GetMoviesError() == noErr); // Note that the callback here is optional, // but without it PrePrerollMovie can be buggy // (see Apple ml). Also, some may wonder // why we need this at all - this is because // Apple docs say QuickTime streamed movies // require it if you don't use a Movie Controller, // which we don't by default. // m_lib.PrePrerollMovie(m_movie, timeNow, playRate, (WXFARPROC)wxQTMediaBackend::PPRMProc, (void*)this); return true; } else return false; } //--------------------------------------------------------------------------- // wxQTMediaBackend::FinishLoad // // 1) Create the movie timer // 2) Get real size of movie for GetBestSize/sizers // 3) Set the movie time scale to something usable so that seeking // etc. 
will work correctly // 4) Set our Movie Controller to display the movie if it exists, // otherwise set the bounds of the Movie // 5) Refresh parent window //--------------------------------------------------------------------------- void wxQTMediaBackend::FinishLoad() { // Create the playing/streaming timer m_timer = new wxQTPlayTimer(m_movie, (wxQTMediaBackend*) this, &m_lib); wxASSERT(m_timer); m_timer->Start(MOVIE_DELAY, wxTIMER_CONTINUOUS); // get the real size of the movie Rect outRect; memset(&outRect, 0, sizeof(Rect)); // suppress annoying VC6 warning m_lib.GetMovieNaturalBoundsRect (m_movie, &outRect); wxASSERT(m_lib.GetMoviesError() == noErr); m_bestSize.x = outRect.right - outRect.left; m_bestSize.y = outRect.bottom - outRect.top; // Handle the movie GWorld if (m_pMC) { Point thePoint; thePoint.h = thePoint.v = 0; m_lib.MCSetMovie(m_pMC, m_movie, m_lib.GetNativeWindowPort(m_ctrl->GetHandle()), thePoint); m_lib.MCSetVisible(m_pMC, true); m_bestSize.y += 16; } else { m_lib.SetMovieGWorld(m_movie, (CGrafPtr) m_lib.GetNativeWindowPort(m_ctrl->GetHWND()), NULL); } // Set the movie to millisecond precision m_lib.SetMovieTimeScale(m_movie, 1000); wxASSERT(m_lib.GetMoviesError() == noErr); NotifyMovieLoaded(); } //--------------------------------------------------------------------------- // wxQTMediaBackend::Play // // 1) Start the QT movie // 2) Start the movie loading timer // // NOTE: This will still return success even when // the movie is still loading, and as mentioned in wxQTLoadTimer // I don't know of a way to force this to be sync - so if its // still loading the function will return true but the movie will // still be in the stopped state //--------------------------------------------------------------------------- bool wxQTMediaBackend::Play() { m_lib.StartMovie(m_movie); m_bPlaying = true; return m_lib.GetMoviesError() == noErr; } //--------------------------------------------------------------------------- // wxQTMediaBackend::Pause // // 1) Stop 
the movie // 2) Stop the movie timer //--------------------------------------------------------------------------- bool wxQTMediaBackend::Pause() { m_bPlaying = false; m_lib.StopMovie(m_movie); return m_lib.GetMoviesError() == noErr; } //--------------------------------------------------------------------------- // wxQTMediaBackend::Stop // // 1) Stop the movie // 2) Stop the movie timer // 3) Seek to the beginning of the movie //--------------------------------------------------------------------------- bool wxQTMediaBackend::Stop() { m_bPlaying = false; m_lib.StopMovie(m_movie); if (m_lib.GetMoviesError() == noErr) m_lib.GoToBeginningOfMovie(m_movie); return m_lib.GetMoviesError() == noErr; } //--------------------------------------------------------------------------- // wxQTMediaBackend::GetPlaybackRate // // Get the movie playback rate from ::GetMovieRate //--------------------------------------------------------------------------- double wxQTMediaBackend::GetPlaybackRate() { return ( ((double)m_lib.GetMovieRate(m_movie)) / 0x10000); } //--------------------------------------------------------------------------- // wxQTMediaBackend::SetPlaybackRate // // Convert dRate to Fixed and Set the movie rate through SetMovieRate //--------------------------------------------------------------------------- bool wxQTMediaBackend::SetPlaybackRate(double dRate) { m_lib.SetMovieRate(m_movie, (Fixed) (dRate * 0x10000)); return m_lib.GetMoviesError() == noErr; } //--------------------------------------------------------------------------- // wxQTMediaBackend::SetPosition // // 1) Create a time record struct (TimeRecord) with appropriate values // 2) Pass struct to SetMovieTime //--------------------------------------------------------------------------- bool wxQTMediaBackend::SetPosition(wxLongLong where) { // NB: For some reason SetMovieTime does not work // correctly with the Quicktime Windows SDK (6) // From Muskelkatermann at the wxForum // 
http://www.solidsteel.nl/users/wxwidgets/viewtopic.php?t=2957 // RN - note that I have not verified this but there // is no harm in calling SetMovieTimeValue instead #if 0 TimeRecord theTimeRecord; memset(&theTimeRecord, 0, sizeof(TimeRecord)); theTimeRecord.value.lo = where.GetLo(); theTimeRecord.scale = m_lib.GetMovieTimeScale(m_movie); theTimeRecord.base = m_lib.GetMovieTimeBase(m_movie); m_lib.SetMovieTime(m_movie, &theTimeRecord); #else m_lib.SetMovieTimeValue(m_movie, where.GetLo()); #endif return (m_lib.GetMoviesError() == noErr); } //--------------------------------------------------------------------------- // wxQTMediaBackend::GetPosition // // 1) Calls GetMovieTime to get the position we are in in the movie // in milliseconds (we called //--------------------------------------------------------------------------- wxLongLong wxQTMediaBackend::GetPosition() { return m_lib.GetMovieTime(m_movie, NULL); } //--------------------------------------------------------------------------- // wxQTMediaBackend::GetVolume // // Gets the volume through GetMovieVolume - which returns a 16 bit short - // // +--------+--------+ // + (1) + (2) + // +--------+--------+ // // (1) first 8 bits are value before decimal // (2) second 8 bits are value after decimal // // Volume ranges from -1.0 (gain but no sound), 0 (no sound and no gain) to // 1 (full gain and sound) //--------------------------------------------------------------------------- double wxQTMediaBackend::GetVolume() { short sVolume = m_lib.GetMovieVolume(m_movie); wxASSERT(m_lib.GetMoviesError() == noErr); if (sVolume & (128 << 8)) //negative - no sound return 0.0; return sVolume / 256.0; } //--------------------------------------------------------------------------- // wxQTMediaBackend::SetVolume // // Sets the volume through SetMovieVolume - which takes a 16 bit short - // // +--------+--------+ // + (1) + (2) + // +--------+--------+ // // (1) first 8 bits are value before decimal // (2) second 8 bits are value 
after decimal // // Volume ranges from -1.0 (gain but no sound), 0 (no sound and no gain) to // 1 (full gain and sound) //--------------------------------------------------------------------------- bool wxQTMediaBackend::SetVolume(double dVolume) { m_lib.SetMovieVolume(m_movie, (short) (dVolume * 256)); return m_lib.GetMoviesError() == noErr; } //--------------------------------------------------------------------------- // wxQTMediaBackend::GetDuration // // Calls GetMovieDuration //--------------------------------------------------------------------------- wxLongLong wxQTMediaBackend::GetDuration() { return m_lib.GetMovieDuration(m_movie); } //--------------------------------------------------------------------------- // wxQTMediaBackend::GetState // // Determines the current state: // if we are at the beginning, then we are stopped //--------------------------------------------------------------------------- wxMediaState wxQTMediaBackend::GetState() { if (m_bPlaying) return wxMEDIASTATE_PLAYING; else if ( !m_movie || wxQTMediaBackend::GetPosition() == 0 ) return wxMEDIASTATE_STOPPED; else return wxMEDIASTATE_PAUSED; } //--------------------------------------------------------------------------- // wxQTMediaBackend::Cleanup // // Diposes of the movie timer, Disassociates the Movie Controller with // movie and hides it if it exists, and stops and disposes // of the QT movie //--------------------------------------------------------------------------- void wxQTMediaBackend::Cleanup() { m_bPlaying = false; if (m_timer) { delete m_timer; m_timer = NULL; } m_lib.StopMovie(m_movie); if (m_pMC) { Point thePoint; thePoint.h = thePoint.v = 0; m_lib.MCSetVisible(m_pMC, false); m_lib.MCSetMovie(m_pMC, NULL, NULL, thePoint); } m_lib.DisposeMovie(m_movie); m_movie = NULL; } //--------------------------------------------------------------------------- // wxQTMediaBackend::ShowPlayerControls // // Creates a movie controller for the Movie if the user wants it 
//---------------------------------------------------------------------------
// wxQTMediaBackend::ShowPlayerControls
//
// Creates (or tears down) the native QuickTime movie controller depending on
// the requested flags. When a controller already exists it is disposed and
// the control's original wndproc is restored before a new one is built.
//---------------------------------------------------------------------------
bool wxQTMediaBackend::ShowPlayerControls(wxMediaCtrlPlayerControls flags)
{
    if (m_pMC)
    {
        // restore old wndproc
        wxSetWindowProc((HWND)m_ctrl->GetHWND(), wxWndProc);
        m_lib.DisposeMovieController(m_pMC);
        m_pMC = NULL;

        // movie controller height
        m_bestSize.y -= 16;
    }

    if (flags && m_movie)
    {
        Rect rect;
        wxRect wxrect = m_ctrl->GetClientRect();

        // make room for controller
        if (wxrect.width < 320)
            wxrect.width = 320;

        rect.top = (short)wxrect.y;
        rect.left = (short)wxrect.x;
        rect.right = (short)(rect.left + wxrect.width);
        rect.bottom = (short)(rect.top + wxrect.height);

        if (!m_pMC)
        {
            m_pMC = m_lib.NewMovieController(m_movie, &rect, mcTopLeftMovie |
                            //  mcScaleMovieToFit |
                            //  mcWithBadge |
                                mcWithFrame);

            // enable keyboard interaction with the controller
            m_lib.MCDoAction(m_pMC, 32, (void*)true); // mcActionSetKeysEnabled

            // route controller messages through our static filter proc,
            // passing this backend instance as the refcon
            m_lib.MCSetActionFilterWithRefCon(m_pMC,
                (WXFARPROC)wxQTMediaBackend::MCFilterProc, (void*)this);

            m_bestSize.y += 16; // movie controller height

            // By default the movie controller uses its own colour palette
            // for the movie which can be bad on some files, so turn it off.
            // Also turn off its frame / border for the movie
            // Also take care of a couple of the interface flags here
            long mcFlags = 0;
            m_lib.MCDoAction(m_pMC, 39/*mcActionGetFlags*/, (void*)&mcFlags);

            mcFlags |=
                // (1<< 0) /*mcFlagSuppressMovieFrame*/ |
                (1<< 3) /*mcFlagsUseWindowPalette*/
                | ((flags & wxMEDIACTRLPLAYERCONTROLS_STEP)
                      ? 0 : (1<< 1) /*mcFlagSuppressStepButtons*/)
                | ((flags & wxMEDIACTRLPLAYERCONTROLS_VOLUME)
                      ? 0 : (1<< 2) /*mcFlagSuppressSpeakerButton*/)
                // | (1<< 4) /*mcFlagDontInvalidate*/ // if we take care of repainting ourselves
                ;

            m_lib.MCDoAction(m_pMC, 38/*mcActionSetFlags*/, (void*)mcFlags);

            // intercept the wndproc of our control window
            wxSetWindowProc((HWND)m_ctrl->GetHWND(), wxQTMediaBackend::QTWndProc);

            // set the user data of our window
            wxSetWindowUserData((HWND)m_ctrl->GetHWND(), this);
        }
    }

    NotifyMovieSizeChanged();

    return m_lib.GetMoviesError() == noErr;
}

//---------------------------------------------------------------------------
// wxQTMediaBackend::MCFilterProc (static)
//
// Callback for when the movie controller receives a message. refCon carries
// the wxQTMediaBackend instance registered in ShowPlayerControls.
//---------------------------------------------------------------------------
Boolean wxQTMediaBackend::MCFilterProc(MovieController WXUNUSED(theController),
                                       short action,
                                       void * WXUNUSED(params),
                                       LONG_PTR refCon)
{
    // NB: potential optimisation
    // if (action == 1)
    //    return 0;

    wxQTMediaBackend* pThis = (wxQTMediaBackend*)refCon;

    switch (action)
    {
    case 1: // don't process idle events
        break;

    case 8:
        // play button triggered - MC will set movie to opposite state
        // of current - playing ? paused : playing
        if (pThis)
            pThis->m_bPlaying = !(pThis->m_bPlaying);

        // NB: Sometimes it doesn't redraw properly -
        // if you click on the button but don't move the mouse
        // the button will not change its state until you move
        // mcActionDraw and Refresh/Update combo do nothing
        // to help this unfortunately
        break;

    default:
        break;
    }

    return 0;
}

//---------------------------------------------------------------------------
// wxQTMediaBackend::GetVideoSize
//
// Returns the actual size of the QT movie
//---------------------------------------------------------------------------
wxSize wxQTMediaBackend::GetVideoSize() const
{
    return m_bestSize;
}

//---------------------------------------------------------------------------
// wxQTMediaBackend::Move
//
// Sets the bounds of either the Movie or Movie Controller. x/y are ignored;
// only width/height are applied (widened to at least 320 for the controller).
//---------------------------------------------------------------------------
void wxQTMediaBackend::Move(int WXUNUSED(x), int WXUNUSED(y), int w, int h)
{
    if (m_movie)
    {
        // make room for controller
        if (m_pMC)
        {
            if (w < 320)
                w = 320;

            Rect theRect = {0, 0, (short)h, (short)w};
            m_lib.MCSetControllerBoundsRect(m_pMC, &theRect);
        }
        else
        {
            Rect theRect = {0, 0, (short)h, (short)w};
            m_lib.SetMovieBox(m_movie, &theRect);
        }

        wxASSERT(m_lib.GetMoviesError() == noErr);
    }
}

//---------------------------------------------------------------------------
// wxQTMediaEvtHandler::OnEraseBackground
//
// Suggestion from Greg Hazel to repaint the movie when idle
// (on pause also)
//
// TODO: We may be repainting too much here - under what exact circumstances
// do we need this? I think Move also repaints correctly for the Movie
// Controller, so in that instance we don't need this either
//---------------------------------------------------------------------------
void wxQTMediaEvtHandler::OnEraseBackground(wxEraseEvent& evt)
{
    // NOTE(review): "m_pLib" is a local reference despite the member-style
    // name - confusing but left as-is (renaming is out of scope for a
    // comment-only pass).
    wxQuickTimeLibrary& m_pLib = m_qtb->m_lib;

    if ( m_qtb->m_pMC )
    {
        // repaint movie controller
        m_pLib.MCDoAction(m_qtb->m_pMC, 2 /*mcActionDraw*/,
                            m_pLib.GetNativeWindowPort(m_hwnd));
    }
    else if ( m_qtb->m_movie )
    {
        // no movie controller - repaint the movie surface directly
        CGrafPtr port = (CGrafPtr)m_pLib.GetNativeWindowPort(m_hwnd);

        m_pLib.BeginUpdate(port);
        m_pLib.UpdateMovie(m_qtb->m_movie);
        wxASSERT(m_pLib.GetMoviesError() == noErr);
        m_pLib.EndUpdate(port);
    }
    else
    {
        // no movie
        // let the system repaint the window
        evt.Skip();
    }
}

//---------------------------------------------------------------------------
//  End QT Backend
//---------------------------------------------------------------------------

// in source file that contains stuff you don't directly use
#include "wx/html/forcelnk.h"
FORCE_LINK_ME(wxmediabackend_qt)

#endif // wxUSE_MEDIACTRL && wxUSE_ACTIVEX
radiaku/decoda
libs/wxWidgets/src/msw/mediactrl_qt.cpp
C++
gpl-3.0
45,849
///////////////////////////////////////////////////////////////////////////// // Name: src/msw/gdiobj.cpp // Purpose: wxGDIObject class // Author: Julian Smart // Modified by: // Created: 01/02/97 // RCS-ID: $Id: gdiobj.cpp 40626 2006-08-16 14:53:49Z VS $ // Copyright: (c) Julian Smart // Licence: wxWindows licence ///////////////////////////////////////////////////////////////////////////// // For compilers that support precompilation, includes "wx.h". #include "wx/wxprec.h" #ifdef __BORLANDC__ #pragma hdrstop #endif #include "wx/gdiobj.h" #ifndef WX_PRECOMP #include <stdio.h> #include "wx/list.h" #include "wx/utils.h" #include "wx/app.h" #endif #include "wx/msw/private.h" #define M_GDIDATA wx_static_cast(wxGDIRefData*, m_refData) /* void wxGDIObject::IncrementResourceUsage(void) { if ( !M_GDIDATA ) return; // wxDebugMsg("Object %ld about to be incremented: %d\n", (long)this, m_usageCount); M_GDIDATA->m_usageCount ++; }; void wxGDIObject::DecrementResourceUsage(void) { if ( !M_GDIDATA ) return; M_GDIDATA->m_usageCount --; if (wxTheApp) wxTheApp->SetPendingCleanup(true); // wxDebugMsg("Object %ld decremented: %d\n", (long)this, M_GDIDATA->m_usageCount); if (M_GDIDATA->m_usageCount < 0) { char buf[80]; sprintf(buf, "Object %ld usage count is %d\n", (long)this, M_GDIDATA->m_usageCount); wxDebugMsg(buf); } // assert(M_GDIDATA->m_usageCount >= 0); }; */
radiaku/decoda
libs/wxWidgets/src/msw/gdiobj.cpp
C++
gpl-3.0
1,489
/*
 *
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package grpc

import (
	"context"
	"strings"
	"sync"

	"google.golang.org/grpc/balancer"
	"google.golang.org/grpc/connectivity"
	"google.golang.org/grpc/grpclog"
	"google.golang.org/grpc/resolver"
)

// balancerWrapperBuilder adapts a v1 Balancer to the v2 balancer.Builder
// interface.
type balancerWrapperBuilder struct {
	b Balancer // The v1 balancer.
}

// Build starts the wrapped v1 balancer against the ClientConn's target
// (stripped of its "scheme:///" prefix, if any), wraps it in a
// balancerWrapper, and kicks off lbWatcher to react to address updates.
func (bwb *balancerWrapperBuilder) Build(cc balancer.ClientConn, opts balancer.BuildOptions) balancer.Balancer {
	targetAddr := cc.Target()
	targetSplitted := strings.Split(targetAddr, ":///")
	if len(targetSplitted) >= 2 {
		targetAddr = targetSplitted[1]
	}

	bwb.b.Start(targetAddr, BalancerConfig{
		DialCreds: opts.DialCreds,
		Dialer:    opts.Dialer,
	})
	_, pickfirst := bwb.b.(*pickFirst)
	bw := &balancerWrapper{
		balancer:   bwb.b,
		pickfirst:  pickfirst,
		cc:         cc,
		targetAddr: targetAddr,
		startCh:    make(chan struct{}),
		conns:      make(map[resolver.Address]balancer.SubConn),
		connSt:     make(map[balancer.SubConn]*scState),
		csEvltr:    &balancer.ConnectivityStateEvaluator{},
		state:      connectivity.Idle,
	}
	cc.UpdateBalancerState(connectivity.Idle, bw)
	go bw.lbWatcher()
	return bw
}

// Name implements balancer.Builder.
func (bwb *balancerWrapperBuilder) Name() string {
	return "wrapper"
}

// scState tracks the v1 address, connectivity state and Up-callback teardown
// function for one SubConn.
type scState struct {
	addr Address // The v1 address type.
	s    connectivity.State
	down func(error)
}

type balancerWrapper struct {
	balancer  Balancer // The v1 balancer.
	pickfirst bool

	cc         balancer.ClientConn
	targetAddr string // Target without the scheme.

	mu     sync.Mutex
	conns  map[resolver.Address]balancer.SubConn
	connSt map[balancer.SubConn]*scState
	// This channel is closed when handling the first resolver result.
	// lbWatcher blocks until this is closed, to avoid race between
	// - NewSubConn is created, cc wants to notify balancer of state changes;
	// - Build hasn't return, cc doesn't have access to balancer.
	startCh chan struct{}

	// To aggregate the connectivity state.
	csEvltr *balancer.ConnectivityStateEvaluator
	state   connectivity.State
}

// lbWatcher watches the Notify channel of the balancer and manages
// connections accordingly.
func (bw *balancerWrapper) lbWatcher() {
	<-bw.startCh
	notifyCh := bw.balancer.Notify()
	if notifyCh == nil {
		// There's no resolver in the balancer. Connect directly.
		a := resolver.Address{
			Addr: bw.targetAddr,
			Type: resolver.Backend,
		}
		sc, err := bw.cc.NewSubConn([]resolver.Address{a}, balancer.NewSubConnOptions{})
		if err != nil {
			grpclog.Warningf("Error creating connection to %v. Err: %v", a, err)
		} else {
			bw.mu.Lock()
			bw.conns[a] = sc
			bw.connSt[sc] = &scState{
				addr: Address{Addr: bw.targetAddr},
				s:    connectivity.Idle,
			}
			bw.mu.Unlock()
			sc.Connect()
		}
		return
	}

	for addrs := range notifyCh {
		grpclog.Infof("balancerWrapper: got update addr from Notify: %v\n", addrs)
		if bw.pickfirst {
			var (
				oldA  resolver.Address
				oldSC balancer.SubConn
			)
			// Grab the (at most one) existing SubConn for pickfirst.
			bw.mu.Lock()
			for oldA, oldSC = range bw.conns {
				break
			}
			bw.mu.Unlock()
			if len(addrs) <= 0 {
				if oldSC != nil {
					// Teardown old sc.
					bw.mu.Lock()
					delete(bw.conns, oldA)
					delete(bw.connSt, oldSC)
					bw.mu.Unlock()
					bw.cc.RemoveSubConn(oldSC)
				}
				continue
			}

			var newAddrs []resolver.Address
			for _, a := range addrs {
				newAddr := resolver.Address{
					Addr:       a.Addr,
					Type:       resolver.Backend, // All addresses from balancer are all backends.
					ServerName: "",
					Metadata:   a.Metadata,
				}
				newAddrs = append(newAddrs, newAddr)
			}
			if oldSC == nil {
				// Create new sc.
				sc, err := bw.cc.NewSubConn(newAddrs, balancer.NewSubConnOptions{})
				if err != nil {
					grpclog.Warningf("Error creating connection to %v. Err: %v", newAddrs, err)
				} else {
					bw.mu.Lock()
					// For pickfirst, there should be only one SubConn, so the
					// address doesn't matter. All states updating (up and down)
					// and picking should all happen on that only SubConn.
					bw.conns[resolver.Address{}] = sc
					bw.connSt[sc] = &scState{
						addr: addrs[0], // Use the first address.
						s:    connectivity.Idle,
					}
					bw.mu.Unlock()
					sc.Connect()
				}
			} else {
				bw.mu.Lock()
				bw.connSt[oldSC].addr = addrs[0]
				bw.mu.Unlock()
				oldSC.UpdateAddresses(newAddrs)
			}
		} else {
			var (
				add []resolver.Address // Addresses need to setup connections.
				del []balancer.SubConn // Connections need to tear down.
			)
			resAddrs := make(map[resolver.Address]Address)
			for _, a := range addrs {
				resAddrs[resolver.Address{
					Addr:       a.Addr,
					Type:       resolver.Backend, // All addresses from balancer are all backends.
					ServerName: "",
					Metadata:   a.Metadata,
				}] = a
			}
			// Diff the new address set against the existing connections.
			bw.mu.Lock()
			for a := range resAddrs {
				if _, ok := bw.conns[a]; !ok {
					add = append(add, a)
				}
			}
			for a, c := range bw.conns {
				if _, ok := resAddrs[a]; !ok {
					del = append(del, c)
					delete(bw.conns, a)
					// Keep the state of this sc in bw.connSt until its state becomes Shutdown.
				}
			}
			bw.mu.Unlock()
			for _, a := range add {
				sc, err := bw.cc.NewSubConn([]resolver.Address{a}, balancer.NewSubConnOptions{})
				if err != nil {
					grpclog.Warningf("Error creating connection to %v. Err: %v", a, err)
				} else {
					bw.mu.Lock()
					bw.conns[a] = sc
					bw.connSt[sc] = &scState{
						addr: resAddrs[a],
						s:    connectivity.Idle,
					}
					bw.mu.Unlock()
					sc.Connect()
				}
			}
			for _, c := range del {
				bw.cc.RemoveSubConn(c)
			}
		}
	}
}

// HandleSubConnStateChange translates v2 SubConn state transitions into the
// v1 balancer's Up/down callbacks and refreshes the aggregated picker state.
func (bw *balancerWrapper) HandleSubConnStateChange(sc balancer.SubConn, s connectivity.State) {
	bw.mu.Lock()
	defer bw.mu.Unlock()
	scSt, ok := bw.connSt[sc]
	if !ok {
		return
	}
	if s == connectivity.Idle {
		sc.Connect()
	}
	oldS := scSt.s
	scSt.s = s
	if oldS != connectivity.Ready && s == connectivity.Ready {
		scSt.down = bw.balancer.Up(scSt.addr)
	} else if oldS == connectivity.Ready && s != connectivity.Ready {
		if scSt.down != nil {
			scSt.down(errConnClosing)
		}
	}
	sa := bw.csEvltr.RecordTransition(oldS, s)
	if bw.state != sa {
		bw.state = sa
	}
	bw.cc.UpdateBalancerState(bw.state, bw)
	if s == connectivity.Shutdown {
		// Remove state for this sc.
		delete(bw.connSt, sc)
	}
}

// HandleResolvedAddrs unblocks lbWatcher on the first resolver result; the
// addresses themselves are ignored (the v1 balancer has its own resolver).
func (bw *balancerWrapper) HandleResolvedAddrs([]resolver.Address, error) {
	bw.mu.Lock()
	defer bw.mu.Unlock()
	select {
	case <-bw.startCh:
	default:
		close(bw.startCh)
	}
	// There should be a resolver inside the balancer.
	// All updates here, if any, are ignored.
}

// Close unblocks lbWatcher if it is still waiting, then closes the wrapped
// v1 balancer.
func (bw *balancerWrapper) Close() {
	bw.mu.Lock()
	defer bw.mu.Unlock()
	select {
	case <-bw.startCh:
	default:
		close(bw.startCh)
	}
	bw.balancer.Close()
}

// The picker is the balancerWrapper itself.
// Pick should never return ErrNoSubConnAvailable.
// It either blocks or returns error, consistent with v1 balancer Get().
func (bw *balancerWrapper) Pick(ctx context.Context, opts balancer.PickOptions) (balancer.SubConn, func(balancer.DoneInfo), error) {
	failfast := true // Default failfast is true.
	if ss, ok := rpcInfoFromContext(ctx); ok {
		failfast = ss.failfast
	}
	a, p, err := bw.balancer.Get(ctx, BalancerGetOptions{BlockingWait: !failfast})
	if err != nil {
		return nil, nil, err
	}
	var done func(balancer.DoneInfo)
	if p != nil {
		done = func(i balancer.DoneInfo) { p() }
	}
	var sc balancer.SubConn
	bw.mu.Lock()
	defer bw.mu.Unlock()
	if bw.pickfirst {
		// Get the first sc in conns.
		for _, sc = range bw.conns {
			break
		}
	} else {
		var ok bool
		sc, ok = bw.conns[resolver.Address{
			Addr:       a.Addr,
			Type:       resolver.Backend,
			ServerName: "",
			Metadata:   a.Metadata,
		}]
		if !ok && failfast {
			return nil, nil, balancer.ErrTransientFailure
		}
		if s, ok := bw.connSt[sc]; failfast && (!ok || s.s != connectivity.Ready) {
			// If the returned sc is not ready and RPC is failfast,
			// return error, and this RPC will fail.
			return nil, nil, balancer.ErrTransientFailure
		}
	}

	return sc, done, nil
}
drebes/terraform
vendor/google.golang.org/grpc/balancer_v1_wrapper.go
GO
mpl-2.0
8,557
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.fetch.subphase;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.script.FieldScript;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;

/**
 * Fetch sub-phase that evaluates the request's script fields for every hit
 * and attaches the results as {@link DocumentField}s on the hit.
 */
public final class ScriptFieldsFetchSubPhase implements FetchSubPhase {

    /**
     * Runs every configured script field against each hit.
     *
     * Hits are processed in docId order so that the per-segment leaf scripts
     * can be compiled once per segment and reused for all hits that fall in
     * that segment (see the lastReaderId cache below).
     */
    @Override
    public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {
        if (context.hasScriptFields() == false) {
            return;
        }

        hits = hits.clone(); // don't modify the incoming hits
        Arrays.sort(hits, Comparator.comparingInt(SearchHit::docId));

        int lastReaderId = -1;
        FieldScript[] leafScripts = null;
        List<ScriptFieldsContext.ScriptField> scriptFields = context.scriptFields().fields();
        final IndexReader reader = context.searcher().getIndexReader();
        for (SearchHit hit : hits) {
            // Locate the segment containing this hit; rebuild the leaf
            // scripts only when we cross into a new segment.
            int readerId = ReaderUtil.subIndex(hit.docId(), reader.leaves());
            LeafReaderContext leafReaderContext = reader.leaves().get(readerId);
            if (readerId != lastReaderId) {
                leafScripts = createLeafScripts(leafReaderContext, scriptFields);
                lastReaderId = readerId;
            }
            // Segment-relative doc id expected by the leaf scripts.
            int docId = hit.docId() - leafReaderContext.docBase;
            for (int i = 0; i < leafScripts.length; i++) {
                leafScripts[i].setDocument(docId);
                final Object value;
                try {
                    value = leafScripts[i].execute();
                    CollectionUtils.ensureNoSelfReferences(value, "ScriptFieldsFetchSubPhase leaf script " + i);
                } catch (RuntimeException e) {
                    // Script errors are swallowed only when the field was
                    // explicitly configured to ignore failures.
                    if (scriptFields.get(i).ignoreException()) {
                        continue;
                    }
                    throw e;
                }
                if (hit.fieldsOrNull() == null) {
                    hit.fields(new HashMap<>(2));
                }
                String scriptFieldName = scriptFields.get(i).name();
                DocumentField hitField = hit.getFields().get(scriptFieldName);
                // NOTE(review): if a field of this name already exists on the
                // hit, the freshly computed value is discarded (first writer
                // wins) - presumably intentional, but worth confirming.
                if (hitField == null) {
                    final List<Object> values;
                    if (value instanceof Collection) {
                        values = new ArrayList<>((Collection<?>) value);
                    } else {
                        values = Collections.singletonList(value);
                    }
                    hitField = new DocumentField(scriptFieldName, values);
                    hit.getFields().put(scriptFieldName, hitField);
                }
            }
        }
    }

    /**
     * Instantiates one {@link FieldScript} per configured script field for
     * the given segment. Wraps compile-time IO failures in an
     * {@link IllegalStateException} naming the offending field.
     */
    private FieldScript[] createLeafScripts(LeafReaderContext context,
                                            List<ScriptFieldsContext.ScriptField> scriptFields) {
        FieldScript[] scripts = new FieldScript[scriptFields.size()];
        for (int i = 0; i < scripts.length; i++) {
            try {
                scripts[i] = scriptFields.get(i).script().newInstance(context);
            } catch (IOException e1) {
                throw new IllegalStateException("Failed to load script " + scriptFields.get(i).name(), e1);
            }
        }
        return scripts;
    }
}
coding0011/elasticsearch
server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java
Java
apache-2.0
4,508
module.exports = require("./mime-functions"); module.exports.contentTypes = require("./content-types");
stephentcannon/anonistreamr
public/node_modules/nodemailer/node_modules/mailcomposer/node_modules/mimelib-noiconv/index.js
JavaScript
apache-2.0
104
/*
Copyright 2017 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package upgrade

import (
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"time"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/errors"
	clientset "k8s.io/client-go/kubernetes"
	certutil "k8s.io/client-go/util/cert"
	kubeadmapi "k8s.io/kubernetes/cmd/kubeadm/app/apis/kubeadm"
	kubeadmapiv1alpha3 "k8s.io/kubernetes/cmd/kubeadm/app/apis/kubeadm/v1alpha3"
	kubeadmconstants "k8s.io/kubernetes/cmd/kubeadm/app/constants"
	"k8s.io/kubernetes/cmd/kubeadm/app/features"
	"k8s.io/kubernetes/cmd/kubeadm/app/phases/addons/dns"
	"k8s.io/kubernetes/cmd/kubeadm/app/phases/addons/proxy"
	"k8s.io/kubernetes/cmd/kubeadm/app/phases/bootstraptoken/clusterinfo"
	nodebootstraptoken "k8s.io/kubernetes/cmd/kubeadm/app/phases/bootstraptoken/node"
	certsphase "k8s.io/kubernetes/cmd/kubeadm/app/phases/certs"
	kubeletphase "k8s.io/kubernetes/cmd/kubeadm/app/phases/kubelet"
	patchnodephase "k8s.io/kubernetes/cmd/kubeadm/app/phases/patchnode"
	"k8s.io/kubernetes/cmd/kubeadm/app/phases/selfhosting"
	"k8s.io/kubernetes/cmd/kubeadm/app/phases/uploadconfig"
	"k8s.io/kubernetes/cmd/kubeadm/app/util/apiclient"
	dryrunutil "k8s.io/kubernetes/cmd/kubeadm/app/util/dryrun"
	"k8s.io/kubernetes/pkg/util/version"
)

// expiry is the cert-age threshold used by shouldBackupAPIServerCertAndKey
// (180 days).
var expiry = 180 * 24 * time.Hour

// PerformPostUpgradeTasks runs nearly the same functions as 'kubeadm init' would do
// Note that the markmaster phase is left out, not needed, and no token is created as that doesn't belong to the upgrade
//
// Errors from the individual steps are collected and returned as one
// aggregate, so a failure in one step does not stop the remaining steps.
func PerformPostUpgradeTasks(client clientset.Interface, cfg *kubeadmapi.InitConfiguration, newK8sVer *version.Version, dryRun bool) error {
	errs := []error{}

	// Upload currently used configuration to the cluster
	// Note: This is done right in the beginning of cluster initialization; as we might want to make other phases
	// depend on centralized information from this source in the future
	if err := uploadconfig.UploadConfiguration(cfg, client); err != nil {
		errs = append(errs, err)
	}

	// Create the new, version-branched kubelet ComponentConfig ConfigMap
	if err := kubeletphase.CreateConfigMap(cfg, client); err != nil {
		errs = append(errs, fmt.Errorf("error creating kubelet configuration ConfigMap: %v", err))
	}

	// Write the new kubelet config down to disk and the env file if needed
	if err := writeKubeletConfigFiles(client, cfg, newK8sVer, dryRun); err != nil {
		errs = append(errs, err)
	}

	// Annotate the node with the crisocket information, sourced either from the InitConfiguration struct or
	// --cri-socket.
	// TODO: In the future we want to use something more official like NodeStatus or similar for detecting this properly
	if err := patchnodephase.AnnotateCRISocket(client, cfg.NodeRegistration.Name, cfg.NodeRegistration.CRISocket); err != nil {
		errs = append(errs, fmt.Errorf("error uploading crisocket: %v", err))
	}

	// Create/update RBAC rules that makes the bootstrap tokens able to post CSRs
	if err := nodebootstraptoken.AllowBootstrapTokensToPostCSRs(client); err != nil {
		errs = append(errs, err)
	}

	// Create/update RBAC rules that makes the bootstrap tokens able to get their CSRs approved automatically
	if err := nodebootstraptoken.AutoApproveNodeBootstrapTokens(client); err != nil {
		errs = append(errs, err)
	}

	// Create/update RBAC rules that makes the nodes to rotate certificates and get their CSRs approved automatically
	if err := nodebootstraptoken.AutoApproveNodeCertificateRotation(client); err != nil {
		errs = append(errs, err)
	}

	// Upgrade to a self-hosted control plane if possible
	if err := upgradeToSelfHosting(client, cfg, dryRun); err != nil {
		errs = append(errs, err)
	}

	// TODO: Is this needed to do here? I think that updating cluster info should probably be separate from a normal upgrade
	// Create the cluster-info ConfigMap with the associated RBAC rules
	// if err := clusterinfo.CreateBootstrapConfigMapIfNotExists(client, kubeadmconstants.GetAdminKubeConfigPath()); err != nil {
	//	return err
	//}

	// Create/update RBAC rules that makes the cluster-info ConfigMap reachable
	if err := clusterinfo.CreateClusterInfoRBACRules(client); err != nil {
		errs = append(errs, err)
	}

	// Rotate the kube-apiserver cert and key if needed
	if err := backupAPIServerCertIfNeeded(cfg, dryRun); err != nil {
		errs = append(errs, err)
	}

	// Upgrade kube-dns/CoreDNS and kube-proxy
	if err := dns.EnsureDNSAddon(cfg, client); err != nil {
		errs = append(errs, err)
	}
	// Remove the old DNS deployment if a new DNS service is now used (kube-dns to CoreDNS or vice versa)
	if err := removeOldDNSDeploymentIfAnotherDNSIsUsed(cfg, client, dryRun); err != nil {
		errs = append(errs, err)
	}

	if err := proxy.EnsureProxyAddon(cfg, client); err != nil {
		errs = append(errs, err)
	}
	return errors.NewAggregate(errs)
}

// removeOldDNSDeploymentIfAnotherDNSIsUsed deletes the now-unused DNS
// deployment (kube-dns or CoreDNS, whichever is NOT selected by the feature
// gate), retrying up to 10 times. Outside of dry-run it first waits for the
// newly installed DNS deployment to report ready replicas.
func removeOldDNSDeploymentIfAnotherDNSIsUsed(cfg *kubeadmapi.InitConfiguration, client clientset.Interface, dryRun bool) error {
	return apiclient.TryRunCommand(func() error {
		installedDeploymentName := kubeadmconstants.KubeDNS
		deploymentToDelete := kubeadmconstants.CoreDNS

		if features.Enabled(cfg.FeatureGates, features.CoreDNS) {
			installedDeploymentName = kubeadmconstants.CoreDNS
			deploymentToDelete = kubeadmconstants.KubeDNS
		}

		// If we're dry-running, we don't need to wait for the new DNS addon to become ready
		if !dryRun {
			dnsDeployment, err := client.AppsV1().Deployments(metav1.NamespaceSystem).Get(installedDeploymentName, metav1.GetOptions{})
			if err != nil {
				return err
			}
			if dnsDeployment.Status.ReadyReplicas == 0 {
				return fmt.Errorf("the DNS deployment isn't ready yet")
			}
		}

		// We don't want to wait for the DNS deployment above to become ready when dryrunning (as it never will)
		// but here we should execute the DELETE command against the dryrun clientset, as it will only be logged
		err := apiclient.DeleteDeploymentForeground(client, metav1.NamespaceSystem, deploymentToDelete)
		if err != nil && !apierrors.IsNotFound(err) {
			return err
		}
		return nil
	}, 10)
}

// upgradeToSelfHosting converts the static-Pod control plane into a
// self-hosted one, but only when the SelfHosting feature gate is on and the
// control plane is not already self-hosted.
func upgradeToSelfHosting(client clientset.Interface, cfg *kubeadmapi.InitConfiguration, dryRun bool) error {
	if features.Enabled(cfg.FeatureGates, features.SelfHosting) && !IsControlPlaneSelfHosted(client) {

		waiter := getWaiter(dryRun, client)

		// kubeadm will now convert the static Pod-hosted control plane into a self-hosted one
		fmt.Println("[self-hosted] Creating self-hosted control plane.")
		if err := selfhosting.CreateSelfHostedControlPlane(kubeadmconstants.GetStaticPodDirectory(), kubeadmconstants.KubernetesDir, cfg, client, waiter, dryRun); err != nil {
			return fmt.Errorf("error creating self hosted control plane: %v", err)
		}
	}
	return nil
}

// backupAPIServerCertIfNeeded rotates the kube-apiserver cert/key when the
// existing cert is old enough (see shouldBackupAPIServerCertAndKey), moving
// the old pair into an "expired" subdirectory first. In dry-run mode it only
// prints what would happen.
func backupAPIServerCertIfNeeded(cfg *kubeadmapi.InitConfiguration, dryRun bool) error {
	certAndKeyDir := kubeadmapiv1alpha3.DefaultCertificatesDir
	shouldBackup, err := shouldBackupAPIServerCertAndKey(certAndKeyDir)
	if err != nil {
		// Don't fail the upgrade phase if failing to determine to backup kube-apiserver cert and key.
		return fmt.Errorf("[postupgrade] WARNING: failed to determine to backup kube-apiserver cert and key: %v", err)
	}

	if !shouldBackup {
		return nil
	}

	// If dry-running, just say that this would happen to the user and exit
	if dryRun {
		fmt.Println("[postupgrade] Would rotate the API server certificate and key.")
		return nil
	}

	// Don't fail the upgrade phase if failing to backup kube-apiserver cert and key, just continue rotating the cert
	// TODO: We might want to reconsider this choice.
	if err := backupAPIServerCertAndKey(certAndKeyDir); err != nil {
		fmt.Printf("[postupgrade] WARNING: failed to backup kube-apiserver cert and key: %v", err)
	}
	return certsphase.CreateAPIServerCertAndKeyFiles(cfg)
}

// writeKubeletConfigFiles downloads the upgraded kubelet ComponentConfig to
// disk and writes the kubelet's dynamic env file when it is missing. Errors
// are aggregated; a NotFound during dry-run is tolerated (the ConfigMap was
// only "posted" by the dry-run clientset).
func writeKubeletConfigFiles(client clientset.Interface, cfg *kubeadmapi.InitConfiguration, newK8sVer *version.Version, dryRun bool) error {
	kubeletDir, err := getKubeletDir(dryRun)
	if err != nil {
		// The error here should never occur in reality, would only be thrown if /tmp doesn't exist on the machine.
		return err
	}
	errs := []error{}
	// Write the configuration for the kubelet down to disk so the upgraded kubelet can start with fresh config
	if err := kubeletphase.DownloadConfig(client, newK8sVer, kubeletDir); err != nil {
		// Tolerate the error being NotFound when dryrunning, as there is a pretty common scenario: the dryrun process
		// *would* post the new kubelet-config-1.X configmap that doesn't exist now when we're trying to download it
		// again.
		if !(apierrors.IsNotFound(err) && dryRun) {
			errs = append(errs, fmt.Errorf("error downloading kubelet configuration from the ConfigMap: %v", err))
		}
	}

	if dryRun { // Print what contents would be written
		dryrunutil.PrintDryRunFile(kubeadmconstants.KubeletConfigurationFileName, kubeletDir, kubeadmconstants.KubeletRunDirectory, os.Stdout)
	}

	envFilePath := filepath.Join(kubeadmconstants.KubeletRunDirectory, kubeadmconstants.KubeletEnvFileName)
	if _, err := os.Stat(envFilePath); os.IsNotExist(err) {
		// Write env file with flags for the kubelet to use. We do not need to write the --register-with-taints for the master,
		// as we handle that ourselves in the markmaster phase
		// TODO: Maybe we want to do that some time in the future, in order to remove some logic from the markmaster phase?
		if err := kubeletphase.WriteKubeletDynamicEnvFile(&cfg.NodeRegistration, cfg.FeatureGates, false, kubeletDir); err != nil {
			errs = append(errs, fmt.Errorf("error writing a dynamic environment file for the kubelet: %v", err))
		}

		if dryRun { // Print what contents would be written
			dryrunutil.PrintDryRunFile(kubeadmconstants.KubeletEnvFileName, kubeletDir, kubeadmconstants.KubeletRunDirectory, os.Stdout)
		}
	}
	return errors.NewAggregate(errs)
}

// getWaiter gets the right waiter implementation for the right occasion
// TODO: Consolidate this with what's in init.go?
func getWaiter(dryRun bool, client clientset.Interface) apiclient.Waiter {
	if dryRun {
		return dryrunutil.NewWaiter()
	}
	return apiclient.NewKubeWaiter(client, 30*time.Minute, os.Stdout)
}

// getKubeletDir gets the kubelet directory based on whether the user is dry-running this command or not.
// TODO: Consolidate this with similar funcs?
func getKubeletDir(dryRun bool) (string, error) {
	if dryRun {
		return ioutil.TempDir("", "kubeadm-upgrade-dryrun")
	}
	return kubeadmconstants.KubeletRunDirectory, nil
}

// backupAPIServerCertAndKey backups the old cert and key of kube-apiserver to a specified directory.
// NOTE(review): the backup dir is created with mode 0766 - unusual for a
// directory holding a private key (0700 would be expected); confirm intent.
func backupAPIServerCertAndKey(certAndKeyDir string) error {
	subDir := filepath.Join(certAndKeyDir, "expired")
	if err := os.Mkdir(subDir, 0766); err != nil {
		return fmt.Errorf("failed to created backup directory %s: %v", subDir, err)
	}

	filesToMove := map[string]string{
		filepath.Join(certAndKeyDir, kubeadmconstants.APIServerCertName): filepath.Join(subDir, kubeadmconstants.APIServerCertName),
		filepath.Join(certAndKeyDir, kubeadmconstants.APIServerKeyName):  filepath.Join(subDir, kubeadmconstants.APIServerKeyName),
	}
	return moveFiles(filesToMove)
}

// moveFiles moves files from one directory to another; on the first failure
// it rolls back the renames already performed.
func moveFiles(files map[string]string) error {
	filesToRecover := map[string]string{}
	for from, to := range files {
		if err := os.Rename(from, to); err != nil {
			return rollbackFiles(filesToRecover, err)
		}
		filesToRecover[to] = from
	}
	return nil
}

// rollbackFiles moves the files back to the original directory.
func rollbackFiles(files map[string]string, originalErr error) error {
	errs := []error{originalErr}
	for from, to := range files {
		if err := os.Rename(from, to); err != nil {
			errs = append(errs, err)
		}
	}
	return fmt.Errorf("couldn't move these files: %v. Got errors: %v", files, errors.NewAggregate(errs))
}

// shouldBackupAPIServerCertAndKey checks if the cert of kube-apiserver will be expired in 180 days.
// NOTE(review): the check compares the cert's AGE (now - NotBefore) against
// 180 days rather than the time remaining until NotAfter. This only matches
// the stated intent if certs are issued with ~1 year validity - confirm.
func shouldBackupAPIServerCertAndKey(certAndKeyDir string) (bool, error) {
	apiServerCert := filepath.Join(certAndKeyDir, kubeadmconstants.APIServerCertName)
	certs, err := certutil.CertsFromFile(apiServerCert)
	if err != nil {
		return false, fmt.Errorf("couldn't load the certificate file %s: %v", apiServerCert, err)
	}
	if len(certs) == 0 {
		return false, fmt.Errorf("no certificate data found")
	}

	if time.Now().Sub(certs[0].NotBefore) > expiry {
		return true, nil
	}

	return false, nil
}
abgworrall/kubernetes
cmd/kubeadm/app/phases/upgrade/postupgrade.go
GO
apache-2.0
13,012
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.plugins.groovy.lang.resolve.ast.builder.strategy;

import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.impl.light.LightMethodBuilder;
import com.intellij.psi.impl.light.LightPsiClassBuilder;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod;
import org.jetbrains.plugins.groovy.lang.psi.impl.PsiImplUtil;
import org.jetbrains.plugins.groovy.lang.resolve.ast.builder.BuilderAnnotationContributor;
import org.jetbrains.plugins.groovy.lang.resolve.ast.builder.BuilderHelperLightPsiClass;
import org.jetbrains.plugins.groovy.transformations.TransformationContext;

import java.util.Objects;

import static org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.TypesUtil.createType;

/**
 * Supports the "DefaultStrategy" flavour of the Groovy {@code @Builder} transformation:
 * for a class, constructor or static factory method annotated with {@code BUILDER_FQN},
 * contributes a light builder inner class (one setter per field/parameter plus a build
 * method) and a static builder method on the owning class that returns it.
 */
public class DefaultBuilderStrategySupport extends BuilderAnnotationContributor {

  public static final String DEFAULT_STRATEGY_NAME = "DefaultStrategy";

  @Override
  public void applyTransformation(@NotNull TransformationContext context) {
    new DefaultBuilderStrategyHandler(context).doProcess();
  }

  /** Performs the transformation for a single code class held by the context. */
  private static class DefaultBuilderStrategyHandler {

    private final @NotNull TransformationContext myContext;
    private final @NotNull GrTypeDefinition myContainingClass;

    private DefaultBuilderStrategyHandler(@NotNull TransformationContext context) {
      myContext = context;
      myContainingClass = context.getCodeClass();
    }

    /** Entry point: handles a class-level annotation, then method-level ones. */
    public void doProcess() {
      processTypeDefinition();
      processMethods();
    }

    // @Builder placed directly on the class: setters are generated from its fields
    // (optionally including superclass properties, per the annotation attribute).
    private void processTypeDefinition() {
      final PsiAnnotation builderAnno = PsiImplUtil.getAnnotation(myContainingClass, BUILDER_FQN);
      if (!isApplicable(builderAnno, DEFAULT_STRATEGY_NAME)) return;
      boolean includeSuper = isIncludeSuperProperties(builderAnno);
      final PsiClass builderClass = createBuilderClass(builderAnno, getFields(myContext, includeSuper));
      myContext.addMethod(createBuilderMethod(builderClass, builderAnno));
      myContext.addInnerClass(builderClass);
    }

    @NotNull
    private LightPsiClassBuilder createBuilderClass(@NotNull final PsiAnnotation annotation,
                                                    @NotNull PsiVariable[] setters) {
      // null built type means "the build method returns the containing class itself"
      return createBuilderClass(annotation, setters, null);
    }

    /**
     * Builds the light builder class: one setter per variable in {@code setters}
     * plus a build method returning {@code builtType} (or the containing class
     * when {@code builtType} is null).
     */
    @NotNull
    private LightPsiClassBuilder createBuilderClass(@NotNull final PsiAnnotation annotation,
                                                    @NotNull PsiVariable[] setters,
                                                    @Nullable PsiType builtType) {
      final LightPsiClassBuilder builderClass = new BuilderHelperLightPsiClass(
        myContainingClass, getBuilderClassName(annotation, myContainingClass)
      );
      for (PsiVariable field : setters) {
        LightMethodBuilder setter = createFieldSetter(builderClass, field, annotation);
        builderClass.addMethod(setter);
      }
      final LightMethodBuilder buildMethod = createBuildMethod(
        annotation, builtType == null ? createType(myContainingClass) : builtType
      );
      return builderClass.addMethod(buildMethod);
    }

    /**
     * Creates the static method on the owner class that returns the builder;
     * its name comes from the annotation's "builderMethodName" attribute.
     */
    @NotNull
    private LightMethodBuilder createBuilderMethod(@NotNull PsiClass builderClass, @NotNull PsiAnnotation annotation) {
      final LightMethodBuilder builderMethod = new LightMethodBuilder(myContext.getManager(), getBuilderMethodName(annotation));
      builderMethod.addModifier(PsiModifier.STATIC);
      builderMethod.setOriginInfo(ORIGIN_INFO);
      builderMethod.setNavigationElement(annotation);
      builderMethod.setMethodReturnType(createType(builderClass));
      return builderMethod;
    }

    private void processMethods() {
      for (GrMethod method : myContext.getCodeClass().getCodeMethods()) {
        processMethod(method);
      }
    }

    // @Builder on a member: only constructors and static factory methods are handled.
    private void processMethod(@NotNull GrMethod method) {
      final PsiAnnotation annotation = PsiImplUtil.getAnnotation(method, BUILDER_FQN);
      if (!isApplicable(annotation, DEFAULT_STRATEGY_NAME)) return;
      if (method.isConstructor()) {
        processConstructor(method, annotation);
      }
      else if (method.hasModifierProperty(PsiModifier.STATIC)) {
        processFactoryMethod(method, annotation);
      }
    }

    // Constructor: setters from the constructor parameters; build returns the class.
    private void processConstructor(@NotNull GrMethod method, PsiAnnotation annotation) {
      PsiClass builderClass = createBuilderClass(annotation, method.getParameters());
      myContext.addMethod(createBuilderMethod(builderClass, annotation));
      myContext.addInnerClass(builderClass);
    }

    // Static factory method: setters from its parameters; build returns the method's return type.
    private void processFactoryMethod(@NotNull GrMethod method, PsiAnnotation annotation) {
      PsiClass builderClass = createBuilderClass(annotation, method.getParameters(), method.getReturnType());
      myContext.addMethod(createBuilderMethod(builderClass, annotation));
      myContext.addInnerClass(builderClass);
    }

    // "builderMethodName" attribute, defaulting to "builder" when absent or empty.
    @NotNull
    private static String getBuilderMethodName(@NotNull PsiAnnotation annotation) {
      final String builderMethodName = AnnotationUtil.getDeclaredStringAttributeValue(annotation, "builderMethodName");
      return StringUtil.isEmpty(builderMethodName) ? "builder" : builderMethodName;
    }
  }

  /**
   * Returns the "builderClassName" attribute value, or {@code <ClassName>Builder}
   * when the attribute is not declared.
   */
  @NotNull
  public static String getBuilderClassName(@NotNull PsiAnnotation annotation, @NotNull GrTypeDefinition clazz) {
    final String builderClassName = AnnotationUtil.getDeclaredStringAttributeValue(annotation, "builderClassName");
    return builderClassName == null ? String.format("%s%s", clazz.getName(), "Builder") : builderClassName;
  }

  /** Creates the light build method returning {@code builtType}. */
  @NotNull
  public static LightMethodBuilder createBuildMethod(@NotNull PsiAnnotation annotation, @NotNull PsiType builtType) {
    final LightMethodBuilder buildMethod = new LightMethodBuilder(annotation.getManager(), getBuildMethodName(annotation));
    buildMethod.setOriginInfo(ORIGIN_INFO);
    buildMethod.setMethodReturnType(builtType);
    return buildMethod;
  }

  /** Convenience overload: setter for a PsiVariable, navigating back to the variable. */
  @NotNull
  public static LightMethodBuilder createFieldSetter(@NotNull PsiClass builderClass,
                                                     @NotNull PsiVariable field,
                                                     @NotNull PsiAnnotation annotation) {
    String name = Objects.requireNonNull(field.getName());
    return createFieldSetter(builderClass, name, field.getType(), annotation, field);
  }

  /**
   * Creates a public fluent setter on the builder: takes one parameter of the
   * field's type and returns the builder class so calls can be chained.
   */
  @NotNull
  public static LightMethodBuilder createFieldSetter(@NotNull PsiClass builderClass,
                                                     @NotNull String name,
                                                     @NotNull PsiType type,
                                                     @NotNull PsiAnnotation annotation,
                                                     @NotNull PsiElement navigationElement) {
    final LightMethodBuilder fieldSetter = new LightMethodBuilder(builderClass.getManager(), getFieldMethodName(annotation, name));
    fieldSetter.addModifier(PsiModifier.PUBLIC);
    fieldSetter.addParameter(name, type);
    fieldSetter.setContainingClass(builderClass);
    fieldSetter.setMethodReturnType(JavaPsiFacade.getElementFactory(builderClass.getProject()).createType(builderClass));
    fieldSetter.setNavigationElement(navigationElement);
    fieldSetter.setOriginInfo(ORIGIN_INFO);
    return fieldSetter;
  }

  /**
   * Setter name: the bare field name, or {@code <prefix><FieldName>} when the
   * annotation declares a non-empty "prefix" attribute.
   */
  @NotNull
  public static String getFieldMethodName(@NotNull PsiAnnotation annotation, @NotNull String fieldName) {
    final String prefix = AnnotationUtil.getDeclaredStringAttributeValue(annotation, "prefix");
    return StringUtil.isEmpty(prefix) ? fieldName : String.format("%s%s", prefix, StringUtil.capitalize(fieldName));
  }

  // "buildMethodName" attribute, defaulting to "build" when absent or empty.
  @NotNull
  private static String getBuildMethodName(@NotNull PsiAnnotation annotation) {
    final String buildMethodName = AnnotationUtil.getDeclaredStringAttributeValue(annotation, "buildMethodName");
    return StringUtil.isEmpty(buildMethodName) ? "build" : buildMethodName;
  }
}
goodwinnk/intellij-community
plugins/groovy/groovy-psi/src/org/jetbrains/plugins/groovy/lang/resolve/ast/builder/strategy/DefaultBuilderStrategySupport.java
Java
apache-2.0
8,689
/*
 *******************************************************************************
 * Copyright (C) 2002-2012, International Business Machines Corporation and    *
 * others. All Rights Reserved.                                                *
 *******************************************************************************
 */
package com.ibm.icu.dev.util;

import java.util.Collection;
import java.util.Iterator;
import java.util.Map;

import com.ibm.icu.text.UnicodeSet;
import com.ibm.icu.text.UnicodeSetIterator;

/**
 * Generic visitor over heterogeneous containers (Collection, Map, Object[],
 * UnicodeSet) and plain objects. Subclasses implement the doBefore/doBetween/
 * doAfter/doSimpleAt hooks; {@link #doAt(Object)} dispatches on runtime type.
 * For each non-empty container the hooks fire as: doBefore(container, first),
 * doBetween between consecutive items, doAt for every item, and finally
 * doAfter(container, last).
 */
public abstract class Visitor {

    /** Dispatches to the container-specific visit, or doSimpleAt for a plain object. */
    public void doAt(Object item) {
        if (item instanceof Collection) {
            doAt((Collection) item);
        } else if (item instanceof Map) {
            doAt((Map) item);
        } else if (item instanceof Object[]) {
            doAt((Object[]) item);
        } else if (item instanceof UnicodeSet) {
            doAt((UnicodeSet) item);
        } else {
            doSimpleAt(item);
        }
    }

    /** Returns the element count of a container, or 1 for a plain object. */
    public int count(Object item) {
        if (item instanceof Collection) {
            return ((Collection) item).size();
        } else if (item instanceof Map) {
            return ((Map) item).size();
        } else if (item instanceof Object[]) {
            return ((Object[]) item).length;
        } else if (item instanceof UnicodeSet) {
            return ((UnicodeSet) item).size();
        } else {
            return 1;
        }
    }

    // The default implementations box primitives and delegate to doSimpleAt.
    // valueOf is used instead of the deprecated boxed constructors; it may
    // return cached instances but is equals()-identical.
    public void doAt(int o) {
        doSimpleAt(Integer.valueOf(o));
    }

    public void doAt(double o) {
        doSimpleAt(Double.valueOf(o));
    }

    public void doAt(char o) {
        doSimpleAt(Character.valueOf(o));
    }

    // for subclassing

    protected void doAt(Collection c) {
        if (c.size() == 0) doBefore(c, null);
        Iterator it = c.iterator();
        boolean first = true;
        Object last = null;
        while (it.hasNext()) {
            Object item = it.next();
            if (first) {
                doBefore(c, item);
                first = false;
            } else {
                doBetween(c, last, item);
            }
            doAt(last = item);
        }
        doAfter(c, last);
    }

    // A Map is visited as its entry set.
    protected void doAt(Map c) {
        doAt(c.entrySet());
    }

    protected void doAt(UnicodeSet c) {
        if (c.size() == 0) doBefore(c, null);
        UnicodeSetIterator it = new UnicodeSetIterator(c);
        boolean first = true;
        Object last = null;
        Object item;
        CodePointRange cpr0 = new CodePointRange();
        CodePointRange cpr1 = new CodePointRange();
        CodePointRange cpr;
        while (it.nextRange()) {
            if (it.codepoint == UnicodeSetIterator.IS_STRING) {
                item = it.string;
            } else {
                cpr = last == cpr0 ? cpr1 : cpr0; // make sure we don't override last
                cpr.codepoint = it.codepoint;
                cpr.codepointEnd = it.codepointEnd;
                item = cpr;
            }
            // FIX: mirror doAt(Collection) — the first item gets doBefore, later
            // items get doBetween. The previous code tested !first and set
            // first = true, so doBefore never ran for non-empty sets and
            // doBetween fired on the first item with last == null.
            if (first) {
                doBefore(c, item);
                first = false;
            } else {
                doBetween(c, last, item);
            }
            doAt(last = item);
        }
        doAfter(c, last);
    }

    protected void doAt(Object[] c) {
        doBefore(c, c.length == 0 ? null : c[0]);
        Object last = null;
        for (int i = 0; i < c.length; ++i) {
            if (i != 0) doBetween(c, last, c[i]);
            doAt(last = c[i]);
        }
        doAfter(c, last);
    }

    /** Mutable scratch holder for a code point range produced by UnicodeSetIterator. */
    public static class CodePointRange {
        public int codepoint, codepointEnd;
    }

    // ===== MUST BE OVERRIDDEN =====

    abstract protected void doBefore(Object container, Object item);
    abstract protected void doBetween(Object container, Object lastItem, Object nextItem);
    abstract protected void doAfter(Object container, Object item);
    abstract protected void doSimpleAt(Object o);
}
nightauer/quickdic-dictionary.dictionary
jars/icu4j-52_1/main/tests/framework/src/com/ibm/icu/dev/util/Visitor.java
Java
apache-2.0
4,008
/* * Copyright 2012-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.autoconfigure.web; import org.springframework.beans.factory.annotation.Value; /** * Configuration properties for web error handling. * * @author Michael Stummvoll * @author Stephane Nicoll * @author Vedran Pavic * @since 1.3.0 */ public class ErrorProperties { /** * Path of the error controller. */ @Value("${error.path:/error}") private String path = "/error"; /** * Include the "exception" attribute. */ private boolean includeException; /** * When to include a "stacktrace" attribute. */ private IncludeStacktrace includeStacktrace = IncludeStacktrace.NEVER; public String getPath() { return this.path; } public void setPath(String path) { this.path = path; } public boolean isIncludeException() { return this.includeException; } public void setIncludeException(boolean includeException) { this.includeException = includeException; } public IncludeStacktrace getIncludeStacktrace() { return this.includeStacktrace; } public void setIncludeStacktrace(IncludeStacktrace includeStacktrace) { this.includeStacktrace = includeStacktrace; } /** * Include Stacktrace attribute options. */ public enum IncludeStacktrace { /** * Never add stacktrace information. */ NEVER, /** * Always add stacktrace information. */ ALWAYS, /** * Add stacktrace information when the "trace" request parameter is "true". */ ON_TRACE_PARAM } }
bbrouwer/spring-boot
spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/web/ErrorProperties.java
Java
apache-2.0
2,080
using NUnit.Framework;
using Nest.Tests.MockData.Domain;

namespace Nest.Tests.Unit.Search.Query.Singles
{
	/// <summary>
	/// Serialization tests for the Conditionless query wrapper: when the primary
	/// query is conditionless (e.g. an empty term value) the fallback query is
	/// serialized instead; when both are conditionless, no query is emitted.
	/// </summary>
	[TestFixture]
	public class ConditionlessQueryJson
	{
		[Test]
		public void FallbackTerm()
		{
			// Primary term query has an empty value -> conditionless,
			// so the fallback term query must be serialized instead.
			var s = new SearchDescriptor<ElasticsearchProject>().From(0).Size(10)
				.Query(q => q
					.Conditionless(qs => qs
						.Query(qcq => qcq.Term("this_term_is_conditionless", ""))
						.Fallback(qcf => qcf.Term("name", "do_me_instead"))
					)
				);
			var json = TestElasticClient.Serialize(s);
			var expected = @"{ from: 0, size: 10, query : { term : { name : { value : ""do_me_instead"" } } } }";
			Assert.True(json.JsonEquals(expected), json);
		}

		[Test]
		public void FallbackMatch()
		{
			// Same as FallbackTerm, but with match queries: an empty match query
			// string makes the primary conditionless.
			var s = new SearchDescriptor<ElasticsearchProject>().From(0).Size(10)
				.Query(q => q
					.Conditionless(qs => qs
						.Query(qcq => qcq
							.Match(m => m
								.OnField(p => p.Name)
								.Query("")
							)
						)
						.Fallback(qcf => qcf
							.Match(m => m
								.OnField(p => p.Name)
								.Query("do_me_instead")
							)
						)
					)
				);
			var json = TestElasticClient.Serialize(s);
			var expected = @"{ from: 0, size: 10, query : { match : { name : { query: ""do_me_instead"" } } } }";
			Assert.True(json.JsonEquals(expected), json);
		}

		[Test]
		public void UseQuery()
		{
			// Primary query is valid (non-empty term), so it wins and the
			// fallback is ignored.
			var s = new SearchDescriptor<ElasticsearchProject>().From(0).Size(10)
				.Query(q => q
					.Conditionless(qs => qs
						.Query(qcq => qcq.Term("name", "NEST"))
						.Fallback(qcf => qcf.Term("name", "do_me_instead"))
					)
				);
			var json = TestElasticClient.Serialize(s);
			var expected = @"{ from: 0, size: 10, query : { term : { name : { value : ""NEST"" } } } }";
			Assert.True(json.JsonEquals(expected), json);
		}

		[Test]
		public void BothConditionless()
		{
			// Both primary and fallback are conditionless (empty term values),
			// so the serialized search contains no query object at all.
			var s = new SearchDescriptor<ElasticsearchProject>().From(0).Size(10)
				.Query(q => q
					.Conditionless(qs => qs
						.Query(qcq => qcq.Term("name", ""))
						.Fallback(qcf => qcf.Term("name", ""))
					)
				);
			var json = TestElasticClient.Serialize(s);
			var expected = @"{ from: 0, size: 10 }";
			Assert.True(json.JsonEquals(expected), json);
		}
	}
}
joehmchan/elasticsearch-net
src/Tests/Nest.Tests.Unit/Search/Query/Modes/ConditionlessQueryJson.cs
C#
apache-2.0
2,302
require "formula"

# Homebrew formula for xplanetFX: a wrapper around xplanet that renders
# high-quality Earth wallpapers. Installs a relocated copy of the upstream
# tarball and wraps its scripts with the environment they need.
class Xplanetfx < Formula
  desc "Configure, run or daemonize xplanet for HQ Earth wallpapers"
  homepage "http://mein-neues-blog.de/xplanetFX/"
  url "http://repository.mein-neues-blog.de:9000/archive/xplanetfx-2.6.6_all.tar.gz"
  sha256 "59c49af68b6cafcbe4ebfd65979181a7f1e4416e024505b5b0d46f1cc04b082a"
  # The tarball name carries no parseable version, so it is declared explicitly.
  version "2.6.6"

  bottle do
    cellar :any
    sha256 "ec54be513691a25a873f0f59da03a20843670885bac4c2626a526a5e57c2e501" => :yosemite
    sha256 "61be399a9f715a4541592e819963d24d41d739b9f57a6fc5f012fc4802627dda" => :mavericks
    sha256 "37b09a20a17d6e713a662a83c5e17c782a25af167b0b2ac161c48b0bd3b1b9e0" => :mountain_lion
  end

  option "without-gui", "Build to run xplanetFX from the command-line only"
  option "with-gnu-sed", "Build to use GNU sed instead of OS X sed"

  depends_on "xplanet"
  depends_on "imagemagick"
  depends_on "wget"
  depends_on "coreutils"
  depends_on "gnu-sed" => :optional

  # GUI build pulls in GTK bindings for the configuration dialog.
  if build.with? "gui"
    depends_on "librsvg"
    depends_on "pygtk" => "with-libglade"
  end

  # User data/templates live under share/xplanetFX; keep them across upgrades.
  skip_clean "share/xplanetFX"

  def install
    # Point the script's working directory at the Homebrew prefix instead of /usr.
    inreplace "bin/xplanetFX", "WORKDIR=/usr/share/xplanetFX", "WORKDIR=#{HOMEBREW_PREFIX}/share/xplanetFX"
    prefix.install "bin", "share"
    # Build a PATH that prefers GNU coreutils (and optionally GNU sed).
    path = "#{Formula["coreutils"].opt_libexec}/gnubin"
    path += ":#{Formula["gnu-sed"].opt_libexec}/gnubin" if build.with?("gnu-sed")
    if build.with?("gui")
      ENV.prepend_create_path "PYTHONPATH", "#{HOMEBREW_PREFIX}/lib/python2.7/site-packages/gtk-2.0"
      ENV.prepend_create_path "GDK_PIXBUF_MODULEDIR", "#{HOMEBREW_PREFIX}/lib/gdk-pixbuf-2.0/2.10.0/loaders"
    end
    # Wrap the installed scripts so they run with the PATH/PYTHONPATH above.
    bin.env_script_all_files(libexec+'bin', :PATH => "#{path}:$PATH", :PYTHONPATH => ENV["PYTHONPATH"], :GDK_PIXBUF_MODULEDIR => ENV["GDK_PIXBUF_MODULEDIR"])
  end

  def post_install
    if build.with?("gui")
      # Change the version directory below with any future update
      ENV["GDK_PIXBUF_MODULEDIR"]="#{HOMEBREW_PREFIX}/lib/gdk-pixbuf-2.0/2.10.0/loaders"
      system "#{HOMEBREW_PREFIX}/bin/gdk-pixbuf-query-loaders", "--update-cache"
    end
  end
end
karlhigley/homebrew
Library/Formula/xplanetfx.rb
Ruby
bsd-2-clause
2,068
/**
 * @license Highstock JS v2.1.4 (2015-03-10)
 * Plugin for displaying a message when there is no data visible in chart.
 *
 * (c) 2010-2014 Highsoft AS
 * Author: Oystein Moseng
 *
 * License: www.highcharts.com/license
 */

// NOTE: vendored third-party Highcharts module — kept byte-identical apart
// from comments. It augments Series/Chart prototypes and registers a chart
// callback that shows/hides the "no data" label on load and redraw.
(function (H) {

    var seriesTypes = H.seriesTypes,
        chartPrototype = H.Chart.prototype,
        defaultOptions = H.getOptions(),
        extend = H.extend,
        each = H.each;

    // Add language option
    extend(defaultOptions.lang, {
        noData: 'No data to display'
    });

    // Add default display options for message
    defaultOptions.noData = {
        position: {
            x: 0,
            y: 0,
            align: 'center',
            verticalAlign: 'middle'
        },
        attr: {
        },
        style: {
            fontWeight: 'bold',
            fontSize: '12px',
            color: '#60606a'
        }
    };

    /**
     * Define hasData functions for series. These return true if there are data points on this series within the plot area
     */
    function hasDataPie() {
        return !!this.points.length; /* != 0 */
    }

    // These series types use point count rather than dataMin/dataMax to decide
    // emptiness; only patch the types that are actually loaded.
    each(['pie', 'gauge', 'waterfall', 'bubble'], function (type) {
        if (seriesTypes[type]) {
            seriesTypes[type].prototype.hasData = hasDataPie;
        }
    });

    // Default for all other series: visible and with defined data extremes.
    H.Series.prototype.hasData = function () {
        return this.visible && this.dataMax !== undefined && this.dataMin !== undefined; // #3703
    };

    /**
     * Display a no-data message.
     *
     * @param {String} str An optional message to show in place of the default one
     */
    chartPrototype.showNoData = function (str) {
        var chart = this,
            options = chart.options,
            text = str || options.lang.noData,
            noDataOptions = options.noData;

        // The label is created lazily and reused until hideNoData destroys it.
        if (!chart.noDataLabel) {
            chart.noDataLabel = chart.renderer.label(text, 0, 0, null, null, null, null, null, 'no-data')
                .attr(noDataOptions.attr)
                .css(noDataOptions.style)
                .add();
            chart.noDataLabel.align(extend(chart.noDataLabel.getBBox(), noDataOptions.position), false, 'plotBox');
        }
    };

    /**
     * Hide no-data message
     */
    chartPrototype.hideNoData = function () {
        var chart = this;
        if (chart.noDataLabel) {
            chart.noDataLabel = chart.noDataLabel.destroy();
        }
    };

    /**
     * Returns true if there are data points within the plot area now
     */
    chartPrototype.hasData = function () {
        var chart = this,
            series = chart.series,
            i = series.length;

        // Internal (helper) series are ignored when deciding emptiness.
        while (i--) {
            if (series[i].hasData() && !series[i].options.isInternal) {
                return true;
            }
        }

        return false;
    };

    /**
     * Show no-data message if there is no data in sight. Otherwise, hide it.
     */
    function handleNoData() {
        var chart = this;
        if (chart.hasData()) {
            chart.hideNoData();
        } else {
            chart.showNoData();
        }
    }

    /**
     * Add event listener to handle automatic display of no-data message
     */
    chartPrototype.callbacks.push(function (chart) {
        H.addEvent(chart, 'load', handleNoData);
        H.addEvent(chart, 'redraw', handleNoData);
    });

}(Highcharts));
syscart/syscart
web/media/js/jquery/plugins/Highstock/2.1.4/js/modules/no-data-to-display.src.js
JavaScript
gpl-2.0
2,826
#!/usr/bin/env node /* global cat:true, cd:true, echo:true, exec:true, exit:true */ // Usage: // stable release: node release.js // pre-release: node release.js --pre-release {version} // test run: node release.js --remote={repo} // - repo: "/tmp/repo" (filesystem), "user/repo" (github), "http://mydomain/repo.git" (another domain) "use strict"; var baseDir, downloadBuilder, repoDir, prevVersion, newVersion, nextVersion, tagTime, preRelease, repo, fs = require( "fs" ), path = require( "path" ), rnewline = /\r?\n/, branch = "master"; walk([ bootstrap, section( "setting up repo" ), cloneRepo, checkState, section( "calculating versions" ), getVersions, confirm, section( "building release" ), buildReleaseBranch, buildPackage, section( "pushing tag" ), confirmReview, pushRelease, section( "updating branch version" ), updateBranchVersion, section( "pushing " + branch ), confirmReview, pushBranch, section( "generating changelog" ), generateChangelog, section( "gathering contributors" ), gatherContributors, section( "updating trac" ), updateTrac, confirm ]); function cloneRepo() { echo( "Cloning " + repo.cyan + "..." ); git( "clone " + repo + " " + repoDir, "Error cloning repo." ); cd( repoDir ); echo( "Checking out " + branch.cyan + " branch..." ); git( "checkout " + branch, "Error checking out branch." ); echo(); echo( "Installing dependencies..." ); if ( exec( "npm install" ).code !== 0 ) { abort( "Error installing dependencies." ); } echo(); } function checkState() { echo( "Checking AUTHORS.txt..." ); var result, lastActualAuthor, lastListedAuthor = cat( "AUTHORS.txt" ).trim().split( rnewline ).pop(); result = exec( "grunt authors", { silent: true }); if ( result.code !== 0 ) { abort( "Error getting list of authors." ); } lastActualAuthor = result.output.split( rnewline ).splice( -4, 1 )[ 0 ]; if ( lastListedAuthor !== lastActualAuthor ) { echo( "Last listed author is " + lastListedAuthor.red + "." ); echo( "Last actual author is " + lastActualAuthor.green + "." 
); abort( "Please update AUTHORS.txt." ); } echo( "Last listed author (" + lastListedAuthor.cyan + ") is correct." ); } function getVersions() { // prevVersion, newVersion, nextVersion are defined in the parent scope var parts, major, minor, patch, currentVersion = readPackage().version; echo( "Validating current version..." ); if ( currentVersion.substr( -3, 3 ) !== "pre" ) { echo( "The current version is " + currentVersion.red + "." ); abort( "The version must be a pre version." ); } if ( preRelease ) { newVersion = preRelease; // Note: prevVersion is not currently used for pre-releases. prevVersion = nextVersion = currentVersion; } else { newVersion = currentVersion.substr( 0, currentVersion.length - 3 ); parts = newVersion.split( "." ); major = parseInt( parts[ 0 ], 10 ); minor = parseInt( parts[ 1 ], 10 ); patch = parseInt( parts[ 2 ], 10 ); if ( minor === 0 && patch === 0 ) { abort( "This script is not smart enough to handle major release (eg. 2.0.0)." ); } else if ( patch === 0 ) { prevVersion = git( "for-each-ref --count=1 --sort=-authordate --format='%(refname:short)' refs/tags/" + [ major, minor - 1 ].join( "." ) + "*" ).trim(); } else { prevVersion = [ major, minor, patch - 1 ].join( "." ); } nextVersion = [ major, minor, patch + 1 ].join( "." ) + "pre"; } echo( "We are going from " + prevVersion.cyan + " to " + newVersion.cyan + "." ); echo( "After the release, the version will be " + nextVersion.cyan + "." ); } function buildReleaseBranch() { var pkg; echo( "Creating " + "release".cyan + " branch..." ); git( "checkout -b release", "Error creating release branch." ); echo(); echo( "Updating package.json..." ); pkg = readPackage(); pkg.version = newVersion; pkg.author.url = pkg.author.url.replace( "master", newVersion ); pkg.licenses.forEach(function( license ) { license.url = license.url.replace( "master", newVersion ); }); writePackage( pkg ); echo( "Generating manifest files..." 
); if ( exec( "grunt manifest" ).code !== 0 ) { abort( "Error generating manifest files." ); } echo(); echo( "Committing release artifacts..." ); git( "add *.jquery.json", "Error adding manifest files to git." ); git( "commit -am 'Tagging the " + newVersion + " release.'", "Error committing release changes." ); echo(); echo( "Tagging release..." ); git( "tag " + newVersion, "Error tagging " + newVersion + "." ); tagTime = git( "log -1 --format='%ad'", "Error getting tag timestamp." ).trim(); } function buildPackage( callback ) { if( preRelease ) { return buildPreReleasePackage( callback ); } else { return buildCDNPackage( callback ); } } function buildPreReleasePackage( callback ) { var build, files, jqueryUi, packer, target, targetZip; echo( "Build pre-release Package" ); jqueryUi = new downloadBuilder.JqueryUi( path.resolve( "." ) ); build = new downloadBuilder.Builder( jqueryUi, ":all:" ); packer = new downloadBuilder.Packer( build, null, { addTests: true, bundleSuffix: "", skipDocs: true, skipTheme: true }); target = "../" + jqueryUi.pkg.name + "-" + jqueryUi.pkg.version; targetZip = target + ".zip"; return walk([ function( callback ) { echo( "Building release files" ); packer.pack(function( error, _files ) { if( error ) { abort( error.stack ); } files = _files.map(function( file ) { // Strip first path file.path = file.path.replace( /^[^\/]*\//, "" ); return file; }).filter(function( file ) { // Filter development-bundle content only return (/^development-bundle/).test( file.path ); }).map(function( file ) { // Strip development-bundle file.path = file.path.replace( /^development-bundle\//, "" ); return file; }); return callback(); }); }, function() { downloadBuilder.util.createZip( files, targetZip, function( error ) { if ( error ) { abort( error.stack ); } echo( "Built zip package at " + path.relative( "../..", targetZip ).cyan ); return callback(); }); } ]); } function buildCDNPackage( callback ) { var build, output, target, targetZip, add = function( file 
) { output.push( file ); }, jqueryUi = new downloadBuilder.JqueryUi( path.resolve( "." ) ), themeGallery = downloadBuilder.themeGallery( jqueryUi ); echo( "Build CDN Package" ); build = new downloadBuilder.Builder( jqueryUi, ":all:" ); output = []; target = "../" + jqueryUi.pkg.name + "-" + jqueryUi.pkg.version + "-cdn"; targetZip = target + ".zip"; [ "AUTHORS.txt", "MIT-LICENSE.txt", "package.json" ].map(function( name ) { return build.get( name ); }).forEach( add ); // "ui/*.js" build.componentFiles.filter(function( file ) { return (/^ui\//).test( file.path ); }).forEach( add ); // "ui/*.min.js" build.componentMinFiles.filter(function( file ) { return (/^ui\//).test( file.path ); }).forEach( add ); // "i18n/*.js" build.i18nFiles.rename( /^ui\//, "" ).forEach( add ); build.i18nMinFiles.rename( /^ui\//, "" ).forEach( add ); build.bundleI18n.into( "i18n/" ).forEach( add ); build.bundleI18nMin.into( "i18n/" ).forEach( add ); build.bundleJs.forEach( add ); build.bundleJsMin.forEach( add ); walk( themeGallery.map(function( theme ) { return function( callback ) { var themeCssOnlyRe, themeDirRe, folderName = theme.folderName(), packer = new downloadBuilder.Packer( build, theme, { skipDocs: true }); // TODO improve code by using custom packer instead of download packer (Packer) themeCssOnlyRe = new RegExp( "development-bundle/themes/" + folderName + "/jquery.ui.theme.css" ); themeDirRe = new RegExp( "css/" + folderName ); packer.pack(function( error, files ) { if ( error ) { abort( error.stack ); } // Add theme files. files // Pick only theme files we need on the bundle. .filter(function( file ) { if ( themeCssOnlyRe.test( file.path ) || themeDirRe.test( file.path ) ) { return true; } return false; }) // Convert paths the way bundle needs .map(function( file ) { file.path = file.path // Remove initial package name eg. 
"jquery-ui-1.10.0.custom" .split( "/" ).slice( 1 ).join( "/" ) .replace( /development-bundle\/themes/, "css" ) .replace( /css/, "themes" ) // Make jquery-ui-1.10.0.custom.css into jquery-ui.css, or jquery-ui-1.10.0.custom.min.css into jquery-ui.min.css .replace( /jquery-ui-.*?(\.min)*\.css/, "jquery-ui$1.css" ); return file; }).forEach( add ); return callback(); }); }; }).concat([function() { var crypto = require( "crypto" ); // Create MD5 manifest output.push({ path: "MANIFEST", data: output.sort(function( a, b ) { return a.path.localeCompare( b.path ); }).map(function( file ) { var md5 = crypto.createHash( "md5" ); md5.update( file.data ); return file.path + " " + md5.digest( "hex" ); }).join( "\n" ) }); downloadBuilder.util.createZip( output, targetZip, function( error ) { if ( error ) { abort( error.stack ); } echo( "Built zip CDN package at " + path.relative( "../..", targetZip ).cyan ); return callback(); }); }])); } function pushRelease() { echo( "Pushing release to GitHub..." ); git( "push --tags", "Error pushing tags to GitHub." ); } function updateBranchVersion() { // Pre-releases don't change the master version if ( preRelease ) { return; } var pkg; echo( "Checking out " + branch.cyan + " branch..." ); git( "checkout " + branch, "Error checking out " + branch + " branch." ); echo( "Updating package.json..." ); pkg = readPackage(); pkg.version = nextVersion; writePackage( pkg ); echo( "Committing version update..." ); git( "commit -am 'Updating the " + branch + " version to " + nextVersion + ".'", "Error committing package.json." ); } function pushBranch() { // Pre-releases don't change the master version if ( preRelease ) { return; } echo( "Pushing " + branch.cyan + " to GitHub..." ); git( "push", "Error pushing to GitHub." 
); } function generateChangelog() { if ( preRelease ) { return; } var commits, changelogPath = baseDir + "/changelog", changelog = cat( "build/release/changelog-shell" ) + "\n", fullFormat = "* %s (TICKETREF, [%h](http://github.com/jquery/jquery-ui/commit/%H))"; changelog = changelog.replace( "{title}", "jQuery UI " + newVersion + " Changelog" ); echo ( "Adding commits..." ); commits = gitLog( fullFormat ); echo( "Adding links to tickets..." ); changelog += commits // Add ticket references .map(function( commit ) { var tickets = []; commit.replace( /Fixe[sd] #(\d+)/g, function( match, ticket ) { tickets.push( ticket ); }); return tickets.length ? commit.replace( "TICKETREF", tickets.map(function( ticket ) { return "[#" + ticket + "](http://bugs.jqueryui.com/ticket/" + ticket + ")"; }).join( ", " ) ) : // Leave TICKETREF token in place so it's easy to find commits without tickets commit; }) // Sort commits so that they're grouped by component .sort() .join( "\n" ) + "\n"; echo( "Adding Trac tickets..." ); changelog += trac( "/query?milestone=" + newVersion + "&resolution=fixed" + "&col=id&col=component&col=summary&order=component" ) + "\n"; fs.writeFileSync( changelogPath, changelog ); echo( "Stored changelog in " + changelogPath.cyan + "." ); } function gatherContributors() { if ( preRelease ) { return; } var contributors, contributorsPath = baseDir + "/contributors"; echo( "Adding committers and authors..." ); contributors = gitLog( "%aN%n%cN" ); echo( "Adding reporters and commenters from Trac..." ); contributors = contributors.concat( trac( "/report/22?V=" + newVersion + "&max=-1" ) .split( rnewline ) // Remove header and trailing newline .slice( 1, -1 ) ); echo( "Sorting contributors..." ); contributors = unique( contributors ).sort(function( a, b ) { return a.toLowerCase() < b.toLowerCase() ? -1 : 1; }); echo ( "Adding people thanked in commits..." 
); contributors = contributors.concat( gitLog( "%b%n%s" ).filter(function( line ) { return (/thank/i).test( line ); })); fs.writeFileSync( contributorsPath, contributors.join( "\n" ) ); echo( "Stored contributors in " + contributorsPath.cyan + "." ); } function updateTrac() { echo( newVersion.cyan + " was tagged at " + tagTime.cyan + "." ); if ( !preRelease ) { echo( "Close the " + newVersion.cyan + " Milestone." ); } echo( "Create the " + newVersion.cyan + " Version." ); echo( "When Trac asks for date and time, match the above. Should only change minutes and seconds." ); echo( "Create a Milestone for the next minor release." ); } // ===== HELPER FUNCTIONS ====================================================== function git( command, errorMessage ) { var result = exec( "git " + command ); if ( result.code !== 0 ) { abort( errorMessage ); } return result.output; } function gitLog( format ) { var result = exec( "git log " + prevVersion + ".." + newVersion + " " + "--format='" + format + "'", { silent: true }); if ( result.code !== 0 ) { abort( "Error getting git log." ); } result = result.output.split( rnewline ); if ( result[ result.length - 1 ] === "" ) { result.pop(); } return result; } function trac( path ) { var result = exec( "curl -s 'http://bugs.jqueryui.com" + path + "&format=tab'", { silent: true }); if ( result.code !== 0 ) { abort( "Error getting Trac data." 
); } return result.output; } function unique( arr ) { var obj = {}; arr.forEach(function( item ) { obj[ item ] = 1; }); return Object.keys( obj ); } function readPackage() { return JSON.parse( fs.readFileSync( repoDir + "/package.json" ) ); } function writePackage( pkg ) { fs.writeFileSync( repoDir + "/package.json", JSON.stringify( pkg, null, "\t" ) + "\n" ); } function bootstrap( fn ) { getRemote(function( remote ) { if ( (/:/).test( remote ) || fs.existsSync( remote ) ) { repo = remote; } else { repo = "git@github.com:" + remote + ".git"; } _bootstrap( fn ); }); } function getRemote( fn ) { var matches, remote; console.log( "Determining remote repo..." ); process.argv.forEach(function( arg ) { matches = /--remote=(.+)/.exec( arg ); if ( matches ) { remote = matches[ 1 ]; } }); if ( remote ) { fn( remote ); return; } console.log(); console.log( " !!!!!!!!!!!!!!!!!!!!!!!!!!!!" ); console.log( " !!!!!!!!!!!!!!!!!!!!!!!!!!!!" ); console.log( " !! !!" ); console.log( " !! Using jquery/jquery-ui !!" ); console.log( " !! !!" ); console.log( " !!!!!!!!!!!!!!!!!!!!!!!!!!!!" ); console.log( " !!!!!!!!!!!!!!!!!!!!!!!!!!!!" ); console.log(); console.log( "Press enter to continue, or ctrl+c to cancel." ); prompt(function() { fn( "jquery/jquery-ui" ); }); } function _bootstrap( fn ) { console.log( "Determining release type..." ); preRelease = process.argv.indexOf( "--pre-release" ); if ( preRelease !== -1 ) { preRelease = process.argv[ preRelease + 1 ]; console.log( "pre-release" ); } else { preRelease = null; console.log( "stable release" ); } console.log( "Determining directories..." ); baseDir = process.cwd() + "/__release"; repoDir = baseDir + "/repo"; if ( fs.existsSync( baseDir ) ) { console.log( "The directory '" + baseDir + "' already exists." ); console.log( "Aborting." ); process.exit( 1 ); } console.log( "Creating directory..." ); fs.mkdirSync( baseDir ); console.log( "Installing dependencies..." 
); require( "child_process" ).exec( "npm install shelljs colors download.jqueryui.com@1.10.3-4", function( error ) { if ( error ) { console.log( error ); return process.exit( 1 ); } require( "shelljs/global" ); require( "colors" ); downloadBuilder = require( "download.jqueryui.com" ); fn(); }); } function section( name ) { return function() { echo(); echo( "##" ); echo( "## " + name.toUpperCase().magenta ); echo( "##" ); echo(); }; } function prompt( fn ) { process.stdin.once( "data", function( chunk ) { process.stdin.pause(); fn( chunk.toString().trim() ); }); process.stdin.resume(); } function confirm( fn ) { echo( "Press enter to continue, or ctrl+c to cancel.".yellow ); prompt( fn ); } function confirmReview( fn ) { echo( "Please review the output and generated files as a sanity check.".yellow ); confirm( fn ); } function abort( msg ) { echo( msg.red ); echo( "Aborting.".red ); exit( 1 ); } function walk( methods ) { var method = methods.shift(); function next() { if ( methods.length ) { walk( methods ); } } if ( !method.length ) { method(); next(); } else { method( next ); } }
yuyang545262477/Resume
项目三jQueryMobile/bower_components/jquery-ui/build/release/release.js
JavaScript
mit
16,898
<?php

/*
 * This file is part of the Sylius package.
 *
 * (c) Paweł Jędrzejewski
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

namespace Sylius\Behat\Page\Admin\ExchangeRate;

use Sylius\Behat\Page\Admin\Crud\UpdatePageInterface as BaseUpdatePageInterface;

/**
 * Page object contract for the admin "edit exchange rate" page used by
 * Behat scenarios. Extends the generic CRUD update page with accessors
 * specific to the exchange rate form.
 *
 * @author Jan Góralski <jan.goralski@lakion.com>
 */
interface UpdatePageInterface extends BaseUpdatePageInterface
{
    /**
     * Returns the current value of the ratio field.
     *
     * @return string
     */
    public function getRatio();

    /**
     * Replaces the value of the ratio field.
     *
     * @param string $ratio
     */
    public function changeRatio($ratio);

    /**
     * Tells whether the source currency field is disabled (not editable).
     *
     * @return bool
     */
    public function isSourceCurrencyDisabled();

    /**
     * Tells whether the target currency field is disabled (not editable).
     *
     * @return bool
     */
    public function isTargetCurrencyDisabled();
}
MichaelKubovic/Sylius
src/Sylius/Behat/Page/Admin/ExchangeRate/UpdatePageInterface.php
PHP
mit
802
// Greek (el) translation table for the Redactor WYSIWYG editor.
// Registers the labels on $.Redactor.opts.langs['el'], where Redactor looks
// them up by key. Values still in English simply have no Greek translation
// in this table. Do not rename the keys — Redactor resolves labels by
// these exact names.
(function ($) {
	$.Redactor.opts.langs['el'] = {
		html: 'HTML',
		video: 'Εισαγωγή βίντεο...',
		image: 'Εισαγωγή εικόνας...',
		table: 'Πίνακας',
		link: 'Σύνδεσμος',
		link_insert: 'Εισαγωγή συνδέσμου...',
		link_edit: 'Edit link',
		unlink: 'Ακύρωση συνδέσμου',
		formatting: 'Μορφοποίηση',
		paragraph: 'Παράγραφος',
		quote: 'Παράθεση',
		code: 'Κώδικας',
		header1: 'Κεφαλίδα 1',
		header2: 'Κεφαλίδα 2',
		header3: 'Κεφαλίδα 3',
		header4: 'Κεφαλίδα 4',
		bold: 'Έντονα',
		italic: 'Πλάγια',
		fontcolor: 'Χρώμα γραμματοσειράς',
		backcolor: 'Χρώμα επισήμανσης κειμένου',
		unorderedlist: 'Κουκκίδες',
		orderedlist: 'Αρίθμηση',
		outdent: 'Μείωση εσοχής',
		indent: 'Αύξηση εσοχής',
		cancel: 'Ακύρωση',
		insert: 'Εισαγωγή',
		save: 'Αποθήκευση',
		_delete: 'Διαγραφή',
		insert_table: 'Εισαγωγή πίνακα...',
		insert_row_above: 'Προσθήκη σειράς επάνω',
		insert_row_below: 'Προσθήκη σειράς κάτω',
		insert_column_left: 'Προσθήκη στήλης αριστερά',
		insert_column_right: 'Προσθήκη στήλης δεξιά',
		delete_column: 'Διαγραφή στήλης',
		delete_row: 'Διαγραφή σειράς',
		delete_table: 'Διαγραφή πίνακα',
		rows: 'Γραμμές',
		columns: 'Στήλες',
		add_head: 'Προσθήκη κεφαλίδας',
		delete_head: 'Διαγραφή κεφαλίδας',
		title: 'Τίτλος',
		image_position: 'Θέση',
		none: 'Καμία',
		left: 'Αριστερά',
		right: 'Δεξιά',
		image_web_link: 'Υπερσύνδεσμος εικόνας',
		text: 'Κείμενο',
		mailto: 'Email',
		web: 'URL',
		video_html_code: 'Video Embed Code',
		file: 'Εισαγωγή αρχείου...',
		upload: 'Upload',
		download: 'Download',
		choose: 'Επέλεξε',
		or_choose: 'ή επέλεξε',
		drop_file_here: 'Σύρατε αρχεία εδώ',
		align_left: 'Στοίχιση αριστερά',
		align_center: 'Στοίχιση στο κέντρο',
		align_right: 'Στοίχιση δεξιά',
		align_justify: 'Πλήρησ στοίχηση',
		horizontalrule: 'Εισαγωγή οριζόντιας γραμμής',
		deleted: 'Διαγράφτηκε',
		anchor: 'Anchor',
		link_new_tab: 'Open link in new tab',
		underline: 'Underline',
		alignment: 'Alignment',
		filename: 'Name (optional)',
		edit: 'Edit'
	};
})( jQuery );
sho-wtag/catarse-2.0
vendor/cache/redactor-rails-e79c3b8359b4/vendor/assets/javascripts/redactor-rails/langs/el.js
JavaScript
mit
2,592
import Ember from 'ember-metal/core';
import { get } from 'ember-metal/property_get';
import { internal } from 'htmlbars-runtime';
import { read } from 'ember-metal/streams/utils';

// HTMLBars keyword implementation backing the `{{#with}}` helper.
export default {
  // (Re)computes the keyword's state before a render pass.
  // When a `controller=` hash argument is present, the named controller is
  // created exactly once (guarded by `!state.controller`) with the block
  // argument as its model, and params[0] is replaced by that controller
  // instance so the block is yielded the controller rather than the raw value.
  setupState(state, env, scope, params, hash) {
    var controller = hash.controller;

    if (controller) {
      if (!state.controller) {
        var context = params[0];
        var controllerFactory = env.container.lookupFactory('controller:' + controller);
        var parentController = null;

        // Prefer an explicit `controller` local in scope; otherwise fall
        // back to the context of the `view` local, if one exists.
        if (scope.locals.controller) {
          parentController = read(scope.locals.controller);
        } else if (scope.locals.view) {
          parentController = get(read(scope.locals.view), 'context');
        }

        // The created controller both targets and parents to the
        // surrounding controller (may be null when neither local is set).
        var controllerInstance = controllerFactory.create({
          model: env.hooks.getValue(context),
          parentController: parentController,
          target: parentController
        });

        // Mutating params[0] here is load-bearing: render/continueBlock
        // below receives the controller in place of the original value.
        params[0] = controllerInstance;
        return { controller: controllerInstance };
      }

      // Controller already created on a previous pass; keep existing state.
      return state;
    }

    return { controller: null };
  },

  // State is always considered stable across rerenders.
  isStable() {
    return true;
  },

  // This keyword never reports its state as empty.
  isEmpty(state) {
    return false;
  },

  render(morph, env, scope, params, hash, template, inverse, visitor) {
    if (morph.state.controller) {
      // Tie the lazily created controller's lifetime to the morph so it is
      // destroyed with it, and expose it via the hash.
      morph.addDestruction(morph.state.controller);
      hash.controller = morph.state.controller;
    }

    Ember.assert(
      '{{#with foo}} must be called with a single argument or the use the ' +
      '{{#with foo as |bar|}} syntax',
      params.length === 1
    );

    Ember.assert(
      'The {{#with}} helper must be called with a block',
      !!template
    );

    internal.continueBlock(morph, env, scope, 'with', params, hash, template, inverse, visitor);
  },

  rerender(morph, env, scope, params, hash, template, inverse, visitor) {
    internal.continueBlock(morph, env, scope, 'with', params, hash, template, inverse, visitor);
  }
};
cjc343/ember.js
packages/ember-htmlbars/lib/keywords/with.js
JavaScript
mit
1,930
/*** * ASM: a very small and fast Java bytecode manipulation framework * Copyright (c) 2000-2007 INRIA, France Telecom * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. */ package org.mockito.asm.util; import org.mockito.asm.AnnotationVisitor; import org.mockito.asm.Attribute; import org.mockito.asm.FieldVisitor; /** * A {@link FieldVisitor} that checks that its methods are properly used. 
*/ public class CheckFieldAdapter implements FieldVisitor { private final FieldVisitor fv; private boolean end; public CheckFieldAdapter(final FieldVisitor fv) { this.fv = fv; } public AnnotationVisitor visitAnnotation( final String desc, final boolean visible) { checkEnd(); CheckMethodAdapter.checkDesc(desc, false); return new CheckAnnotationAdapter(fv.visitAnnotation(desc, visible)); } public void visitAttribute(final Attribute attr) { checkEnd(); if (attr == null) { throw new IllegalArgumentException("Invalid attribute (must not be null)"); } fv.visitAttribute(attr); } public void visitEnd() { checkEnd(); end = true; fv.visitEnd(); } private void checkEnd() { if (end) { throw new IllegalStateException("Cannot call a visit method after visitEnd has been called"); } } }
wxcandy/Mahjong
org/mockito/asm/util/CheckFieldAdapter.java
Java
mit
2,946
// ==++==
//
//   Copyright (c) Microsoft Corporation.  All rights reserved.
//
// ==--==
/*============================================================
**
** Class: EventLogPermissionHolder
**
** Purpose:
** Internal class that defines the permissions that are used
** throughout the Event Log classes of this namespace.
**
============================================================*/

using System;
using System.Security.Permissions;

namespace System.Diagnostics.Eventing.Reader
{
    /// <summary>
    /// Builds the <see cref="EventLogPermission"/> instance demanded
    /// throughout the Event Log reader classes in this namespace.
    /// </summary>
    internal class EventLogPermissionHolder
    {
        public EventLogPermissionHolder()
        {
        }

        /// <summary>
        /// Creates a permission containing a single entry granting
        /// <c>EventLogPermissionAccess.Administer</c> scoped to machine name ".".
        /// </summary>
        public static EventLogPermission GetEventLogPermission()
        {
            EventLogPermission permission = new EventLogPermission();
            EventLogPermissionEntry entry =
                new EventLogPermissionEntry(EventLogPermissionAccess.Administer, ".");
            permission.PermissionEntries.Add(entry);
            return permission;
        }
    }
}
sekcheong/referencesource
System.Core/System/Diagnostics/Eventing/Reader/EventLogPermissionHolder.cs
C#
mit
983
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.loader; public class Constants { public static final String Package = "org.apache.catalina.loader"; }
plumer/codana
tomcat_files/7.0.61/Constants (2).java
Java
mit
945
/*
 * Copyright 2009 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.mybatis.generator.logging;

/**
 * Defines the interface for creating Log implementations.
 *
 * @author Jeff Butler
 *
 */
public interface AbstractLogFactory {

    /**
     * Returns a {@link Log} implementation for the given class.
     *
     * @param aClass the class to obtain a logger for
     * @return a Log instance associated with the class
     */
    Log getLog(Class<?> aClass);
}
NanYoMy/mybatis-generator
src/main/java/org/mybatis/generator/logging/AbstractLogFactory.java
Java
mit
836
/* $Id$ */ /* * This file is part of OpenTTD. * OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2. * OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>. */ /** @file script_info_dummy.cpp Implementation of a dummy Script. */ #include "../stdafx.h" #include <squirrel.h> #include "../string_func.h" #include "../strings_func.h" #include "../safeguards.h" /* The reason this exists in C++, is that a user can trash his ai/ or game/ dir, * leaving no Scripts available. The complexity to solve this is insane, and * therefore the alternative is used, and make sure there is always a Script * available, no matter what the situation is. By defining it in C++, there * is simply no way a user can delete it, and therefore safe to use. It has * to be noted that this Script is complete invisible for the user, and impossible * to select manual. It is a fail-over in case no Scripts are available. */ /** Run the dummy info.nut. 
*/
void Script_CreateDummyInfo(HSQUIRRELVM vm, const char *type, const char *dir)
{
	/* Assemble, in a stack buffer, the Squirrel source of a minimal
	 * Dummy<type> info class plus its registration call. */
	char dummy_script[4096];
	char *dp = dummy_script;
	dp += seprintf(dp, lastof(dummy_script), "class Dummy%s extends %sInfo {\n", type, type);
	dp += seprintf(dp, lastof(dummy_script), "function GetAuthor() { return \"OpenTTD Developers Team\"; }\n");
	dp += seprintf(dp, lastof(dummy_script), "function GetName() { return \"Dummy%s\"; }\n", type);
	dp += seprintf(dp, lastof(dummy_script), "function GetShortName() { return \"DUMM\"; }\n");
	dp += seprintf(dp, lastof(dummy_script), "function GetDescription() { return \"A Dummy %s that is loaded when your %s/ dir is empty\"; }\n", type, dir);
	dp += seprintf(dp, lastof(dummy_script), "function GetVersion() { return 1; }\n");
	dp += seprintf(dp, lastof(dummy_script), "function GetDate() { return \"2008-07-26\"; }\n");
	dp += seprintf(dp, lastof(dummy_script), "function CreateInstance() { return \"Dummy%s\"; }\n", type);
	dp += seprintf(dp, lastof(dummy_script), "} RegisterDummy%s(Dummy%s());\n", type, type);

	const SQChar *sq_dummy_script = dummy_script;

	sq_pushroottable(vm);

	/* Load and run the script */
	if (SQ_SUCCEEDED(sq_compilebuffer(vm, sq_dummy_script, strlen(sq_dummy_script), "dummy", SQTrue))) {
		sq_push(vm, -2);
		if (SQ_SUCCEEDED(sq_call(vm, 1, SQFalse, SQTrue))) {
			sq_pop(vm, 1);
			return;
		}
	}
	/* Compiling/running the generated source is expected to always succeed. */
	NOT_REACHED();
}

/** Run the dummy AI and let it generate an error message. */
void Script_CreateDummy(HSQUIRRELVM vm, StringID string, const char *type)
{
	/* We want to translate the error message.
	 * We do this in three steps:
	 * 1) We get the error message
	 */
	char error_message[1024];
	GetString(error_message, string, lastof(error_message));

	/* Make escapes for all quotes and slashes.
	 * The "- 2" head-room leaves space for a possible escape byte plus
	 * the terminating NUL. */
	char safe_error_message[1024];
	char *q = safe_error_message;
	for (const char *p = error_message; *p != '\0' && q < lastof(safe_error_message) - 2; p++, q++) {
		if (*p == '"' || *p == '\\') *q++ = '\\';
		*q = *p;
	}
	*q = '\0';

	/* 2) We construct the AI's code.
	 * This is done by merging a header, body and footer. */
	char dummy_script[4096];
	char *dp = dummy_script;
	dp += seprintf(dp, lastof(dummy_script), "class Dummy%s extends %sController {\n function Start()\n {\n", type, type);

	/* As special trick we need to split the error message on newlines and
	 * emit each newline as a separate error printing string. */
	char *newline;
	char *p = safe_error_message;
	do {
		newline = strchr(p, '\n');
		if (newline != NULL) *newline = '\0';

		dp += seprintf(dp, lastof(dummy_script), " %sLog.Error(\"%s\");\n", type, p);
		p = newline + 1;
	} while (newline != NULL);

	dp = strecpy(dp, " }\n}\n", lastof(dummy_script));

	/* 3) We translate the error message in the character format that Squirrel wants.
	 * We can use the fact that the wchar string printing also uses %s to print
	 * old style char strings, which is what was generated during the script generation. */
	const SQChar *sq_dummy_script = dummy_script;

	/* And finally we load and run the script */
	sq_pushroottable(vm);
	if (SQ_SUCCEEDED(sq_compilebuffer(vm, sq_dummy_script, strlen(sq_dummy_script), "dummy", SQTrue))) {
		sq_push(vm, -2);
		if (SQ_SUCCEEDED(sq_call(vm, 1, SQFalse, SQTrue))) {
			sq_pop(vm, 1);
			return;
		}
	}
	/* As above: failure to compile/run the generated source is a bug. */
	NOT_REACHED();
}
alex34567/openttd-emscripten
src/script/script_info_dummy.cpp
C++
gpl-2.0
4,715
// Unit insignia entries contributed by ACE3.
// NOTE(review): textureVehicle is left empty for both entries — presumably
// no vehicle-mounted variant is provided; confirm against engine docs.
class CfgUnitInsignia {
    // ACE3 logo insignia.
    class ACE_insignia_logo {
        displayName = "ACE3";
        author = CSTRING(ACETeam);
        texture = PATHTOF(data\Insignia_ace3logo_ca.paa);
        textureVehicle = "";
    };
    // Banana insignia (displayed as "ABE3").
    class ACE_insignia_banana {
        displayName = "ABE3";
        author = CSTRING(ACETeam);
        texture = PATHTOF(data\insignia_banana_ca.paa);
        textureVehicle = "";
    };
};
MikeMatrix/ACE3
addons/common/CfgUnitInsignia.hpp
C++
gpl-2.0
406
/*
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_ASM_CODEBUFFER_HPP
#define SHARE_VM_ASM_CODEBUFFER_HPP

#include "code/oopRecorder.hpp"
#include "code/relocInfo.hpp"

class CodeStrings;
class PhaseCFG;
class Compile;
class BufferBlob;
class CodeBuffer;
class Label;

// Table of well-known offsets within a blob's code (entry points and
// handler locations), indexed by the Entries enum below.
class CodeOffsets: public StackObj {
public:
  enum Entries { Entry,
                 Verified_Entry,
                 Frame_Complete, // Offset in the code where the frame setup is (for forte stackwalks) is complete
                 OSR_Entry,
                 Dtrace_trap = OSR_Entry, // dtrace probes can never have an OSR entry so reuse it
                 Exceptions,     // Offset where exception handler lives
                 Deopt,          // Offset where deopt handler lives
                 DeoptMH,        // Offset where MethodHandle deopt handler lives
                 UnwindHandler,  // Offset to default unwind handler
                 max_Entries };

  // special value to note codeBlobs where profile (forte) stack walking is
  // always dangerous and suspect.
  enum { frame_never_safe = -1 };

private:
  int _values[max_Entries];

public:
  // Entry/Verified_Entry/OSR_Entry default to 0; Frame_Complete to
  // frame_never_safe; all handler offsets to -1 (meaning "not set").
  CodeOffsets() {
    _values[Entry         ] = 0;
    _values[Verified_Entry] = 0;
    _values[Frame_Complete] = frame_never_safe;
    _values[OSR_Entry     ] = 0;
    _values[Exceptions    ] = -1;
    _values[Deopt         ] = -1;
    _values[DeoptMH       ] = -1;
    _values[UnwindHandler ] = -1;
  }

  int value(Entries e) { return _values[e]; }
  void set_value(Entries e, int val) { _values[e] = val; }
};

// This class represents a stream of code and associated relocations.
// There are a few in each CodeBuffer.
// They are filled concurrently, and concatenated at the end.
class CodeSection VALUE_OBJ_CLASS_SPEC {
  friend class CodeBuffer;
 public:
  typedef int csize_t;  // code size type; would be size_t except for history

 private:
  address     _start;           // first byte of contents (instructions)
  address     _mark;            // user mark, usually an instruction beginning
  address     _end;             // current end address
  address     _limit;           // last possible (allocated) end address
  relocInfo*  _locs_start;      // first byte of relocation information
  relocInfo*  _locs_end;        // first byte after relocation information
  relocInfo*  _locs_limit;      // first byte after relocation information buf
  address     _locs_point;      // last relocated position (grows upward)
  bool        _locs_own;        // did I allocate the locs myself?
  bool        _frozen;          // no more expansion of this section
  char        _index;           // my section number (SECT_INST, etc.)
  CodeBuffer* _outer;           // enclosing CodeBuffer

  // (Note: _locs_point used to be called _last_reloc_offset.)

  // Default-constructs an unallocated section; the index/outer fields are
  // only poisoned in debug builds and set for real by initialize_outer().
  CodeSection() {
    _start         = NULL;
    _mark          = NULL;
    _end           = NULL;
    _limit         = NULL;
    _locs_start    = NULL;
    _locs_end      = NULL;
    _locs_limit    = NULL;
    _locs_point    = NULL;
    _locs_own      = false;
    _frozen        = false;
    debug_only(_index = (char)-1);
    debug_only(_outer = (CodeBuffer*)badAddress);
  }

  void initialize_outer(CodeBuffer* outer, int index) {
    _outer = outer;
    _index = index;
  }

  // Binds the section to a memory range; may be called only once
  // (asserted via _start == NULL).
  void initialize(address start, csize_t size = 0) {
    assert(_start == NULL, "only one init step, please");
    _start         = start;
    _mark          = NULL;
    _end           = start;
    _limit         = start + size;
    _locs_point    = start;
  }

  void initialize_locs(int locs_capacity);
  void expand_locs(int new_capacity);
  void initialize_locs_from(const CodeSection* source_cs);

  // helper for CodeBuffer::expand()
  void take_over_code_from(CodeSection* cs) {
    _start      = cs->_start;
    _mark       = cs->_mark;
    _end        = cs->_end;
    _limit      = cs->_limit;
    _locs_point = cs->_locs_point;
  }

 public:
  address     start() const         { return _start; }
  address     mark() const          { return _mark; }
  address     end() const           { return _end; }
  address     limit() const         { return _limit; }
  csize_t     size() const          { return (csize_t)(_end - _start); }
  csize_t     mark_off() const      { assert(_mark != NULL, "not an offset");
                                      return (csize_t)(_mark - _start); }
  csize_t     capacity() const      { return (csize_t)(_limit - _start); }
  csize_t     remaining() const     { return (csize_t)(_limit - _end); }

  relocInfo*  locs_start() const    { return _locs_start; }
  relocInfo*  locs_end() const      { return _locs_end; }
  int         locs_count() const    { return (int)(_locs_end - _locs_start); }
  relocInfo*  locs_limit() const    { return _locs_limit; }
  address     locs_point() const    { return _locs_point; }
  csize_t     locs_point_off() const{ return (csize_t)(_locs_point - _start); }
  csize_t     locs_capacity() const { return (csize_t)(_locs_limit - _locs_start); }
  csize_t     locs_remaining()const { return (csize_t)(_locs_limit - _locs_end); }

  int         index() const         { return _index; }
  bool        is_allocated() const  { return _start != NULL; }
  bool        is_empty() const      { return _start == _end; }
  bool        is_frozen() const     { return _frozen; }
  bool        has_locs() const      { return _locs_end != NULL; }

  CodeBuffer* outer() const         { return _outer; }

  // is a given address in this section?  (2nd version is end-inclusive)
  bool contains(address pc) const   { return pc >= _start && pc <  _end; }
  bool contains2(address pc) const  { return pc >= _start && pc <= _end; }
  bool allocates(address pc) const  { return pc >= _start && pc <  _limit; }
  bool allocates2(address pc) const { return pc >= _start && pc <= _limit; }

  void    set_end(address pc)       { assert(allocates2(pc), err_msg("not in CodeBuffer memory: " PTR_FORMAT " <= " PTR_FORMAT " <= " PTR_FORMAT, _start, pc, _limit)); _end = pc; }
  void    set_mark(address pc)      { assert(contains2(pc), "not in codeBuffer");
                                      _mark = pc; }
  void    set_mark_off(int offset)  { assert(contains2(offset+_start),"not in codeBuffer");
                                      _mark = offset + _start; }
  void    set_mark()                { _mark = _end; }
  void    clear_mark()              { _mark = NULL; }

  void    set_locs_end(relocInfo* p) {
    assert(p <= locs_limit(), "locs data fits in allocated buffer");
    _locs_end = p;
  }
  void    set_locs_point(address pc) {
    assert(pc >= locs_point(), "relocation addr may not decrease");
    assert(allocates2(pc),     "relocation addr must be in this section");
    _locs_point = pc;
  }

  // Code emission: each helper stores the value at _end and then advances
  // _end past it (set_end asserts the write stayed within the allocation).
  void emit_int8 ( int8_t  x)  { *((int8_t*)  end()) = x; set_end(end() + sizeof(int8_t)); }
  void emit_int16( int16_t x)  { *((int16_t*) end()) = x; set_end(end() + sizeof(int16_t)); }
  void emit_int32( int32_t x)  { *((int32_t*) end()) = x; set_end(end() + sizeof(int32_t)); }
  void emit_int64( int64_t x)  { *((int64_t*) end()) = x; set_end(end() + sizeof(int64_t)); }
  void emit_float( jfloat  x)  { *((jfloat*)  end()) = x; set_end(end() + sizeof(jfloat)); }
  void emit_double(jdouble x)  { *((jdouble*) end()) = x; set_end(end() + sizeof(jdouble)); }
  void emit_address(address x) { *((address*) end()) = x; set_end(end() + sizeof(address)); }

  // Share a scratch buffer for relocinfo.  (Hacky; saves a resource allocation.)
  void initialize_shared_locs(relocInfo* buf, int length);

  // Manage labels and their addresses.
  address target(Label& L, address branch_pc);

  // Emit a relocation.
  void relocate(address at, RelocationHolder const& rspec, int format = 0);
  void relocate(address at,    relocInfo::relocType rtype, int format = 0) {
    if (rtype != relocInfo::none)
      relocate(at, Relocation::spec_simple(rtype), format);
  }

  // alignment requirement for starting offset
  // Requirements are that the instruction area and the
  // stubs area must start on CodeEntryAlignment, and
  // the ctable on sizeof(jdouble)
  int alignment() const             { return MAX2((int)sizeof(jdouble), (int)CodeEntryAlignment); }

  // Slop between sections, used only when allocating temporary BufferBlob buffers.
  static csize_t end_slop()         { return MAX2((int)sizeof(jdouble), (int)CodeEntryAlignment); }

  csize_t align_at_start(csize_t off) const { return (csize_t) align_size_up(off, alignment()); }

  // Mark a section frozen.  Assign its remaining space to
  // the following section.  It will never expand after this point.
  inline void freeze();         //  { _outer->freeze_section(this); }

  // Ensure there's enough space left in the current section.
  // Return true if there was an expansion.
  bool maybe_expand_to_ensure_remaining(csize_t amount);

#ifndef PRODUCT
  void decode();
  void dump();
  void print(const char* name);
#endif //PRODUCT
};

class CodeString;

// Offset-keyed string/comment annotations for generated code; the backing
// storage exists only in non-product builds.
// NOTE(review): PRODUCT_RETURN/PRODUCT_RETURN_ appear to stub these methods
// out in product builds — confirm against the macro definitions.
class CodeStrings VALUE_OBJ_CLASS_SPEC {
private:
#ifndef PRODUCT
  CodeString* _strings;
#endif

  CodeString* find(intptr_t offset) const;
  CodeString* find_last(intptr_t offset) const;

public:
  CodeStrings() {
#ifndef PRODUCT
    _strings = NULL;
#endif
  }

  const char* add_string(const char * string) PRODUCT_RETURN_(return NULL;);

  void add_comment(intptr_t offset, const char * comment) PRODUCT_RETURN;
  void print_block_comment(outputStream* stream, intptr_t offset) const PRODUCT_RETURN;
  void assign(CodeStrings& other)  PRODUCT_RETURN;
  void free() PRODUCT_RETURN;
};

// A CodeBuffer describes a memory space into which assembly
// code is generated.  This memory space usually occupies the
// interior of a single BufferBlob, but in some cases it may be
// an arbitrary span of memory, even outside the code cache.
//
// A code buffer comes in two variants:
//
// (1) A CodeBuffer referring to an already allocated piece of memory:
//     This is used to direct 'static' code generation (e.g. for interpreter
//     or stubroutine generation, etc.).  This code comes with NO relocation
//     information.
//
// (2) A CodeBuffer referring to a piece of memory allocated when the
//     CodeBuffer is allocated.  This is used for nmethod generation.
//
// The memory can be divided up into several parts called sections.
// Each section independently accumulates code (or data) an relocations.
// Sections can grow (at the expense of a reallocation of the BufferBlob
// and recopying of all active sections).  When the buffered code is finally
// written to an nmethod (or other CodeBlob), the contents (code, data,
// and relocations) of the sections are padded to an alignment and concatenated.
// Instructions and data in one section can contain relocatable references to
// addresses in a sibling section.
// A CodeBuffer accumulates generated code and relocations in up to three
// CodeSections (consts, insts, stubs) and later concatenates them into a
// CodeBlob.  Stack-allocated only (see the private operator new below).
class CodeBuffer: public StackObj {
  friend class CodeSection;

 private:
  // CodeBuffers must be allocated on the stack except for a single
  // special case during expansion which is handled internally.  This
  // is done to guarantee proper cleanup of resources.
  void* operator new(size_t size) throw() { return ResourceObj::operator new(size); }
  void  operator delete(void* p)          { ShouldNotCallThis(); }

 public:
  typedef int csize_t;  // code size type; would be size_t except for history

  enum {
    // Here is the list of all possible sections. The order reflects
    // the final layout.
    SECT_FIRST = 0,
    SECT_CONSTS = SECT_FIRST, // Non-instruction data: Floats, jump tables, etc.
    SECT_INSTS,               // Executable instructions.
    SECT_STUBS,               // Outbound trampolines for supporting call sites.
    SECT_LIMIT, SECT_NONE = -1
  };

 private:
  enum {
    sect_bits = 2,      // assert (SECT_LIMIT <= (1<<sect_bits))
    sect_mask = (1<<sect_bits)-1
  };

  const char*  _name;   // human-readable buffer name (for logging/debugging)

  CodeSection  _consts; // constants, jump tables
  CodeSection  _insts;  // instructions (the main section)
  CodeSection  _stubs;  // stubs (call site support), deopt, exception handling

  CodeBuffer*  _before_expand; // dead buffer, from before the last expansion

  BufferBlob*  _blob;          // optional buffer in CodeCache for generated code
  address      _total_start;   // first address of combined memory buffer
  csize_t      _total_size;    // size in bytes of combined memory buffer

  OopRecorder* _oop_recorder;  // records oops/metadata embedded in the code
  CodeStrings  _strings;       // debug comments attached to code offsets
  OopRecorder  _default_oop_recorder; // override with initialize_oop_recorder
  Arena*       _overflow_arena;

  address      _decode_begin;  // start address for decode
  address decode_begin();

  // Reset all bookkeeping pointers; does not touch section memory.
  void initialize_misc(const char * name) {
    // all pointers other than code_start/end and those inside the sections
    assert(name != NULL, "must have a name");
    _name            = name;
    _before_expand   = NULL;
    _blob            = NULL;
    _oop_recorder    = NULL;
    _decode_begin    = NULL;
    _overflow_arena  = NULL;
  }

  // Bind this buffer to pre-allocated memory; initially all of the space
  // belongs to the insts section (other sections steal from it later).
  void initialize(address code_start, csize_t code_size) {
    _consts.initialize_outer(this, SECT_CONSTS);
    _insts.initialize_outer(this,  SECT_INSTS);
    _stubs.initialize_outer(this,  SECT_STUBS);
    _total_start = code_start;
    _total_size  = code_size;
    // Initialize the main section:
    _insts.initialize(code_start, code_size);
    assert(!_stubs.is_allocated(),  "no garbage here");
    assert(!_consts.is_allocated(), "no garbage here");
    _oop_recorder = &_default_oop_recorder;
  }

  void initialize_section_size(CodeSection* cs, csize_t size);

  void freeze_section(CodeSection* cs);

  // helper for CodeBuffer::expand()
  void take_over_code_from(CodeBuffer* cs);

  // ensure sections are disjoint, ordered, and contained in the blob
  void verify_section_allocation();

  // copies combined relocations to the blob, returns bytes copied
  // (if target is null, it is a dry run only, just for sizing)
  csize_t copy_relocations_to(CodeBlob* blob) const;

  // copies combined code to the blob (assumes relocs are already in there)
  void copy_code_to(CodeBlob* blob);

  // moves code sections to new buffer (assumes relocs are already in there)
  void relocate_code_to(CodeBuffer* cb) const;

  // set up a model of the final layout of my contents
  void compute_final_layout(CodeBuffer* dest) const;

  // Expand the given section so at least 'amount' is remaining.
  // Creates a new, larger BufferBlob, and rewrites the code & relocs.
  void expand(CodeSection* which_cs, csize_t amount);

  // Helper for expand.
  csize_t figure_expanded_capacities(CodeSection* which_cs, csize_t amount, csize_t* new_capacity);

 public:
  // (1) code buffer referring to pre-allocated instruction memory
  CodeBuffer(address code_start, csize_t code_size) {
    assert(code_start != NULL, "sanity");
    initialize_misc("static buffer");
    initialize(code_start, code_size);
    verify_section_allocation();
  }

  // (2) CodeBuffer referring to pre-allocated CodeBlob.
  CodeBuffer(CodeBlob* blob);

  // (3) code buffer allocating codeBlob memory for code & relocation
  // info but with lazy initialization.  The name must be something
  // informative.
  CodeBuffer(const char* name) {
    initialize_misc(name);
  }

  // (4) code buffer allocating codeBlob memory for code & relocation
  // info.  The name must be something informative and code_size must
  // include both code and stubs sizes.
  CodeBuffer(const char* name, csize_t code_size, csize_t locs_size) {
    initialize_misc(name);
    initialize(code_size, locs_size);
  }

  ~CodeBuffer();

  // Initialize a CodeBuffer constructed using constructor 3.  Using
  // constructor 4 is equivalent to calling constructor 3 and then
  // calling this method.  It's been factored out for convenience of
  // construction.
  void initialize(csize_t code_size, csize_t locs_size);

  // Accessors for the individual sections.
  CodeSection* consts() { return &_consts; }
  CodeSection* insts() { return &_insts; }
  CodeSection* stubs() { return &_stubs; }

  // present sections in order; return NULL at end; consts is #0, etc.
  CodeSection* code_section(int n) {
    // This makes the slightly questionable but portable assumption
    // that the various members (_consts, _insts, _stubs, etc.) are
    // adjacent in the layout of CodeBuffer.
    CodeSection* cs = &_consts + n;
    assert(cs->index() == n || !cs->is_allocated(), "sanity");
    return cs;
  }
  const CodeSection* code_section(int n) const {  // yucky const stuff
    return ((CodeBuffer*)this)->code_section(n);
  }
  static const char* code_section_name(int n);
  int section_index_of(address addr) const;
  bool contains(address addr) const {
    // handy for debugging
    return section_index_of(addr) > SECT_NONE;
  }

  // A stable mapping between 'locators' (small ints) and addresses.
  // A locator packs a section index into the low sect_bits bits and a
  // byte offset into the remaining high bits.
  static int locator_pos(int locator)   { return locator >> sect_bits; }
  static int locator_sect(int locator)  { return locator &  sect_mask; }
  static int locator(int pos, int sect) { return (pos << sect_bits) | sect; }
  int locator(address addr) const;
  address locator_address(int locator) const;

  // Heuristic for pre-packing the taken/not-taken bit of a predicted branch.
  bool is_backward_branch(Label& L);

  // Properties
  const char* name() const                  { return _name; }
  CodeBuffer* before_expand() const         { return _before_expand; }
  BufferBlob* blob() const                  { return _blob; }
  void    set_blob(BufferBlob* blob);
  void   free_blob();                       // Free the blob, if we own one.

  // Properties relative to the insts section:
  address       insts_begin() const      { return _insts.start();      }
  address       insts_end() const        { return _insts.end();        }
  void      set_insts_end(address end)   {        _insts.set_end(end); }
  address       insts_limit() const      { return _insts.limit();      }
  address       insts_mark() const       { return _insts.mark();       }
  void      set_insts_mark()             {        _insts.set_mark();   }
  void    clear_insts_mark()             {        _insts.clear_mark(); }

  // is there anything in the buffer other than the current section?
  bool    is_pure() const                { return insts_size() == total_content_size(); }

  // size in bytes of output so far in the insts sections
  csize_t insts_size() const             { return _insts.size(); }

  // same as insts_size(), except that it asserts there is no non-code here
  csize_t pure_insts_size() const        { assert(is_pure(), "no non-code");
                                           return insts_size(); }

  // capacity in bytes of the insts sections
  csize_t insts_capacity() const         { return _insts.capacity(); }

  // number of bytes remaining in the insts section
  csize_t insts_remaining() const        { return _insts.remaining(); }

  // is a given address in the insts section?  (2nd version is end-inclusive)
  bool insts_contains(address pc) const  { return _insts.contains(pc); }
  bool insts_contains2(address pc) const { return _insts.contains2(pc); }

  // Record any extra oops required to keep embedded metadata alive
  void finalize_oop_references(methodHandle method);

  // Allocated size in all sections, when aligned and concatenated
  // (this is the eventual state of the content in its final
  // CodeBlob).
  csize_t total_content_size() const;

  // Combined offset (relative to start of first section) of given
  // section, as eventually found in the final CodeBlob.
  csize_t total_offset_of(CodeSection* cs) const;

  // allocated size of all relocation data, including index, rounded up
  csize_t total_relocation_size() const;

  // allocated size of any and all recorded oops
  csize_t total_oop_size() const {
    OopRecorder* recorder = oop_recorder();
    return (recorder == NULL)? 0: recorder->oop_size();
  }

  // allocated size of any and all recorded metadata
  csize_t total_metadata_size() const {
    OopRecorder* recorder = oop_recorder();
    return (recorder == NULL)? 0: recorder->metadata_size();
  }

  // Configuration functions, called immediately after the CB is constructed.
  // The section sizes are subtracted from the original insts section.
  // Note:  Call them in reverse section order, because each steals from insts.
  void initialize_consts_size(csize_t size)            { initialize_section_size(&_consts,  size); }
  void initialize_stubs_size(csize_t size)             { initialize_section_size(&_stubs,   size); }
  // Override default oop recorder.
  void initialize_oop_recorder(OopRecorder* r);

  OopRecorder* oop_recorder() const   { return _oop_recorder; }
  CodeStrings& strings()              { return _strings; }

  // Code generation
  // Convenience: forward relocation records to the insts section.
  void relocate(address at, RelocationHolder const& rspec, int format = 0) {
    _insts.relocate(at, rspec, format);
  }
  void relocate(address at,    relocInfo::relocType rtype, int format = 0) {
    _insts.relocate(at, rtype, format);
  }

  // Management of overflow storage for binding of Labels.
  GrowableArray<int>* create_patch_overflow();

  // NMethod generation
  void copy_code_and_locs_to(CodeBlob* blob) {
    assert(blob != NULL, "sane");
    copy_relocations_to(blob);
    copy_code_to(blob);
  }
  void copy_values_to(nmethod* nm) {
    if (!oop_recorder()->is_unused()) {
      oop_recorder()->copy_values_to(nm);
    }
  }

  // Transform an address from the code in this code buffer to a specified code buffer
  address transform_address(const CodeBuffer &cb, address addr) const;

  void block_comment(intptr_t offset, const char * comment) PRODUCT_RETURN;
  const char* code_string(const char* str) PRODUCT_RETURN_(return NULL;);

  // Log a little info about section usage in the CodeBuffer
  void log_section_sizes(const char* name);

#ifndef PRODUCT
 public:
  // Printing / Decoding
  // decodes from decode_begin() to code_end() and sets decode_begin to end
  void    decode();
  void    decode_all();         // decodes all the code
  void    skip_decode();        // sets decode_begin to code_end();
  void    print();
#endif


  // The following header contains architecture-specific implementations
#ifdef TARGET_ARCH_x86
# include "codeBuffer_x86.hpp"
#endif
#ifdef TARGET_ARCH_sparc
# include "codeBuffer_sparc.hpp"
#endif
#ifdef TARGET_ARCH_zero
# include "codeBuffer_zero.hpp"
#endif
#ifdef TARGET_ARCH_arm
# include "codeBuffer_arm.hpp"
#endif
#ifdef TARGET_ARCH_ppc
# include "codeBuffer_ppc.hpp"
#endif

};

// Out-of-line here because freeze_section/expand are declared above.
inline void CodeSection::freeze() {
  _outer->freeze_section(this);
}

inline bool CodeSection::maybe_expand_to_ensure_remaining(csize_t amount) {
  if (remaining() < amount) {
    _outer->expand(this, amount);
    return true;
  }
  return false;
}

#endif // SHARE_VM_ASM_CODEBUFFER_HPP
TobiHartmann/hotspot
src/share/vm/asm/codeBuffer.hpp
C++
gpl-2.0
23,840
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// NOTE(review): this is pre-Go1 code — os.Error, os.EOF, os.NewError and the
// zero-argument hash.Hash.Sum all predate the Go 1 standard library.
package patch

import (
	"bytes"
	"compress/zlib"
	"crypto/sha1"
	"encoding/git85"
	"fmt"
	"io"
	"os"
)

// gitSHA1 computes the Git object hash of data, i.e. the SHA-1 of the
// "blob <len>\x00" header followed by the data itself.
func gitSHA1(data []byte) []byte {
	if len(data) == 0 {
		// special case: 0 length is all zeros sum
		return make([]byte, 20)
	}
	h := sha1.New()
	fmt.Fprintf(h, "blob %d\x00", len(data))
	h.Write(data)
	return h.Sum()
}

// BUG(rsc): The Git binary delta format is not implemented, only Git binary literals.

// GitBinaryLiteral represents a Git binary literal diff.
type GitBinaryLiteral struct {
	OldSHA1 []byte // if non-empty, the SHA1 hash of the original
	New     []byte // the new contents
}

// Apply implements the Diff interface's Apply method.
// It verifies old against the recorded hash (prefix match, so an empty
// OldSHA1 always matches) before returning the replacement bytes.
func (d *GitBinaryLiteral) Apply(old []byte) ([]byte, os.Error) {
	if sum := gitSHA1(old); !bytes.HasPrefix(sum, d.OldSHA1) {
		return nil, ErrPatchFailure
	}
	return d.New, nil
}

// unhex decodes a single hex digit; 255 marks a non-hex byte.
func unhex(c byte) uint8 {
	switch {
	case '0' <= c && c <= '9':
		return c - '0'
	case 'a' <= c && c <= 'f':
		return c - 'a' + 10
	case 'A' <= c && c <= 'F':
		return c - 'A' + 10
	}
	return 255
}

// getHex consumes a leading run of hex digits from s, returning the decoded
// bytes and the unconsumed remainder.
func getHex(s []byte) (data []byte, rest []byte) {
	n := 0
	for n < len(s) && unhex(s[n]) != 255 {
		n++
	}
	n &^= 1 // Only take an even number of hex digits.
	data = make([]byte, n/2)
	for i := range data {
		data[i] = unhex(s[2*i])<<4 | unhex(s[2*i+1])
	}
	rest = s[n:]
	return
}

// ParseGitBinary parses raw as a Git binary patch.
// Scans the patch header lines for "index <old>..<new>" (recording the two
// hashes) and "GIT binary patch", then decodes a "literal <n>" section:
// git85-encoded, zlib-compressed data whose SHA-1 must match the recorded
// new hash.  Helpers getLine/skip/atoi are defined elsewhere in this package.
func ParseGitBinary(raw []byte) (Diff, os.Error) {
	var oldSHA1, newSHA1 []byte
	var sawBinary bool
	for {
		var first []byte
		first, raw, _ = getLine(raw, 1)
		first = bytes.TrimSpace(first)
		if s, ok := skip(first, "index "); ok {
			oldSHA1, s = getHex(s)
			if s, ok = skip(s, ".."); !ok {
				continue
			}
			newSHA1, s = getHex(s)
			continue
		}
		if _, ok := skip(first, "GIT binary patch"); ok {
			sawBinary = true
			continue
		}
		if n, _, ok := atoi(first, "literal ", 10); ok && sawBinary {
			data := make([]byte, n)
			d := git85.NewDecoder(bytes.NewBuffer(raw))
			z, err := zlib.NewReader(d)
			if err != nil {
				return nil, err
			}
			// NOTE(review): defer inside the loop — harmless in practice
			// because every path below returns, ending the function.
			defer z.Close()
			if _, err = io.ReadFull(z, data); err != nil {
				if err == os.EOF {
					err = io.ErrUnexpectedEOF
				}
				return nil, err
			}
			// The stream must contain exactly n bytes; a successful extra
			// read means the literal was longer than declared.
			var buf [1]byte
			m, err := z.Read(buf[0:])
			if m != 0 || err != os.EOF {
				return nil, os.NewError("Git binary literal longer than expected")
			}
			if sum := gitSHA1(data); !bytes.HasPrefix(sum, newSHA1) {
				return nil, os.NewError("Git binary literal SHA1 mismatch")
			}
			return &GitBinaryLiteral{oldSHA1, data}, nil
		}
		if !sawBinary {
			return nil, os.NewError("unexpected Git patch header: " + string(first))
		}
	}
	panic("unreachable")
}
SanDisk-Open-Source/SSD_Dashboard
uefi/gcc/gcc-4.6.3/libgo/go/patch/git.go
GO
gpl-2.0
2,816
# encoding: utf-8

require 'spec_helper'
require_dependency 'post_creator'

describe CategoryUser do

  # batch_set should make the watched-category set exactly match the given
  # list: adding, removing, and replacing rows as needed.
  it 'allows batch set' do
    user = Fabricate(:user)
    category1 = Fabricate(:category)
    category2 = Fabricate(:category)

    watching = CategoryUser.where(user_id: user.id, notification_level: CategoryUser.notification_levels[:watching])

    CategoryUser.batch_set(user, :watching, [category1.id, category2.id])
    expect(watching.pluck(:category_id).sort).to eq [category1.id, category2.id]

    CategoryUser.batch_set(user, :watching, [])
    expect(watching.count).to eq 0

    CategoryUser.batch_set(user, :watching, [category2.id])
    expect(watching.count).to eq 1
  end

  # Integration specs below exercise the observer chain: creating posts in
  # watched/muted/tracked categories should produce the right notifications
  # and TopicUser records.
  context 'integration' do
    before do
      ActiveRecord::Base.observers.enable :all
    end

    it 'should operate correctly' do
      watched_category = Fabricate(:category)
      muted_category = Fabricate(:category)
      tracked_category = Fabricate(:category)

      user = Fabricate(:user)

      CategoryUser.create!(user: user, category: watched_category, notification_level: CategoryUser.notification_levels[:watching])
      CategoryUser.create!(user: user, category: muted_category, notification_level: CategoryUser.notification_levels[:muted])
      CategoryUser.create!(user: user, category: tracked_category, notification_level: CategoryUser.notification_levels[:tracking])

      watched_post = create_post(category: watched_category)
      muted_post = create_post(category: muted_category)
      tracked_post = create_post(category: tracked_category)

      # Watched categories notify; tracked categories do not notify but do
      # auto-track the topic.
      expect(Notification.where(user_id: user.id, topic_id: watched_post.topic_id).count).to eq 1
      expect(Notification.where(user_id: user.id, topic_id: tracked_post.topic_id).count).to eq 0

      tu = TopicUser.get(tracked_post.topic, user)
      expect(tu.notification_level).to eq TopicUser.notification_levels[:tracking]
      expect(tu.notifications_reason_id).to eq TopicUser.notification_reasons[:auto_track_category]
    end

    it "watches categories that have been changed" do
      user = Fabricate(:user)
      watched_category = Fabricate(:category)

      CategoryUser.create!(user: user, category: watched_category, notification_level: CategoryUser.notification_levels[:watching])

      post = create_post
      expect(TopicUser.get(post.topic, user)).to be_blank

      # Now, change the topic's category
      post.topic.change_category_to_id(watched_category.id)

      # Moving a topic INTO a watched category should start watching it.
      tu = TopicUser.get(post.topic, user)
      expect(tu.notification_level).to eq TopicUser.notification_levels[:watching]
    end

    it "unwatches categories that have been changed" do
      user = Fabricate(:user)
      watched_category = Fabricate(:category)

      CategoryUser.create!(user: user, category: watched_category, notification_level: CategoryUser.notification_levels[:watching])

      post = create_post(category: watched_category)
      tu = TopicUser.get(post.topic, user)
      expect(tu.notification_level).to eq TopicUser.notification_levels[:watching]

      # Now, change the topic's category
      unwatched_category = Fabricate(:category)
      post.topic.change_category_to_id(unwatched_category.id)

      # Moving a topic OUT of the watched category should stop watching it.
      expect(TopicUser.get(post.topic, user)).to be_blank
    end
  end
end
Procurem/Contraints
spec/models/category_user_spec.rb
Ruby
gpl-2.0
3,264
/** * Copyright (C) 2011 JTalks.org Team * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ package org.jtalks.jcommune.plugin.api.web.validation.validators; import org.jtalks.jcommune.plugin.api.service.PluginBbCodeService; import org.jtalks.jcommune.plugin.api.web.validation.annotations.BbCodeAwareSize; import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidatorContext; import org.springframework.beans.BeansException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; /** * Extends default @Size annotation to ignore BB codes in string. * As for now, applicable to string values only. 
* * @author Evgeniy Naumenko */ public class BbCodeAwareSizeValidator implements ConstraintValidator<BbCodeAwareSize, String>, ApplicationContextAware { public static final String NEW_LINE_HTML = "<br/>"; public static final String QUOTE_HTML = "&quot"; public static final String EMPTY_LIST_BB_REGEXP = "\\[list\\][\n\r\\s]*(\\[\\*\\][\n\r\\s]*)*\\[\\/list\\]"; private int min; private int max; private ApplicationContext context; private PluginBbCodeService bbCodeService; @Autowired public BbCodeAwareSizeValidator(PluginBbCodeService bbCodeService) { this.bbCodeService = bbCodeService; } /** * {@inheritDoc} */ @Override public void initialize(BbCodeAwareSize constraintAnnotation) { this.min = constraintAnnotation.min(); this.max = constraintAnnotation.max(); } /** * The database stores both bb codes and symbols visible for users. * Post length with bb codes can't be greater than max value. * {@inheritDoc} */ @Override public boolean isValid(String value, ConstraintValidatorContext context) { if (value != null) { String emptyListRemoved = removeEmptyListBb(value); String trimed = removeBBCodes(emptyListRemoved).trim(); int plainTextLength = getDisplayedLength(trimed); return plainTextLength >= min && value.length() <= max; } return false; } /** * Removes all BB codes from the text given, simply cutting * out all [...]-style tags found * * @param source text to cleanup * @return plain text without BB tags */ private String removeBBCodes(String source) { return getBBCodeService().stripBBCodes(source); } @Override public void setApplicationContext(ApplicationContext ac) throws BeansException { this.context = ac; } private PluginBbCodeService getBBCodeService() { if (bbCodeService == null) { bbCodeService = this.context.getBean(PluginBbCodeService.class); } return bbCodeService; } /** * Calculate length of string which be displayed. * Needed because method <b>removeBBCodes</b> leaves "&quot" and "<br/>" symbols. * @param s String to calculate length. 
* @return Length of string which be displayed. */ private int getDisplayedLength(String s) { return s.replaceAll(QUOTE_HTML, "\"").replaceAll(NEW_LINE_HTML, "\n\r").length(); } /** * Removes all empty lists from text. Needed because <b>removeBBCodes</b> deletes * bb codes for list but not deletes bb codes for list elements. * @param text Text to remove empty lists. * @return Text without empty lists. */ private String removeEmptyListBb(String text) { return text.replaceAll(EMPTY_LIST_BB_REGEXP, ""); } }
Noctrunal/jcommune
jcommune-plugin-api/src/main/java/org/jtalks/jcommune/plugin/api/web/validation/validators/BbCodeAwareSizeValidator.java
Java
lgpl-2.1
4,375
/* * Copyright 2011 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import com.google.common.collect.Lists; import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import java.util.List; /** * <p>Compiler pass that converts all calls to: * goog.object.create(key1, val1, key2, val2, ...) where all of the keys * are literals into object literals.</p> * * @author agrieve@google.com (Andrew Grieve) */ final class ClosureOptimizePrimitives implements CompilerPass { /** Reference to the JS compiler */ private final AbstractCompiler compiler; /** * Identifies all calls to goog.object.create. 
*/ private class FindObjectCreateCalls extends AbstractPostOrderCallback { List<Node> callNodes = Lists.newArrayList(); @Override public void visit(NodeTraversal t, Node n, Node parent) { if (n.isCall()) { String fnName = n.getFirstChild().getQualifiedName(); if ("goog$object$create".equals(fnName) || "goog.object.create".equals(fnName)) { callNodes.add(n); } } } } /** * @param compiler The AbstractCompiler */ ClosureOptimizePrimitives(AbstractCompiler compiler) { this.compiler = compiler; } @Override public void process(Node externs, Node root) { FindObjectCreateCalls pass = new FindObjectCreateCalls(); NodeTraversal.traverse(compiler, root, pass); processObjectCreateCalls(pass.callNodes); } /** * Converts all of the given call nodes to object literals that are safe to * do so. */ private void processObjectCreateCalls(List<Node> callNodes) { for (Node callNode : callNodes) { Node curParam = callNode.getFirstChild().getNext(); if (canOptimizeObjectCreate(curParam)) { Node objNode = IR.objectlit().srcref(callNode); while (curParam != null) { Node keyNode = curParam; Node valueNode = curParam.getNext(); curParam = valueNode.getNext(); callNode.removeChild(keyNode); callNode.removeChild(valueNode); if (!keyNode.isString()) { keyNode = IR.string(NodeUtil.getStringValue(keyNode)) .srcref(keyNode); } keyNode.setType(Token.STRING_KEY); keyNode.setQuotedString(); objNode.addChildToBack(IR.propdef(keyNode, valueNode)); } callNode.getParent().replaceChild(callNode, objNode); compiler.reportCodeChange(); } } } /** * Returns whether the given call to goog.object.create can be converted to an * object literal. */ private boolean canOptimizeObjectCreate(Node firstParam) { Node curParam = firstParam; while (curParam != null) { // All keys must be strings or numbers. if (!curParam.isString() && !curParam.isNumber()) { return false; } curParam = curParam.getNext(); // Check for an odd number of parameters. 
if (curParam == null) { return false; } curParam = curParam.getNext(); } return true; } }
jhiswin/idiil-closure-compiler
src/com/google/javascript/jscomp/ClosureOptimizePrimitives.java
Java
apache-2.0
3,783
/**
 * Copyright 2017 The AMP HTML Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS-IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Valid attribute sets for the two supported embed types.
const POST_PARAMS = {
  'embedtype': 'post',
  'hash': 'Yc8_Z9pnpg8aKMZbVcD-jK45eAk',
  'owner-id': '1',
  'post-id': '45616',
};

const POLL_PARAMS = {
  'embedtype': 'poll',
  'api-id': '6183531',
  'poll-id': '274086843_1a2a465f60fff4699f',
};

import '../amp-vk';
import {Layout} from '../../../../src/layout';
import {Resource} from '../../../../src/service/resource';

describes.realWin('amp-vk', {
  amp: {
    extensions: ['amp-vk'],
  },
}, env => {
  let win, doc;

  beforeEach(() => {
    win = env.win;
    doc = win.document;
  });

  // Builds an <amp-vk> element with the given data-* attributes, attaches it
  // to the document, and runs the build/measure/layout lifecycle.
  function createAmpVkElement(dataParams, layout) {
    const element = doc.createElement('amp-vk');

    for (const param in dataParams) {
      element.setAttribute(`data-${param}`, dataParams[param]);
    }

    element.setAttribute('width', 500);
    element.setAttribute('height', 300);

    if (layout) {
      element.setAttribute('layout', layout);
    }

    doc.body.appendChild(element);
    return element.build().then(() => {
      const resource = Resource.forElement(element);
      resource.measure();
      return element.layoutCallback();
    }).then(() => element);
  }

  it('requires data-embedtype', () => {
    const params = Object.assign({}, POST_PARAMS);
    delete params['embedtype'];
    return createAmpVkElement(params).should.eventually.be.rejectedWith(
        /The data-embedtype attribute is required for/);
  });

  it('removes iframe after unlayoutCallback', () => {
    return createAmpVkElement(POST_PARAMS).then(vkPost => {
      const iframe = vkPost.querySelector('iframe');
      expect(iframe).to.not.be.null;
      const obj = vkPost.implementation_;
      obj.unlayoutCallback();
      expect(vkPost.querySelector('iframe')).to.be.null;
      expect(obj.iframe_).to.be.null;
      expect(obj.unlayoutOnPause()).to.be.true;
    });
  });

  // Post tests

  it('post::requires data-hash', () => {
    const params = Object.assign({}, POST_PARAMS);
    delete params['hash'];
    return createAmpVkElement(params).should.eventually.be.rejectedWith(
        /The data-hash attribute is required for/);
  });

  it('post::requires data-owner-id', () => {
    const params = Object.assign({}, POST_PARAMS);
    delete params['owner-id'];
    return createAmpVkElement(params).should.eventually.be.rejectedWith(
        /The data-owner-id attribute is required for/);
  });

  it('post::requires data-post-id', () => {
    const params = Object.assign({}, POST_PARAMS);
    delete params['post-id'];
    return createAmpVkElement(params).should.eventually.be.rejectedWith(
        /The data-post-id attribute is required for/);
  });

  it('post::renders iframe in amp-vk', () => {
    return createAmpVkElement(POST_PARAMS).then(vkPost => {
      const iframe = vkPost.querySelector('iframe');
      expect(iframe).to.not.be.null;
    });
  });

  it('post::renders responsively', () => {
    return createAmpVkElement(POST_PARAMS, Layout.RESPONSIVE).then(vkPost => {
      const iframe = vkPost.querySelector('iframe');
      expect(iframe).to.not.be.null;
      expect(iframe.className).to.match(/i-amphtml-fill-content/);
    });
  });

  it('post::sets correct src url to the vk iFrame', () => {
    return createAmpVkElement(POST_PARAMS, Layout.RESPONSIVE).then(vkPost => {
      const impl = vkPost.implementation_;
      const iframe = vkPost.querySelector('iframe');
      const referrer = encodeURIComponent(vkPost.ownerDocument.referrer);
      const url = encodeURIComponent(
          vkPost.ownerDocument.location.href.replace(/#.*$/, '')
      );
      impl.onLayoutMeasure();
      const startWidth = impl.getLayoutWidth();
      const correctIFrameSrc = `https://vk.com/widget_post.php?app=0&width=100%25\
&_ver=1&owner_id=1&post_id=45616&hash=Yc8_Z9pnpg8aKMZbVcD-jK45eAk&amp=1\
&startWidth=${startWidth}&url=${url}&referrer=${referrer}&title=AMP%20Post`;
      expect(iframe).to.not.be.null;
      // The iframe src carries a trailing timestamp argument; strip it
      // before comparing against the expected URL.
      const timeArgPosition = iframe.src.lastIndexOf('&');
      const iframeSrcWithoutTime = iframe.src.substr(0, timeArgPosition);
      expect(iframeSrcWithoutTime).to.equal(correctIFrameSrc);
    });
  });

  // Poll tests

  it('poll::requires data-api-id', () => {
    const params = Object.assign({}, POLL_PARAMS);
    delete params['api-id'];
    return createAmpVkElement(params).should.eventually.be.rejectedWith(
        /The data-api-id attribute is required for/);
  });

  it('poll::requires data-poll-id', () => {
    const params = Object.assign({}, POLL_PARAMS);
    delete params['poll-id'];
    return createAmpVkElement(params).should.eventually.be.rejectedWith(
        /The data-poll-id attribute is required for/);
  });

  it('poll::renders iframe in amp-vk', () => {
    return createAmpVkElement(POLL_PARAMS).then(vkPoll => {
      const iframe = vkPoll.querySelector('iframe');
      expect(iframe).to.not.be.null;
    });
  });

  it('poll::renders responsively', () => {
    return createAmpVkElement(POLL_PARAMS, Layout.RESPONSIVE).then(vkPoll => {
      const iframe = vkPoll.querySelector('iframe');
      expect(iframe).to.not.be.null;
      expect(iframe.className).to.match(/i-amphtml-fill-content/);
    });
  });

  it('poll::sets correct src url to the vk iFrame', () => {
    return createAmpVkElement(POLL_PARAMS, Layout.RESPONSIVE).then(vkPoll => {
      const iframe = vkPoll.querySelector('iframe');
      const referrer = encodeURIComponent(vkPoll.ownerDocument.referrer);
      const url = encodeURIComponent(
          vkPoll.ownerDocument.location.href.replace(/#.*$/, '')
      );
      const correctIFrameSrc = `https://vk.com/al_widget_poll.php?\
app=6183531&width=100%25&_ver=1&poll_id=274086843_1a2a465f60fff4699f&amp=1\
&url=${url}&title=AMP%20Poll&description=&referrer=${referrer}`;
      expect(iframe).to.not.be.null;
      // Same trailing-timestamp stripping as the post test above.
      const timeArgPosition = iframe.src.lastIndexOf('&');
      const iframeSrcWithoutTime = iframe.src.substr(0, timeArgPosition);
      expect(iframeSrcWithoutTime).to.equal(correctIFrameSrc);
    });
  });

  it('both::resizes amp-vk element in response to postmessages', () => {
    return createAmpVkElement(POLL_PARAMS).then(vkPoll => {
      const impl = vkPoll.implementation_;
      const iframe = vkPoll.querySelector('iframe');
      const changeHeight = sandbox.spy(impl, 'changeHeight');
      const fakeHeight = 555;
      expect(iframe).to.not.be.null;
      generatePostMessage(vkPoll, iframe, fakeHeight);
      expect(changeHeight).to.be.calledOnce;
      expect(changeHeight.firstCall.args[0]).to.equal(fakeHeight);
    });
  });

  // Simulates the resize postMessage the VK widget sends from its iframe.
  function generatePostMessage(ins, iframe, height) {
    ins.implementation_.handleVkIframeMessage_({
      origin: 'https://vk.com',
      source: iframe.contentWindow,
      data: JSON.stringify([
        'resize',
        [height],
      ]),
    });
  }
});
engtat/amphtml
extensions/amp-vk/0.1/test/test-amp-vk.js
JavaScript
apache-2.0
7,420
/*
Copyright 2014 The Kubernetes Authors All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package userspace

import (
	"fmt"
	"net"
	"strconv"
	"strings"
	"sync"
	"sync/atomic"
	"syscall"
	"time"

	"github.com/golang/glog"
	"k8s.io/kubernetes/pkg/api"
	"k8s.io/kubernetes/pkg/proxy"
	"k8s.io/kubernetes/pkg/types"
	"k8s.io/kubernetes/pkg/util"
	"k8s.io/kubernetes/pkg/util/errors"
	"k8s.io/kubernetes/pkg/util/iptables"
)

// portal describes one address a service is exposed on.
type portal struct {
	ip         net.IP
	port       int
	isExternal bool
}

// serviceInfo is the per-service bookkeeping held in Proxier.serviceMap.
type serviceInfo struct {
	isAliveAtomic       int32 // Only access this with atomic ops
	portal              portal
	protocol            api.Protocol
	proxyPort           int
	socket              proxySocket
	timeout             time.Duration
	activeClients       *clientCache
	nodePort            int
	loadBalancerStatus  api.LoadBalancerStatus
	sessionAffinityType api.ServiceAffinity
	stickyMaxAgeMinutes int
	// Deprecated, but required for back-compat (including e2e)
	externalIPs []string
}

// setAlive stores the liveness flag; encoded as 0/1 for atomic access.
func (info *serviceInfo) setAlive(b bool) {
	var i int32
	if b {
		i = 1
	}
	atomic.StoreInt32(&info.isAliveAtomic, i)
}

func (info *serviceInfo) isAlive() bool {
	return atomic.LoadInt32(&info.isAliveAtomic) != 0
}

// logTimeout reports whether err is a network timeout, logging it if so.
func logTimeout(err error) bool {
	if e, ok := err.(net.Error); ok {
		if e.Timeout() {
			glog.V(3).Infof("connection to endpoint closed due to inactivity")
			return true
		}
	}
	return false
}

// Proxier is a simple proxy for TCP connections between a localhost:lport
// and services that provide the actual implementations.
type Proxier struct {
	loadBalancer  LoadBalancer
	mu            sync.Mutex // protects serviceMap
	serviceMap    map[proxy.ServicePortName]*serviceInfo
	syncPeriod    time.Duration
	portMapMutex  sync.Mutex
	portMap       map[portMapKey]*portMapValue
	numProxyLoops int32 // use atomic ops to access this; mostly for testing
	listenIP      net.IP
	iptables      iptables.Interface
	hostIP        net.IP
	proxyPorts    PortAllocator
}

// assert Proxier is a ProxyProvider
var _ proxy.ProxyProvider = &Proxier{}

// A key for the portMap.  The ip has to be a string because slices can't be map
// keys.
type portMapKey struct {
	ip       string
	port     int
	protocol api.Protocol
}

// String renders the key as "ip:port/protocol" for log messages.
func (k *portMapKey) String() string {
	return fmt.Sprintf("%s:%d/%s", k.ip, k.port, k.protocol)
}

// A value for the portMap
type portMapValue struct {
	owner proxy.ServicePortName
	// socket holds the claimed port open; only Close is ever needed here.
	socket interface {
		Close() error
	}
}

var (
	// ErrProxyOnLocalhost is returned by NewProxier if the user requests a proxier on
	// the loopback address. May be checked for by callers of NewProxier to know whether
	// the caller provided invalid input.
	ErrProxyOnLocalhost = fmt.Errorf("cannot proxy on localhost")
)

// IsProxyLocked returns true if the proxy could not acquire the lock on iptables.
func IsProxyLocked(err error) bool {
	return strings.Contains(err.Error(), "holding the xtables lock")
}

// NewProxier returns a new Proxier given a LoadBalancer and an address on
// which to listen.  Because of the iptables logic, It is assumed that there
// is only a single Proxier active on a machine. An error will be returned if
// the proxier cannot be started due to an invalid ListenIP (loopback) or
// if iptables fails to update or acquire the initial lock. Once a proxier is
// created, it will keep iptables up to date in the background and will not
// terminate if a particular iptables call fails.
func NewProxier(loadBalancer LoadBalancer, listenIP net.IP, iptables iptables.Interface, pr util.PortRange, syncPeriod time.Duration) (*Proxier, error) { if listenIP.Equal(localhostIPv4) || listenIP.Equal(localhostIPv6) { return nil, ErrProxyOnLocalhost } hostIP, err := util.ChooseHostInterface() if err != nil { return nil, fmt.Errorf("failed to select a host interface: %v", err) } err = setRLimit(64 * 1000) if err != nil { return nil, fmt.Errorf("failed to set open file handler limit", err) } proxyPorts := newPortAllocator(pr) glog.V(2).Infof("Setting proxy IP to %v and initializing iptables", hostIP) return createProxier(loadBalancer, listenIP, iptables, hostIP, proxyPorts, syncPeriod) } func setRLimit(limit uint64) error { return syscall.Setrlimit(syscall.RLIMIT_NOFILE, &syscall.Rlimit{Max: limit, Cur: limit}) } func createProxier(loadBalancer LoadBalancer, listenIP net.IP, iptables iptables.Interface, hostIP net.IP, proxyPorts PortAllocator, syncPeriod time.Duration) (*Proxier, error) { // convenient to pass nil for tests.. if proxyPorts == nil { proxyPorts = newPortAllocator(util.PortRange{}) } // Set up the iptables foundations we need. if err := iptablesInit(iptables); err != nil { return nil, fmt.Errorf("failed to initialize iptables: %v", err) } // Flush old iptables rules (since the bound ports will be invalid after a restart). // When OnUpdate() is first called, the rules will be recreated. if err := iptablesFlush(iptables); err != nil { return nil, fmt.Errorf("failed to flush iptables: %v", err) } return &Proxier{ loadBalancer: loadBalancer, serviceMap: make(map[proxy.ServicePortName]*serviceInfo), portMap: make(map[portMapKey]*portMapValue), syncPeriod: syncPeriod, listenIP: listenIP, iptables: iptables, hostIP: hostIP, proxyPorts: proxyPorts, }, nil } // CleanupLeftovers removes all iptables rules and chains created by the Proxier // It returns true if an error was encountered. Errors are logged. 
func CleanupLeftovers(ipt iptables.Interface) (encounteredError bool) {
	// NOTE: Warning, this needs to be kept in sync with the userspace Proxier,
	// we want to ensure we remove all of the iptables rules it creates.
	// Currently they are all in iptablesInit()
	// Delete Rules first, then Flush and Delete Chains
	args := []string{"-m", "comment", "--comment", "handle ClusterIPs; NOTE: this must be before the NodePort rules"}
	if err := ipt.DeleteRule(iptables.TableNAT, iptables.ChainOutput, append(args, "-j", string(iptablesHostPortalChain))...); err != nil {
		glog.Errorf("Error removing userspace rule: %v", err)
		encounteredError = true
	}
	if err := ipt.DeleteRule(iptables.TableNAT, iptables.ChainPrerouting, append(args, "-j", string(iptablesContainerPortalChain))...); err != nil {
		glog.Errorf("Error removing userspace rule: %v", err)
		encounteredError = true
	}
	args = []string{"-m", "addrtype", "--dst-type", "LOCAL"}
	args = append(args, "-m", "comment", "--comment", "handle service NodePorts; NOTE: this must be the last rule in the chain")
	if err := ipt.DeleteRule(iptables.TableNAT, iptables.ChainOutput, append(args, "-j", string(iptablesHostNodePortChain))...); err != nil {
		glog.Errorf("Error removing userspace rule: %v", err)
		encounteredError = true
	}
	if err := ipt.DeleteRule(iptables.TableNAT, iptables.ChainPrerouting, append(args, "-j", string(iptablesContainerNodePortChain))...); err != nil {
		glog.Errorf("Error removing userspace rule: %v", err)
		encounteredError = true
	}

	// flush and delete chains.
	chains := []iptables.Chain{iptablesContainerPortalChain, iptablesHostPortalChain, iptablesHostNodePortChain, iptablesContainerNodePortChain}
	for _, c := range chains {
		// flush chain, then if successful delete; delete will fail if flush fails.
		if err := ipt.FlushChain(iptables.TableNAT, c); err != nil {
			glog.Errorf("Error flushing userspace chain: %v", err)
			encounteredError = true
		} else {
			if err = ipt.DeleteChain(iptables.TableNAT, c); err != nil {
				glog.Errorf("Error deleting userspace chain: %v", err)
				encounteredError = true
			}
		}
	}
	return encounteredError
}

// Sync is called to immediately synchronize the proxier state to iptables
func (proxier *Proxier) Sync() {
	if err := iptablesInit(proxier.iptables); err != nil {
		glog.Errorf("Failed to ensure iptables: %v", err)
	}
	proxier.ensurePortals()
	proxier.cleanupStaleStickySessions()
}

// SyncLoop runs periodic work.  This is expected to run as a goroutine or as the main loop of the app.  It does not return.
func (proxier *Proxier) SyncLoop() {
	t := time.NewTicker(proxier.syncPeriod)
	defer t.Stop()
	for {
		<-t.C
		glog.V(6).Infof("Periodic sync")
		proxier.Sync()
	}
}

// Ensure that portals exist for all services.
func (proxier *Proxier) ensurePortals() {
	proxier.mu.Lock()
	defer proxier.mu.Unlock()
	// NB: This does not remove rules that should not be present.
	for name, info := range proxier.serviceMap {
		err := proxier.openPortal(name, info)
		if err != nil {
			glog.Errorf("Failed to ensure portal for %q: %v", name, err)
		}
	}
}

// clean up any stale sticky session records in the hash map.
func (proxier *Proxier) cleanupStaleStickySessions() {
	proxier.mu.Lock()
	defer proxier.mu.Unlock()
	for name := range proxier.serviceMap {
		proxier.loadBalancer.CleanupStaleStickySessions(name)
	}
}

// This assumes proxier.mu is not locked.
func (proxier *Proxier) stopProxy(service proxy.ServicePortName, info *serviceInfo) error {
	proxier.mu.Lock()
	defer proxier.mu.Unlock()
	return proxier.stopProxyInternal(service, info)
}

// This assumes proxier.mu is locked.
func (proxier *Proxier) stopProxyInternal(service proxy.ServicePortName, info *serviceInfo) error {
	// Remove the map entry first so new lookups miss, then mark dead so the
	// running ProxyLoop exits, then close the socket and release the port.
	delete(proxier.serviceMap, service)
	info.setAlive(false)
	err := info.socket.Close()
	port := info.socket.ListenPort()
	proxier.proxyPorts.Release(port)
	return err
}

// getServiceInfo returns the serviceInfo for service (and whether it exists),
// under the serviceMap lock.
func (proxier *Proxier) getServiceInfo(service proxy.ServicePortName) (*serviceInfo, bool) {
	proxier.mu.Lock()
	defer proxier.mu.Unlock()
	info, ok := proxier.serviceMap[service]
	return info, ok
}

// setServiceInfo installs (or replaces) the serviceInfo for service, under the
// serviceMap lock.
func (proxier *Proxier) setServiceInfo(service proxy.ServicePortName, info *serviceInfo) {
	proxier.mu.Lock()
	defer proxier.mu.Unlock()
	proxier.serviceMap[service] = info
}

// addServiceOnPort starts listening for a new service, returning the serviceInfo.
// Pass proxyPort=0 to allocate a random port. The timeout only applies to UDP
// connections, for now.
func (proxier *Proxier) addServiceOnPort(service proxy.ServicePortName, protocol api.Protocol, proxyPort int, timeout time.Duration) (*serviceInfo, error) {
	sock, err := newProxySocket(protocol, proxier.listenIP, proxyPort)
	if err != nil {
		return nil, err
	}
	// Recover the actual port bound (proxyPort may have been 0 = "any").
	_, portStr, err := net.SplitHostPort(sock.Addr().String())
	if err != nil {
		sock.Close()
		return nil, err
	}
	portNum, err := strconv.Atoi(portStr)
	if err != nil {
		sock.Close()
		return nil, err
	}
	si := &serviceInfo{
		isAliveAtomic:       1,
		proxyPort:           portNum,
		protocol:            protocol,
		socket:              sock,
		timeout:             timeout,
		activeClients:       newClientCache(),
		sessionAffinityType: api.ServiceAffinityNone, // default
		stickyMaxAgeMinutes: 180,                     // TODO: parameterize this in the API.
	}
	proxier.setServiceInfo(service, si)

	glog.V(2).Infof("Proxying for service %q on %s port %d", service, protocol, portNum)
	// The loop runs until stopProxyInternal flips isAlive and closes the socket.
	go func(service proxy.ServicePortName, proxier *Proxier) {
		defer util.HandleCrash()
		atomic.AddInt32(&proxier.numProxyLoops, 1)
		sock.ProxyLoop(service, si, proxier)
		atomic.AddInt32(&proxier.numProxyLoops, -1)
	}(service, proxier)

	return si, nil
}

// How long we leave idle UDP connections open.
const udpIdleTimeout = 1 * time.Second

// OnUpdate manages the active set of service proxies.
// Active service proxies are reinitialized if found in the update set or
// shutdown if missing from the update set.
func (proxier *Proxier) OnServiceUpdate(services []api.Service) {
	glog.V(4).Infof("Received update notice: %+v", services)
	activeServices := make(map[proxy.ServicePortName]bool) // use a map as a set
	for i := range services {
		service := &services[i]

		// if ClusterIP is "None" or empty, skip proxying
		if !api.IsServiceIPSet(service) {
			glog.V(3).Infof("Skipping service %s due to clusterIP = %q", types.NamespacedName{Namespace: service.Namespace, Name: service.Name}, service.Spec.ClusterIP)
			continue
		}

		for i := range service.Spec.Ports {
			servicePort := &service.Spec.Ports[i]

			serviceName := proxy.ServicePortName{NamespacedName: types.NamespacedName{Namespace: service.Namespace, Name: service.Name}, Port: servicePort.Name}
			activeServices[serviceName] = true
			serviceIP := net.ParseIP(service.Spec.ClusterIP)
			info, exists := proxier.getServiceInfo(serviceName)
			// TODO: check health of the socket? What if ProxyLoop exited?
			if exists && sameConfig(info, service, servicePort) {
				// Nothing changed.
				continue
			}
			if exists {
				// Config changed: tear the old proxy down before rebuilding.
				glog.V(4).Infof("Something changed for service %q: stopping it", serviceName)
				err := proxier.closePortal(serviceName, info)
				if err != nil {
					glog.Errorf("Failed to close portal for %q: %v", serviceName, err)
				}
				err = proxier.stopProxy(serviceName, info)
				if err != nil {
					glog.Errorf("Failed to stop service %q: %v", serviceName, err)
				}
			}
			proxyPort, err := proxier.proxyPorts.AllocateNext()
			if err != nil {
				glog.Errorf("failed to allocate proxy port for service %q: %v", serviceName, err)
				continue
			}
			glog.V(1).Infof("Adding new service %q at %s:%d/%s", serviceName, serviceIP, servicePort.Port, servicePort.Protocol)
			info, err = proxier.addServiceOnPort(serviceName, servicePort.Protocol, proxyPort, udpIdleTimeout)
			if err != nil {
				glog.Errorf("Failed to start proxy for %q: %v", serviceName, err)
				continue
			}
			info.portal.ip = serviceIP
			info.portal.port = servicePort.Port
			info.externalIPs = service.Spec.ExternalIPs
			// Deep-copy in case the service instance changes
			info.loadBalancerStatus = *api.LoadBalancerStatusDeepCopy(&service.Status.LoadBalancer)
			info.nodePort = servicePort.NodePort
			info.sessionAffinityType = service.Spec.SessionAffinity
			glog.V(4).Infof("info: %+v", info)

			err = proxier.openPortal(serviceName, info)
			if err != nil {
				glog.Errorf("Failed to open portal for %q: %v", serviceName, err)
			}
			proxier.loadBalancer.NewService(serviceName, info.sessionAffinityType, info.stickyMaxAgeMinutes)
		}
	}
	// Reap services that disappeared from the update.
	proxier.mu.Lock()
	defer proxier.mu.Unlock()
	for name, info := range proxier.serviceMap {
		if !activeServices[name] {
			glog.V(1).Infof("Stopping service %q", name)
			err := proxier.closePortal(name, info)
			if err != nil {
				glog.Errorf("Failed to close portal for %q: %v", name, err)
			}
			err = proxier.stopProxyInternal(name, info)
			if err != nil {
				glog.Errorf("Failed to stop service %q: %v", name, err)
			}
		}
	}
}

// sameConfig reports whether info already reflects the given service port
// (protocol, ports, IPs, LB status, and session affinity all unchanged).
func sameConfig(info *serviceInfo, service *api.Service, port *api.ServicePort) bool {
	if info.protocol != port.Protocol || info.portal.port != port.Port || info.nodePort != port.NodePort {
		return false
	}
	if !info.portal.ip.Equal(net.ParseIP(service.Spec.ClusterIP)) {
		return false
	}
	if !ipsEqual(info.externalIPs, service.Spec.ExternalIPs) {
		return false
	}
	if !api.LoadBalancerStatusEqual(&info.loadBalancerStatus, &service.Status.LoadBalancer) {
		return false
	}
	if info.sessionAffinityType != service.Spec.SessionAffinity {
		return false
	}
	return true
}

// ipsEqual reports whether two string slices are element-wise equal
// (order-sensitive).
func ipsEqual(lhs, rhs []string) bool {
	if len(lhs) != len(rhs) {
		return false
	}
	for i := range lhs {
		if lhs[i] != rhs[i] {
			return false
		}
	}
	return true
}

// openPortal opens iptables portals for the cluster IP, every externalIP,
// every LB ingress IP, and (if set) the node port of the service.
func (proxier *Proxier) openPortal(service proxy.ServicePortName, info *serviceInfo) error {
	err := proxier.openOnePortal(info.portal, info.protocol, proxier.listenIP, info.proxyPort, service)
	if err != nil {
		return err
	}
	for _, publicIP := range info.externalIPs {
		err = proxier.openOnePortal(portal{net.ParseIP(publicIP), info.portal.port, true}, info.protocol, proxier.listenIP, info.proxyPort, service)
		if err != nil {
			return err
		}
	}
	for _, ingress := range info.loadBalancerStatus.Ingress {
		if ingress.IP != "" {
			err = proxier.openOnePortal(portal{net.ParseIP(ingress.IP), info.portal.port, false}, info.protocol, proxier.listenIP, info.proxyPort, service)
			if err != nil {
				return err
			}
		}
	}
	if info.nodePort != 0 {
		err = proxier.openNodePort(info.nodePort, info.protocol, proxier.listenIP, info.proxyPort, service)
		if err != nil {
			return err
		}
	}
	return nil
}

// openOnePortal installs the iptables rules for a single portal IP:port.
func (proxier *Proxier) openOnePortal(portal portal, protocol api.Protocol, proxyIP net.IP, proxyPort int, name proxy.ServicePortName) error {
	if local, err := isLocalIP(portal.ip); err != nil {
		return fmt.Errorf("can't determine if IP is local, assuming not: %v", err)
	} else if local {
		// Portal IP is assigned to this host: claim the port so nothing else binds it.
		err := proxier.claimNodePort(portal.ip, portal.port, protocol, name)
		if err != nil {
			return err
		}
	}

	// Handle traffic from containers.
	args := proxier.iptablesContainerPortalArgs(portal.ip, portal.isExternal, false, portal.port, protocol, proxyIP, proxyPort, name)
	existed, err := proxier.iptables.EnsureRule(iptables.Append, iptables.TableNAT, iptablesContainerPortalChain, args...)
	if err != nil {
		glog.Errorf("Failed to install iptables %s rule for service %q, args:%v", iptablesContainerPortalChain, name, args)
		return err
	}
	if !existed {
		glog.V(3).Infof("Opened iptables from-containers portal for service %q on %s %s:%d", name, protocol, portal.ip, portal.port)
	}
	if portal.isExternal {
		// External IPs get extra rules for locally-destined traffic.
		args := proxier.iptablesContainerPortalArgs(portal.ip, false, true, portal.port, protocol, proxyIP, proxyPort, name)
		existed, err := proxier.iptables.EnsureRule(iptables.Append, iptables.TableNAT, iptablesContainerPortalChain, args...)
		if err != nil {
			glog.Errorf("Failed to install iptables %s rule that opens service %q for local traffic, args:%v", iptablesContainerPortalChain, name, args)
			return err
		}
		if !existed {
			glog.V(3).Infof("Opened iptables from-containers portal for service %q on %s %s:%d for local traffic", name, protocol, portal.ip, portal.port)
		}

		args = proxier.iptablesHostPortalArgs(portal.ip, true, portal.port, protocol, proxyIP, proxyPort, name)
		existed, err = proxier.iptables.EnsureRule(iptables.Append, iptables.TableNAT, iptablesHostPortalChain, args...)
		if err != nil {
			glog.Errorf("Failed to install iptables %s rule for service %q for dst-local traffic", iptablesHostPortalChain, name)
			return err
		}
		if !existed {
			glog.V(3).Infof("Opened iptables from-host portal for service %q on %s %s:%d for dst-local traffic", name, protocol, portal.ip, portal.port)
		}
		return nil
	}

	// Handle traffic from the host.
	args = proxier.iptablesHostPortalArgs(portal.ip, false, portal.port, protocol, proxyIP, proxyPort, name)
	existed, err = proxier.iptables.EnsureRule(iptables.Append, iptables.TableNAT, iptablesHostPortalChain, args...)
	if err != nil {
		glog.Errorf("Failed to install iptables %s rule for service %q", iptablesHostPortalChain, name)
		return err
	}
	if !existed {
		glog.V(3).Infof("Opened iptables from-host portal for service %q on %s %s:%d", name, protocol, portal.ip, portal.port)
	}
	return nil
}

// Marks a port as being owned by a particular service, or returns error if already claimed.
// Idempotent: reclaiming with the same owner is not an error
func (proxier *Proxier) claimNodePort(ip net.IP, port int, protocol api.Protocol, owner proxy.ServicePortName) error {
	proxier.portMapMutex.Lock()
	defer proxier.portMapMutex.Unlock()

	// TODO: We could pre-populate some reserved ports into portMap and/or blacklist some well-known ports

	key := portMapKey{ip: ip.String(), port: port, protocol: protocol}
	existing, found := proxier.portMap[key]
	if !found {
		// Hold the actual port open, even though we use iptables to redirect
		// it.  This ensures that a) it's safe to take and b) that stays true.
		// NOTE: We should not need to have a real listen()ing socket - bind()
		// should be enough, but I can't figure out a way to e2e test without
		// it.  Tools like 'ss' and 'netstat' do not show sockets that are
		// bind()ed but not listen()ed, and at least the default debian netcat
		// has no way to avoid about 10 seconds of retries.
		socket, err := newProxySocket(protocol, ip, port)
		if err != nil {
			return fmt.Errorf("can't open node port for %s: %v", key.String(), err)
		}
		proxier.portMap[key] = &portMapValue{owner: owner, socket: socket}
		glog.V(2).Infof("Claimed local port %s", key.String())
		return nil
	}
	if existing.owner == owner {
		// We are idempotent
		return nil
	}
	return fmt.Errorf("Port conflict detected on port %s.  %v vs %v", key.String(), owner, existing)
}

// Release a claim on a port.  Returns an error if the owner does not match the claim.
// Tolerates release on an unclaimed port, to simplify cleanup of failed allocations.
func (proxier *Proxier) releaseNodePort(ip net.IP, port int, protocol api.Protocol, owner proxy.ServicePortName) error {
	proxier.portMapMutex.Lock()
	defer proxier.portMapMutex.Unlock()

	key := portMapKey{ip: ip.String(), port: port, protocol: protocol}
	existing, found := proxier.portMap[key]
	if !found {
		// We tolerate this, it happens if we are cleaning up a failed allocation
		glog.Infof("Ignoring release on unowned port: %v", key)
		return nil
	}
	if existing.owner != owner {
		return fmt.Errorf("Port conflict detected on port %v (unowned unlock).  %v vs %v", key, owner, existing)
	}
	delete(proxier.portMap, key)
	existing.socket.Close()
	return nil
}

// openNodePort claims the node port and installs the NodePort iptables rules.
func (proxier *Proxier) openNodePort(nodePort int, protocol api.Protocol, proxyIP net.IP, proxyPort int, name proxy.ServicePortName) error {
	// TODO: Do we want to allow containers to access public services?  Probably yes.
	// TODO: We could refactor this to be the same code as portal, but with IP == nil

	err := proxier.claimNodePort(nil, nodePort, protocol, name)
	if err != nil {
		return err
	}

	// Handle traffic from containers.
	args := proxier.iptablesContainerNodePortArgs(nodePort, protocol, proxyIP, proxyPort, name)
	existed, err := proxier.iptables.EnsureRule(iptables.Append, iptables.TableNAT, iptablesContainerNodePortChain, args...)
	if err != nil {
		glog.Errorf("Failed to install iptables %s rule for service %q", iptablesContainerNodePortChain, name)
		return err
	}
	if !existed {
		glog.Infof("Opened iptables from-containers public port for service %q on %s port %d", name, protocol, nodePort)
	}

	// Handle traffic from the host.
	args = proxier.iptablesHostNodePortArgs(nodePort, protocol, proxyIP, proxyPort, name)
	existed, err = proxier.iptables.EnsureRule(iptables.Append, iptables.TableNAT, iptablesHostNodePortChain, args...)
	if err != nil {
		glog.Errorf("Failed to install iptables %s rule for service %q", iptablesHostNodePortChain, name)
		return err
	}
	if !existed {
		glog.Infof("Opened iptables from-host public port for service %q on %s port %d", name, protocol, nodePort)
	}
	return nil
}

// closePortal removes all iptables portals opened by openPortal, aggregating
// any errors rather than stopping at the first one.
func (proxier *Proxier) closePortal(service proxy.ServicePortName, info *serviceInfo) error {
	// Collect errors and report them all at the end.
	el := proxier.closeOnePortal(info.portal, info.protocol, proxier.listenIP, info.proxyPort, service)
	for _, publicIP := range info.externalIPs {
		el = append(el, proxier.closeOnePortal(portal{net.ParseIP(publicIP), info.portal.port, true}, info.protocol, proxier.listenIP, info.proxyPort, service)...)
	}
	for _, ingress := range info.loadBalancerStatus.Ingress {
		if ingress.IP != "" {
			el = append(el, proxier.closeOnePortal(portal{net.ParseIP(ingress.IP), info.portal.port, false}, info.protocol, proxier.listenIP, info.proxyPort, service)...)
		}
	}
	if info.nodePort != 0 {
		el = append(el, proxier.closeNodePort(info.nodePort, info.protocol, proxier.listenIP, info.proxyPort, service)...)
	}
	if len(el) == 0 {
		glog.V(3).Infof("Closed iptables portals for service %q", service)
	} else {
		glog.Errorf("Some errors closing iptables portals for service %q", service)
	}
	return errors.NewAggregate(el)
}

// closeOnePortal is the inverse of openOnePortal; returns the errors hit.
func (proxier *Proxier) closeOnePortal(portal portal, protocol api.Protocol, proxyIP net.IP, proxyPort int, name proxy.ServicePortName) []error {
	el := []error{}

	if local, err := isLocalIP(portal.ip); err != nil {
		el = append(el, fmt.Errorf("can't determine if IP is local, assuming not: %v", err))
	} else if local {
		if err := proxier.releaseNodePort(nil, portal.port, protocol, name); err != nil {
			el = append(el, err)
		}
	}

	// Handle traffic from containers.
	args := proxier.iptablesContainerPortalArgs(portal.ip, portal.isExternal, false, portal.port, protocol, proxyIP, proxyPort, name)
	if err := proxier.iptables.DeleteRule(iptables.TableNAT, iptablesContainerPortalChain, args...); err != nil {
		glog.Errorf("Failed to delete iptables %s rule for service %q", iptablesContainerPortalChain, name)
		el = append(el, err)
	}

	if portal.isExternal {
		args := proxier.iptablesContainerPortalArgs(portal.ip, false, true, portal.port, protocol, proxyIP, proxyPort, name)
		if err := proxier.iptables.DeleteRule(iptables.TableNAT, iptablesContainerPortalChain, args...); err != nil {
			glog.Errorf("Failed to delete iptables %s rule for service %q", iptablesContainerPortalChain, name)
			el = append(el, err)
		}

		args = proxier.iptablesHostPortalArgs(portal.ip, true, portal.port, protocol, proxyIP, proxyPort, name)
		if err := proxier.iptables.DeleteRule(iptables.TableNAT, iptablesHostPortalChain, args...); err != nil {
			glog.Errorf("Failed to delete iptables %s rule for service %q", iptablesHostPortalChain, name)
			el = append(el, err)
		}
		return el
	}

	// Handle traffic from the host (portalIP is not external).
	args = proxier.iptablesHostPortalArgs(portal.ip, false, portal.port, protocol, proxyIP, proxyPort, name)
	if err := proxier.iptables.DeleteRule(iptables.TableNAT, iptablesHostPortalChain, args...); err != nil {
		glog.Errorf("Failed to delete iptables %s rule for service %q", iptablesHostPortalChain, name)
		el = append(el, err)
	}

	return el
}

// closeNodePort removes the NodePort rules and releases the claimed port.
func (proxier *Proxier) closeNodePort(nodePort int, protocol api.Protocol, proxyIP net.IP, proxyPort int, name proxy.ServicePortName) []error {
	el := []error{}

	// Handle traffic from containers.
	args := proxier.iptablesContainerNodePortArgs(nodePort, protocol, proxyIP, proxyPort, name)
	if err := proxier.iptables.DeleteRule(iptables.TableNAT, iptablesContainerNodePortChain, args...); err != nil {
		glog.Errorf("Failed to delete iptables %s rule for service %q", iptablesContainerNodePortChain, name)
		el = append(el, err)
	}

	// Handle traffic from the host.
	args = proxier.iptablesHostNodePortArgs(nodePort, protocol, proxyIP, proxyPort, name)
	if err := proxier.iptables.DeleteRule(iptables.TableNAT, iptablesHostNodePortChain, args...); err != nil {
		glog.Errorf("Failed to delete iptables %s rule for service %q", iptablesHostNodePortChain, name)
		el = append(el, err)
	}

	if err := proxier.releaseNodePort(nil, nodePort, protocol, name); err != nil {
		el = append(el, err)
	}

	return el
}

// isLocalIP reports whether ip is assigned to one of this host's interfaces.
func isLocalIP(ip net.IP) (bool, error) {
	addrs, err := net.InterfaceAddrs()
	if err != nil {
		return false, err
	}
	for i := range addrs {
		intf, _, err := net.ParseCIDR(addrs[i].String())
		if err != nil {
			return false, err
		}
		if ip.Equal(intf) {
			return true, nil
		}
	}
	return false, nil
}

// See comments in the *PortalArgs() functions for some details about why we
// use two chains for portals.
var iptablesContainerPortalChain iptables.Chain = "KUBE-PORTALS-CONTAINER"
var iptablesHostPortalChain iptables.Chain = "KUBE-PORTALS-HOST"

// Chains for NodePort services
var iptablesContainerNodePortChain iptables.Chain = "KUBE-NODEPORT-CONTAINER"
var iptablesHostNodePortChain iptables.Chain = "KUBE-NODEPORT-HOST"

// Ensure that the iptables infrastructure we use is set up.  This can safely be called periodically.
func iptablesInit(ipt iptables.Interface) error {
	// TODO: There is almost certainly room for optimization here.  E.g. If
	// we knew the service-cluster-ip-range CIDR we could fast-track outbound packets not
	// destined for a service. There's probably more, help wanted.

	// Danger - order of these rules matters here:
	//
	// We match portal rules first, then NodePort rules.  For NodePort rules, we filter primarily on --dst-type LOCAL,
	// because we want to listen on all local addresses, but don't match internet traffic with the same dst port number.
	//
	// There is one complication (per thockin):
	// -m addrtype --dst-type LOCAL is what we want except that it is broken (by intent without foresight to our usecase)
	// on at least GCE. Specifically, GCE machines have a daemon which learns what external IPs are forwarded to that
	// machine, and configure a local route for that IP, making a match for --dst-type LOCAL when we don't want it to.
	// Removing the route gives correct behavior until the daemon recreates it.
	// Killing the daemon is an option, but means that any non-kubernetes use of the machine with external IP will be broken.
	//
	// This applies to IPs on GCE that are actually from a load-balancer; they will be categorized as LOCAL.
	// _If_ the chains were in the wrong order, and the LB traffic had dst-port == a NodePort on some other service,
	// the NodePort would take priority (incorrectly).
	// This is unlikely (and would only affect outgoing traffic from the cluster to the load balancer, which seems
	// doubly-unlikely), but we need to be careful to keep the rules in the right order.
	args := []string{ /* service-cluster-ip-range matching could go here */ }
	args = append(args, "-m", "comment", "--comment", "handle ClusterIPs; NOTE: this must be before the NodePort rules")
	if _, err := ipt.EnsureChain(iptables.TableNAT, iptablesContainerPortalChain); err != nil {
		return err
	}
	if _, err := ipt.EnsureRule(iptables.Prepend, iptables.TableNAT, iptables.ChainPrerouting, append(args, "-j", string(iptablesContainerPortalChain))...); err != nil {
		return err
	}
	if _, err := ipt.EnsureChain(iptables.TableNAT, iptablesHostPortalChain); err != nil {
		return err
	}
	if _, err := ipt.EnsureRule(iptables.Prepend, iptables.TableNAT, iptables.ChainOutput, append(args, "-j", string(iptablesHostPortalChain))...); err != nil {
		return err
	}

	// This set of rules matches broadly (addrtype & destination port), and therefore must come after the portal rules
	args = []string{"-m", "addrtype", "--dst-type", "LOCAL"}
	args = append(args, "-m", "comment", "--comment", "handle service NodePorts; NOTE: this must be the last rule in the chain")
	if _, err := ipt.EnsureChain(iptables.TableNAT, iptablesContainerNodePortChain); err != nil {
		return err
	}
	if _, err := ipt.EnsureRule(iptables.Append, iptables.TableNAT, iptables.ChainPrerouting, append(args, "-j", string(iptablesContainerNodePortChain))...); err != nil {
		return err
	}
	if _, err := ipt.EnsureChain(iptables.TableNAT, iptablesHostNodePortChain); err != nil {
		return err
	}
	if _, err := ipt.EnsureRule(iptables.Append, iptables.TableNAT, iptables.ChainOutput, append(args, "-j", string(iptablesHostNodePortChain))...); err != nil {
		return err
	}

	// TODO: Verify order of rules.
	return nil
}

// Flush all of our custom iptables rules.
func iptablesFlush(ipt iptables.Interface) error {
	el := []error{}
	if err := ipt.FlushChain(iptables.TableNAT, iptablesContainerPortalChain); err != nil {
		el = append(el, err)
	}
	if err := ipt.FlushChain(iptables.TableNAT, iptablesHostPortalChain); err != nil {
		el = append(el, err)
	}
	if err := ipt.FlushChain(iptables.TableNAT, iptablesContainerNodePortChain); err != nil {
		el = append(el, err)
	}
	if err := ipt.FlushChain(iptables.TableNAT, iptablesHostNodePortChain); err != nil {
		el = append(el, err)
	}
	if len(el) != 0 {
		glog.Errorf("Some errors flushing old iptables portals: %v", el)
	}
	return errors.NewAggregate(el)
}

// Used below.
var zeroIPv4 = net.ParseIP("0.0.0.0")
var localhostIPv4 = net.ParseIP("127.0.0.1")

var zeroIPv6 = net.ParseIP("::0")
var localhostIPv6 = net.ParseIP("::1")

// Build a slice of iptables args that are common to from-container and from-host portal rules.
func iptablesCommonPortalArgs(destIP net.IP, addPhysicalInterfaceMatch bool, addDstLocalMatch bool, destPort int, protocol api.Protocol, service proxy.ServicePortName) []string {
	// This list needs to include all fields as they are eventually spit out
	// by iptables-save.  This is because some systems do not support the
	// 'iptables -C' arg, and so fall back on parsing iptables-save output.
	// If this does not match, it will not pass the check.  For example:
	// adding the /32 on the destination IP arg is not strictly required,
	// but causes this list to not match the final iptables-save output.
	// This is fragile and I hope one day we can stop supporting such old
	// iptables versions.
	args := []string{
		"-m", "comment",
		"--comment", service.String(),
		"-p", strings.ToLower(string(protocol)),
		"-m", strings.ToLower(string(protocol)),
		"--dport", fmt.Sprintf("%d", destPort),
	}

	if destIP != nil {
		args = append(args, "-d", fmt.Sprintf("%s/32", destIP.String()))
	}

	if addPhysicalInterfaceMatch {
		args = append(args, "-m", "physdev", "!", "--physdev-is-in")
	}

	if addDstLocalMatch {
		args = append(args, "-m", "addrtype", "--dst-type", "LOCAL")
	}

	return args
}

// Build a slice of iptables args for a from-container portal rule.
func (proxier *Proxier) iptablesContainerPortalArgs(destIP net.IP, addPhysicalInterfaceMatch bool, addDstLocalMatch bool, destPort int, protocol api.Protocol, proxyIP net.IP, proxyPort int, service proxy.ServicePortName) []string {
	args := iptablesCommonPortalArgs(destIP, addPhysicalInterfaceMatch, addDstLocalMatch, destPort, protocol, service)

	// This is tricky.
	//
	// If the proxy is bound (see Proxier.listenIP) to 0.0.0.0 ("any
	// interface") we want to use REDIRECT, which sends traffic to the
	// "primary address of the incoming interface" which means the container
	// bridge, if there is one.  When the response comes, it comes from that
	// same interface, so the NAT matches and the response packet is
	// correct.  This matters for UDP, since there is no per-connection port
	// number.
	//
	// The alternative would be to use DNAT, except that it doesn't work
	// (empirically):
	//   * DNAT to 127.0.0.1 = Packets just disappear - this seems to be a
	//     well-known limitation of iptables.
	//   * DNAT to eth0's IP = Response packets come from the bridge, which
	//     breaks the NAT, and makes things like DNS not accept them.  If
	//     this could be resolved, it would simplify all of this code.
	//
	// If the proxy is bound to a specific IP, then we have to use DNAT to
	// that IP.  Unlike the previous case, this works because the proxy is
	// ONLY listening on that IP, not the bridge.
	//
	// Why would anyone bind to an address that is not inclusive of
	// localhost?  Apparently some cloud environments have their public IP
	// exposed as a real network interface AND do not have firewalling.  We
	// don't want to expose everything out to the world.
	//
	// Unfortunately, I don't know of any way to listen on some (N > 1)
	// interfaces but not ALL interfaces, short of doing it manually, and
	// this is simpler than that.
	//
	// If the proxy is bound to localhost only, all of this is broken.  Not
	// allowed.
	if proxyIP.Equal(zeroIPv4) || proxyIP.Equal(zeroIPv6) {
		// TODO: Can we REDIRECT with IPv6?
		args = append(args, "-j", "REDIRECT", "--to-ports", fmt.Sprintf("%d", proxyPort))
	} else {
		// TODO: Can we DNAT with IPv6?
		args = append(args, "-j", "DNAT", "--to-destination", net.JoinHostPort(proxyIP.String(), strconv.Itoa(proxyPort)))
	}
	return args
}

// Build a slice of iptables args for a from-host portal rule.
func (proxier *Proxier) iptablesHostPortalArgs(destIP net.IP, addDstLocalMatch bool, destPort int, protocol api.Protocol, proxyIP net.IP, proxyPort int, service proxy.ServicePortName) []string {
	args := iptablesCommonPortalArgs(destIP, false, addDstLocalMatch, destPort, protocol, service)

	// This is tricky.
	//
	// If the proxy is bound (see Proxier.listenIP) to 0.0.0.0 ("any
	// interface") we want to do the same as from-container traffic and use
	// REDIRECT.  Except that it doesn't work (empirically).  REDIRECT on
	// localpackets sends the traffic to localhost (special case, but it is
	// documented) but the response comes from the eth0 IP (not sure why,
	// truthfully), which makes DNS unhappy.
	//
	// So we have to use DNAT.  DNAT to 127.0.0.1 can't work for the same
	// reason.
	//
	// So we do our best to find an interface that is not a loopback and
	// DNAT to that.  This works (again, empirically).
	//
	// If the proxy is bound to a specific IP, then we have to use DNAT to
	// that IP.  Unlike the previous case, this works because the proxy is
	// ONLY listening on that IP, not the bridge.
	//
	// If the proxy is bound to localhost only, this should work, but we
	// don't allow it for now.
	if proxyIP.Equal(zeroIPv4) || proxyIP.Equal(zeroIPv6) {
		proxyIP = proxier.hostIP
	}
	// TODO: Can we DNAT with IPv6?
	args = append(args, "-j", "DNAT", "--to-destination", net.JoinHostPort(proxyIP.String(), strconv.Itoa(proxyPort)))
	return args
}

// Build a slice of iptables args for a from-container public-port rule.
// See iptablesContainerPortalArgs
// TODO: Should we just reuse iptablesContainerPortalArgs?
func (proxier *Proxier) iptablesContainerNodePortArgs(nodePort int, protocol api.Protocol, proxyIP net.IP, proxyPort int, service proxy.ServicePortName) []string {
	args := iptablesCommonPortalArgs(nil, false, false, nodePort, protocol, service)

	if proxyIP.Equal(zeroIPv4) || proxyIP.Equal(zeroIPv6) {
		// TODO: Can we REDIRECT with IPv6?
		args = append(args, "-j", "REDIRECT", "--to-ports", fmt.Sprintf("%d", proxyPort))
	} else {
		// TODO: Can we DNAT with IPv6?
		args = append(args, "-j", "DNAT", "--to-destination", net.JoinHostPort(proxyIP.String(), strconv.Itoa(proxyPort)))
	}

	return args
}

// Build a slice of iptables args for a from-host public-port rule.
// See iptablesHostPortalArgs
// TODO: Should we just reuse iptablesHostPortalArgs?
func (proxier *Proxier) iptablesHostNodePortArgs(nodePort int, protocol api.Protocol, proxyIP net.IP, proxyPort int, service proxy.ServicePortName) []string {
	args := iptablesCommonPortalArgs(nil, false, false, nodePort, protocol, service)

	if proxyIP.Equal(zeroIPv4) || proxyIP.Equal(zeroIPv6) {
		proxyIP = proxier.hostIP
	}
	// TODO: Can we DNAT with IPv6?
	args = append(args, "-j", "DNAT", "--to-destination", net.JoinHostPort(proxyIP.String(), strconv.Itoa(proxyPort)))
	return args
}

// isTooManyFDsError reports whether err indicates file-descriptor exhaustion.
func isTooManyFDsError(err error) bool {
	return strings.Contains(err.Error(), "too many open files")
}

// isClosedError reports whether err came from operating on a closed socket.
func isClosedError(err error) bool {
	// A brief discussion about handling closed error here:
	// https://code.google.com/p/go/issues/detail?id=4373#c14
	// TODO: maybe create a stoppable TCP listener that returns a StoppedError
	return strings.HasSuffix(err.Error(), "use of closed network connection")
}
socaa/kubernetes
pkg/proxy/userspace/proxier.go
GO
apache-2.0
39,220
// +build go1.9

// Copyright 2019 Microsoft Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// This code was auto-generated by:
// github.com/Azure/azure-sdk-for-go/tools/profileBuilder

package marketplaceorderingapi

import original "github.com/Azure/azure-sdk-for-go/services/marketplaceordering/mgmt/2015-06-01/marketplaceordering/marketplaceorderingapi"

// MarketplaceAgreementsClientAPI re-exports the versioned interface under
// this profile package via a type alias.
type MarketplaceAgreementsClientAPI = original.MarketplaceAgreementsClientAPI

// OperationsClientAPI re-exports the versioned interface under this profile
// package via a type alias.
type OperationsClientAPI = original.OperationsClientAPI
ironcladlou/origin
vendor/github.com/Azure/azure-sdk-for-go/profiles/preview/marketplaceordering/mgmt/marketplaceordering/marketplaceorderingapi/models.go
GO
apache-2.0
1,018
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/compiler/xla/client/padding.h"

#include <algorithm>

#include "tensorflow/core/lib/math/math_util.h"
#include "tensorflow/core/platform/logging.h"

namespace xla {

// Computes the per-dimension {low, high} padding for the requested padding
// mode, given matching arrays of input sizes, window sizes, and strides.
std::vector<std::pair<int64, int64>> MakePadding(
    tensorflow::gtl::ArraySlice<int64> input_dimensions,
    tensorflow::gtl::ArraySlice<int64> window_dimensions,
    tensorflow::gtl::ArraySlice<int64> window_strides, Padding padding) {
  CHECK_EQ(input_dimensions.size(), window_dimensions.size());
  CHECK_EQ(input_dimensions.size(), window_strides.size());

  std::vector<std::pair<int64, int64>> padding_config;

  if (padding == Padding::kValid) {
    // VALID padding: no padding at all, one {0, 0} entry per dimension.
    padding_config.resize(window_dimensions.size(), {0, 0});
    return padding_config;
  }

  if (padding == Padding::kSame) {
    for (int64 dim = 0; dim < input_dimensions.size(); ++dim) {
      const int64 input_size = input_dimensions[dim];
      const int64 window_size = window_dimensions[dim];
      const int64 stride = window_strides[dim];

      // SAME padding follows the TensorFlow convention:
      //   output_size := ceil(input_size / stride)
      // (see tensorflow/python/ops/nn.py and
      // tensorflow/core/kernels/ops_util.cc, including the max(0, x) guard
      // against negative padding). The base area is padded just enough that
      // the last window fits entirely inside it.
      //
      // The total padding is split as evenly as possible between the low and
      // high sides; when it is odd, the extra element goes on the high side.
      // For an even-sized window 2N this means viewing it as (N - 1) + 1 + N
      // (center element slightly left of the middle), which is the choice
      // DistBelief and TensorFlow make. The split is otherwise arbitrary.
      const int64 output_size =
          tensorflow::MathUtil::CeilOfRatio(input_size, stride);
      const int64 total_padding = std::max<int64>(
          (output_size - 1) * stride + window_size - input_size, 0);
      // total_padding >= 0, so low = total/2 is floor(total/2) and
      // high = total - low is ceil(total/2).
      const int64 pad_low = total_padding / 2;
      padding_config.emplace_back(pad_low, total_padding - pad_low);
    }
  }

  return padding_config;
}

}  // namespace xla
AsimmHirani/ISpyPi
tensorflow/contrib/tensorflow-master/tensorflow/compiler/xla/client/padding.cc
C++
apache-2.0
5,186
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.namenode;

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.common.GenerationStamp;
import org.apache.hadoop.hdfs.server.common.Storage;

/**
 *
 * CreateEditsLog
 *   Synopsis: CreateEditsLog -f numFiles StartingBlockId numBlocksPerFile
 *        [-l blockLength] [-r replicafactor] [-d editsLogDirectory]
 *      Default replication factor is 1
 *      Default edits log directory is /tmp/EditsLogOut
 *
 *   Create a name node's edits log in /tmp/EditsLogOut.
 *   The file /tmp/EditsLogOut/current/edits can be copied to a name node's
 *   dfs.namenode.name.dir/current directory and the name node can be started as usual.
 *
 *   The files are created in /createdViaInjectingInEditsLog
 *   The file names contain the starting and ending blockIds; hence once can
 *   create multiple edits logs using this command using non overlapping
 *   block ids and feed the files to a single name node.
 *
 *   See Also @link #DataNodeCluster for injecting a set of matching
 *   blocks created with this command into a set of simulated data nodes.
 *
 */
public class CreateEditsLog {
  /** All injected files are created under this directory. */
  static final String BASE_PATH = "/createdViaInjectingInEditsLog";
  /** Default output directory for the generated edits log. */
  static final String EDITS_DIR = "/tmp/EditsLogOut";
  // Actual output directory; may be overridden with -d.
  static String edits_dir = EDITS_DIR;
  static final public long BLOCK_GENERATION_STAMP =
    GenerationStamp.LAST_RESERVED_STAMP;

  /**
   * Logs mkdir/open/close operations for {@code numFiles} files, each with
   * {@code blocksPerFile} blocks whose ids start at {@code startingBlockId}.
   * Syncs the edit log roughly every 2000 blocks.
   */
  static void addFiles(FSEditLog editLog, int numFiles, short replication,
                       int blocksPerFile, long startingBlockId, long blockSize,
                       FileNameGenerator nameGenerator) {

    PermissionStatus p = new PermissionStatus("joeDoe", "people",
                                      new FsPermission((short)0777));
    INodeId inodeId = new INodeId();
    INodeDirectory dirInode = new INodeDirectory(inodeId.nextValue(), null, p,
      0L);
    editLog.logMkDir(BASE_PATH, dirInode);
    BlockInfo[] blocks = new BlockInfo[blocksPerFile];
    for (int iB = 0; iB < blocksPerFile; ++iB) {
      blocks[iB] = new BlockInfo(new Block(0, blockSize,
                                           BLOCK_GENERATION_STAMP),
                                 replication);
    }

    long currentBlockId = startingBlockId;
    long bidAtSync = startingBlockId;

    for (int iF = 0; iF < numFiles; iF++) {
      for (int iB = 0; iB < blocksPerFile; ++iB) {
         blocks[iB].setBlockId(currentBlockId++);
      }

      final INodeFile inode = new INodeFile(inodeId.nextValue(), null,
          p, 0L, 0L, blocks, replication, blockSize, (byte)0);
      inode.toUnderConstruction("", "");

     // Append path to filename with information about blockIDs
     String path = "_" + iF + "_B" + blocks[0].getBlockId() +
                   "_to_B" + blocks[blocksPerFile-1].getBlockId() + "_";
     String filePath = nameGenerator.getNextFileName("");
     filePath = filePath + path;
     // Log the new sub directory in edits
     if ((iF % nameGenerator.getFilesPerDirectory())  == 0) {
       String currentDir = nameGenerator.getCurrentDir();
       dirInode = new INodeDirectory(inodeId.nextValue(), null, p, 0L);
       editLog.logMkDir(currentDir, dirInode);
     }
     INodeFile fileUc = new INodeFile(inodeId.nextValue(), null,
         p, 0L, 0L, BlockInfo.EMPTY_ARRAY, replication, blockSize, (byte)0);
     fileUc.toUnderConstruction("", "");
     editLog.logOpenFile(filePath, fileUc, false, false);
     editLog.logCloseFile(filePath, inode);

     if (currentBlockId - bidAtSync >= 2000) { // sync every 2K blocks
       editLog.logSync();
       bidAtSync = currentBlockId;
     }
    }
    System.out.println("Created edits log in directory " + edits_dir);
    System.out.println(" containing " + numFiles + " File-Creates, each file with " + blocksPerFile + " blocks");
    System.out.println(" blocks range: " + startingBlockId + " to " +
        (currentBlockId-1));
  }

  static final String usage = "Usage: createditlogs " +
    " -f numFiles startingBlockIds NumBlocksPerFile [-l blockLength] " +
    "[-r replicafactor] [-d editsLogDirectory]\n" +
    "      Default replication factor is 1\n" +
    "      Default edits log directory is " + EDITS_DIR + "\n";

  static void printUsageExit() {
    System.out.println(usage);
    System.exit(-1);
  }

  static void printUsageExit(String err) {
    System.out.println(err);
    printUsageExit();
  }

  /**
   * Parses the command line, creates a standalone edit log under the
   * requested directory, and injects the file-creation operations.
   *
   * @param args arguments
   * @throws IOException if the edit log cannot be created or written
   */
  public static void main(String[] args)  throws IOException {
    long startingBlockId = 1;
    int numFiles = 0;
    short replication = 1;
    int numBlocksPerFile = 0;
    long blockSize = 10;

    if (args.length == 0) {
      printUsageExit();
    }

    for (int i = 0; i < args.length; i++) { // parse command line
      if (args[i].equals("-h")) {
        printUsageExit();
      }
      if (args[i].equals("-f")) {
       if (i + 3 >= args.length || args[i+1].startsWith("-") ||
           args[i+2].startsWith("-") || args[i+3].startsWith("-")) {
          printUsageExit(
              "Missing num files, starting block and/or number of blocks");
        }
        numFiles = Integer.parseInt(args[++i]);
        startingBlockId = Integer.parseInt(args[++i]);
        numBlocksPerFile = Integer.parseInt(args[++i]);
        if (numFiles <= 0 || numBlocksPerFile <= 0) {
          printUsageExit("numFiles and numBlocksPerFile must be greater than 0");
        }
      } else if (args[i].equals("-l")) {
        if (i + 1 >= args.length) {
          printUsageExit(
              "Missing block length");
        }
        blockSize = Long.parseLong(args[++i]);
      } else if (args[i].equals("-r")) {
        // Bounds-check BEFORE touching args[i+1]; the old code read args[i+1]
        // first (AIOOBE when -r was the last argument) and also matched any
        // unknown flag whose successor started with "-".
        if (i + 1 >= args.length || args[i+1].startsWith("-")) {
          printUsageExit(
              "Missing replication factor");
        }
        replication = Short.parseShort(args[++i]);
      } else if (args[i].equals("-d")) {
        if (i + 1 >= args.length || args[i+1].startsWith("-")) {
          printUsageExit("Missing edits logs directory");
        }
        edits_dir = args[++i];
      } else {
        printUsageExit();
      }
    }

    File editsLogDir = new File(edits_dir);
    File subStructureDir = new File(edits_dir + "/" +
        Storage.STORAGE_DIR_CURRENT);
    if ( !editsLogDir.exists() ) {
      if ( !editsLogDir.mkdir()) {
        System.out.println("cannot create " + edits_dir);
        System.exit(-1);
      }
    }
    if ( !subStructureDir.exists() ) {
      if ( !subStructureDir.mkdir()) {
        System.out.println("cannot create subdirs of " + edits_dir);
        System.exit(-1);
      }
    }

    FileNameGenerator nameGenerator = new FileNameGenerator(BASE_PATH, 100);

    FSEditLog editLog = FSImageTestUtil.createStandaloneEditLog(editsLogDir);
    editLog.openForWrite();
    addFiles(editLog, numFiles, replication, numBlocksPerFile, startingBlockId,
             blockSize, nameGenerator);
    editLog.logSync();
    editLog.close();
  }
}
ZhangXFeng/hadoop
src/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/CreateEditsLog.java
Java
apache-2.0
8,019